dev@10: import { dev@236: Component, dev@236: OnInit, dev@236: ViewChild, dev@236: ElementRef, dev@236: Input, dev@236: AfterViewInit, dev@236: NgZone, dev@236: OnDestroy, dev@236: ChangeDetectorRef dev@10: } from '@angular/core'; dev@196: import { dev@196: AudioPlayerService, AudioResource, dev@196: AudioResourceError dev@236: } from '../services/audio-player/audio-player.service'; dev@289: import wavesUI from 'waves-ui-piper'; dev@63: import { dev@64: FeatureExtractionService dev@236: } from '../services/feature-extraction/feature-extraction.service'; dev@236: import {Subscription} from 'rxjs/Subscription'; dev@63: import { dev@63: FeatureCollection, cannam@296: SimpleResponse, cannam@299: VectorFeature, cannam@299: MatrixFeature, cannam@299: TracksFeature dev@236: } from 'piper/HigherLevelUtilities'; dev@319: import {toSeconds, OutputDescriptor} from 'piper'; dev@236: import {FeatureList, Feature} from 'piper/Feature'; dev@81: import * as Hammer from 'hammerjs'; dev@236: import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram'; dev@8: dev@54: type Layer = any; dev@54: type Track = any; dev@59: type Colour = string; dev@6: dev@268: dev@268: dev@268: function* createColourGenerator(colours) { cannam@257: let index = 0; dev@268: const nColours = colours.length; cannam@257: while (true) { dev@268: yield colours[index = ++index % nColours]; cannam@257: } dev@268: } dev@268: dev@268: const defaultColourGenerator = createColourGenerator([ dev@268: '#0868ac', // "sapphire blue", our waveform / header colour dev@268: '#c33c54', // "brick red" dev@268: '#17bebb', // "tiffany blue" dev@268: '#001021', // "rich black" dev@268: '#fa8334', // "mango tango" dev@268: '#034748' // "deep jungle green" dev@268: ]); cannam@257: dev@319: type HigherLevelFeatureShape = 'regions' | 'instants' | 'notes'; dev@319: type NoteLikeUnit = 'midi' | 'hz' ; dev@319: interface Note { dev@319: time: number; dev@319: duration: number; dev@319: pitch: number; dev@319: velocity?: number; dev@319: } dev@319: dev@6: @Component({ dev@236: selector: 'ugly-waveform', dev@6: templateUrl: './waveform.component.html', dev@6: styleUrls: ['./waveform.component.css'] dev@6: }) cannam@257: dev@51: export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy { dev@20: dev@8: @ViewChild('track') trackDiv: ElementRef; dev@285: @Input() set width(width: number) { dev@285: if (this.timeline) { dev@285: requestAnimationFrame(() => { dev@285: this.timeline.timeContext.visibleWidth = width; dev@285: this.timeline.tracks.update(); dev@285: }); dev@285: } dev@285: } dev@189: @Input() timeline: Timeline; dev@189: @Input() trackIdPrefix: string; dev@196: @Input() set isSubscribedToExtractionService(isSubscribed: boolean) { dev@196: if (isSubscribed) { dev@196: if (this.featureExtractionSubscription) { dev@196: return; dev@196: } dev@268: dev@196: this.featureExtractionSubscription = dev@196: this.piperService.featuresExtracted$.subscribe( dev@196: features => { dev@268: this.renderFeatures(features, defaultColourGenerator.next().value); dev@196: }); dev@196: } else { dev@196: if (this.featureExtractionSubscription) { dev@196: this.featureExtractionSubscription.unsubscribe(); dev@196: } dev@196: } dev@196: } dev@196: @Input() set isSubscribedToAudioService(isSubscribed: boolean) { dev@196: this._isSubscribedToAudioService = isSubscribed; dev@196: if (isSubscribed) { dev@196: if (this.onAudioDataSubscription) { dev@196: return; dev@196: } dev@196: dev@196: this.onAudioDataSubscription = dev@196: this.audioService.audioLoaded$.subscribe(res => { dev@196: const wasError = (res as AudioResourceError).message != null; dev@196: dev@196: if (wasError) { dev@196: console.warn('No audio, display error?'); dev@196: } else { dev@196: this.audioBuffer = (res as AudioResource).samples; dev@196: } dev@196: }); dev@196: } else { dev@196: if (this.onAudioDataSubscription) { dev@196: this.onAudioDataSubscription.unsubscribe(); dev@196: } dev@196: } dev@196: } dev@196: dev@196: get isSubscribedToAudioService(): boolean { dev@196: return this._isSubscribedToAudioService; dev@196: } dev@196: dev@196: @Input() set isOneShotExtractor(isOneShot: boolean) { dev@196: this._isOneShotExtractor = isOneShot; dev@196: } dev@196: dev@196: get isOneShotExtractor(): boolean { dev@196: return this._isOneShotExtractor; dev@196: } dev@196: dev@196: @Input() set isSeeking(isSeeking: boolean) { dev@196: this._isSeeking = isSeeking; dev@196: if (isSeeking) { dev@196: if (this.seekedSubscription) { dev@196: return; dev@196: } dev@236: if (this.playingStateSubscription) { dev@196: return; dev@196: } dev@196: dev@196: this.seekedSubscription = this.audioService.seeked$.subscribe(() => { dev@337: if (!this.audioService.isPlaying()) { dev@196: this.animate(); dev@236: } dev@196: }); dev@196: this.playingStateSubscription = dev@196: this.audioService.playingStateChange$.subscribe( dev@196: isPlaying => { dev@337: if (isPlaying) { dev@196: this.animate(); dev@236: } dev@196: }); dev@196: } else { dev@341: if (this.cursorLayer && this.waveTrack) { dev@341: this.waveTrack.remove(this.cursorLayer); dev@341: } dev@196: if (this.playingStateSubscription) { dev@196: this.playingStateSubscription.unsubscribe(); dev@196: } dev@196: if (this.seekedSubscription) { dev@196: this.seekedSubscription.unsubscribe(); dev@196: } dev@196: } dev@196: } dev@196: dev@196: get isSeeking(): boolean { dev@196: return this._isSeeking; dev@196: } dev@196: dev@16: set audioBuffer(buffer: AudioBuffer) { dev@16: this._audioBuffer = buffer || undefined; cannam@117: if (this.audioBuffer) { dev@20: this.renderWaveform(this.audioBuffer); dev@180: // this.renderSpectrogram(this.audioBuffer); cannam@117: } dev@16: } dev@16: dev@16: get audioBuffer(): AudioBuffer { dev@16: return this._audioBuffer; dev@16: } dev@16: dev@196: private _audioBuffer: AudioBuffer; dev@196: private _isSubscribedToAudioService: boolean; dev@196: private _isOneShotExtractor: boolean; dev@196: private _isSeeking: boolean; dev@196: private cursorLayer: any; cannam@254: private highlightLayer: any; dev@196: private layers: Layer[]; dev@51: private featureExtractionSubscription: Subscription; dev@53: private playingStateSubscription: Subscription; dev@53: private seekedSubscription: Subscription; dev@196: private onAudioDataSubscription: Subscription; dev@155: private zoomOnMouseDown: number; dev@157: private offsetOnMouseDown: number; dev@196: private hasShot: boolean; dev@196: private isLoading: boolean; dev@341: private waveTrack: Track; dev@51: dev@236: private static changeColour(layer: Layer, colour: string): void { dev@236: const butcherShapes = (shape) => { dev@236: shape.install({color: () => colour}); dev@236: shape.params.color = colour; dev@236: shape.update(layer._renderingContext, layer.data); dev@236: }; dev@236: dev@236: layer._$itemCommonShapeMap.forEach(butcherShapes); dev@236: layer._$itemShapeMap.forEach(butcherShapes); dev@236: layer.render(); dev@236: layer.update(); dev@236: } dev@236: dev@31: constructor(private audioService: AudioPlayerService, dev@51: private piperService: FeatureExtractionService, dev@234: private ngZone: NgZone, dev@234: private ref: ChangeDetectorRef) { dev@196: this.isSubscribedToAudioService = true; dev@196: this.isSeeking = true; dev@185: this.layers = []; dev@196: this.audioBuffer = undefined; dev@54: this.timeline = undefined; dev@54: this.cursorLayer = undefined; cannam@254: this.highlightLayer = undefined; dev@196: this.isLoading = true; dev@51: } dev@51: dev@53: ngOnInit() { dev@53: } dev@10: dev@10: ngAfterViewInit(): void { dev@236: this.trackIdPrefix = this.trackIdPrefix || 'default'; dev@196: if (this.timeline) { dev@196: this.renderTimeline(null, true, true); dev@196: } else { dev@196: this.renderTimeline(); dev@196: } dev@20: } dev@20: dev@196: renderTimeline(duration: number = 1.0, dev@196: useExistingDuration: boolean = false, dev@196: isInitialRender: boolean = false): Timeline { dev@18: const track: HTMLElement = this.trackDiv.nativeElement; dev@236: track.innerHTML = ''; dev@18: const height: number = track.getBoundingClientRect().height; dev@18: const width: number = track.getBoundingClientRect().width; dev@18: const pixelsPerSecond = width / duration; dev@196: const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline; dev@196: dev@196: if (hasExistingTimeline) { dev@196: if (!useExistingDuration) { dev@196: this.timeline.pixelsPerSecond = pixelsPerSecond; dev@196: this.timeline.visibleWidth = width; dev@196: } dev@180: } else { dev@180: this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width); dev@180: } dev@341: this.waveTrack = this.timeline.createTrack( dev@196: track, dev@196: height, dev@196: `wave-${this.trackIdPrefix}` dev@196: ); dev@196: if (isInitialRender && hasExistingTimeline) { dev@196: // time axis dev@196: const timeAxis = new wavesUI.helpers.TimeAxisLayer({ dev@196: height: height, dev@196: color: '#b0b0b0' dev@196: }); dev@341: this.addLayer(timeAxis, this.waveTrack, this.timeline.timeContext, true); dev@196: this.cursorLayer = new wavesUI.helpers.CursorLayer({ cannam@257: height: height, cannam@257: color: '#c33c54' dev@196: }); dev@341: this.addLayer( dev@341: this.cursorLayer, dev@341: this.waveTrack, dev@341: this.timeline.timeContext dev@341: ); dev@196: } dev@196: if ('ontouchstart' in window) { dev@196: interface Point { dev@196: x: number; dev@196: y: number; dev@196: } dev@196: dev@236: let zoomGestureJustEnded = false; dev@196: dev@196: const pixelToExponent: Function = wavesUI.utils.scales.linear() dev@196: .domain([0, 100]) // 100px => factor 2 dev@196: .range([0, 1]); dev@196: dev@196: const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => { dev@196: return Math.pow( dev@196: Math.pow(p2.x - p1.x, 2) + dev@196: Math.pow(p2.y - p1.y, 2), 0.5); dev@196: }; dev@196: dev@205: const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => { dev@205: return { dev@205: x: 0.5 * (p1.x + p2.x), dev@205: y: 0.5 * (p1.y + p2.y) dev@205: }; dev@205: }; dev@205: dev@205: const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, { dev@205: recognizers: [ dev@205: [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }] dev@205: ] dev@205: }); dev@204: dev@204: // it seems HammerJs binds the event to the window? dev@204: // causing these events to propagate to other components? dev@204: const componentTimeline = this.timeline; dev@204: let initialZoom; dev@204: let initialDistance; dev@204: let offsetAtPanStart; dev@205: let startX; dev@205: let isZooming; dev@204: dev@196: const scroll = (ev) => { dev@236: if (ev.center.x - startX === 0) { dev@236: return; dev@236: } dev@236: dev@196: if (zoomGestureJustEnded) { dev@196: zoomGestureJustEnded = false; dev@236: console.log('Skip this event: likely a single touch dangling from pinch'); dev@196: return; dev@196: } dev@204: componentTimeline.timeContext.offset = offsetAtPanStart + dev@204: componentTimeline.timeContext.timeToPixel.invert(ev.deltaX); dev@204: componentTimeline.tracks.update(); dev@196: }; dev@196: dev@196: const zoom = (ev) => { dev@236: if (ev.touches.length < 2) { dev@236: return; dev@236: } dev@236: dev@214: ev.preventDefault(); dev@204: const minZoom = componentTimeline.state.minZoom; dev@204: const maxZoom = componentTimeline.state.maxZoom; dev@205: const p1: Point = { dev@218: x: ev.touches[0].clientX, dev@218: y: ev.touches[0].clientY dev@205: }; dev@205: const p2: Point = { dev@218: x: ev.touches[1].clientX, dev@218: y: ev.touches[1].clientY dev@205: }; dev@205: const distance = calculateDistance(p1, p2); dev@205: const midPoint = calculateMidPoint(p1, p2); dev@196: dev@196: const lastCenterTime = dev@205: componentTimeline.timeContext.timeToPixel.invert(midPoint.x); dev@196: dev@204: const exponent = pixelToExponent(distance - initialDistance); dev@204: const targetZoom = initialZoom * Math.pow(2, exponent); dev@196: dev@204: componentTimeline.timeContext.zoom = dev@196: Math.min(Math.max(targetZoom, minZoom), maxZoom); dev@196: dev@196: const newCenterTime = dev@205: componentTimeline.timeContext.timeToPixel.invert(midPoint.x); dev@196: dev@204: componentTimeline.timeContext.offset += newCenterTime - lastCenterTime; dev@204: componentTimeline.tracks.update(); dev@196: }; dev@205: hammertime.on('panstart', (ev) => { dev@204: offsetAtPanStart = componentTimeline.timeContext.offset; dev@205: startX = ev.center.x; dev@196: }); dev@196: hammertime.on('panleft', scroll); dev@196: hammertime.on('panright', scroll); dev@205: dev@205: dev@205: const element: HTMLElement = this.trackDiv.nativeElement; dev@205: element.addEventListener('touchstart', (e) => { dev@236: if (e.touches.length < 2) { dev@236: return; dev@236: } dev@236: dev@205: isZooming = true; dev@204: initialZoom = componentTimeline.timeContext.zoom; dev@196: dev@204: initialDistance = calculateDistance({ dev@218: x: e.touches[0].clientX, dev@218: y: e.touches[0].clientY dev@196: }, { dev@218: x: e.touches[1].clientX, dev@218: y: e.touches[1].clientY dev@196: }); dev@196: }); dev@205: element.addEventListener('touchend', () => { dev@205: if (isZooming) { dev@205: isZooming = false; dev@205: zoomGestureJustEnded = true; dev@205: } dev@301: }); dev@205: element.addEventListener('touchmove', zoom); dev@196: } dev@189: // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`); dev@189: // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`); dev@54: } dev@18: cannam@108: estimatePercentile(matrix, percentile) { cannam@108: // our sample is not evenly distributed across the whole data set: cannam@108: // it is guaranteed to include at least one sample from every cannam@108: // column, and could sample some values more than once. But it cannam@108: // should be good enough in most cases (todo: show this) cannam@109: if (matrix.length === 0) { cannam@109: return 0.0; cannam@109: } cannam@108: const w = matrix.length; cannam@108: const h = matrix[0].length; cannam@108: const n = w * h; cannam@109: const m = (n > 50000 ? 50000 : n); // should base that on the %ile cannam@108: let m_per = Math.floor(m / w); dev@236: if (m_per < 1) { dev@236: m_per = 1; dev@236: } dev@236: dev@236: const sample = []; cannam@108: for (let x = 0; x < w; ++x) { cannam@108: for (let i = 0; i < m_per; ++i) { cannam@108: const y = Math.floor(Math.random() * h); cannam@109: const value = matrix[x][y]; cannam@109: if (!isNaN(value) && value !== Infinity) { cannam@109: sample.push(value); cannam@109: } cannam@108: } cannam@108: } cannam@109: if (sample.length === 0) { dev@236: console.log('WARNING: No samples gathered, even though we hoped for ' + dev@301: (m_per * w) + ' of them'); cannam@109: return 0.0; cannam@109: } dev@236: sample.sort((a, b) => { return a - b; }); cannam@108: const ix = Math.floor((sample.length * percentile) / 100); dev@236: console.log('Estimating ' + percentile + '-%ile of ' + dev@301: n + '-sample dataset (' + w + ' x ' + h + ') as value ' + ix + dev@301: ' of sorted ' + sample.length + '-sample subset'); cannam@108: const estimate = sample[ix]; dev@236: console.log('Estimate is: ' + estimate + ' (where min sampled value = ' + dev@301: sample[0] + ' and max = ' + sample[sample.length - 1] + ')'); cannam@108: return estimate; cannam@108: } cannam@108: cannam@108: interpolatingMapper(hexColours) { cannam@108: const colours = hexColours.map(n => { cannam@108: const i = parseInt(n, 16); cannam@118: return [ ((i >> 16) & 255) / 255.0, dev@301: ((i >> 8) & 255) / 255.0, dev@301: ((i) & 255) / 255.0 ]; cannam@108: }); cannam@108: const last = colours.length - 1; cannam@108: return (value => { cannam@108: const m = value * last; cannam@108: if (m >= last) { cannam@108: return colours[last]; cannam@108: } cannam@108: if (m <= 0) { cannam@108: return colours[0]; cannam@108: } cannam@108: const base = Math.floor(m); cannam@108: const prop0 = base + 1.0 - m; cannam@108: const prop1 = m - base; cannam@108: const c0 = colours[base]; dev@236: const c1 = colours[base + 1]; cannam@118: return [ c0[0] * prop0 + c1[0] * prop1, dev@301: c0[1] * prop0 + c1[1] * prop1, dev@301: c0[2] * prop0 + c1[2] * prop1 ]; cannam@108: }); cannam@108: } dev@110: cannam@108: iceMapper() { dev@236: const hexColours = [ cannam@108: // Based on ColorBrewer ylGnBu dev@236: 'ffffff', 'ffff00', 'f7fcf0', 'e0f3db', 'ccebc5', 'a8ddb5', dev@236: '7bccc4', '4eb3d3', '2b8cbe', '0868ac', '084081', '042040' cannam@108: ]; cannam@108: hexColours.reverse(); cannam@108: return this.interpolatingMapper(hexColours); cannam@108: } dev@110: cannam@118: hsv2rgb(h, s, v) { // all values in range [0, 1] cannam@118: const i = Math.floor(h * 6); cannam@118: const f = h * 6 - i; cannam@118: const p = v * (1 - s); cannam@118: const q = v * (1 - f * s); cannam@118: const t = v * (1 - (1 - f) * s); cannam@118: let r = 0, g = 0, b = 0; cannam@118: switch (i % 6) { dev@301: case 0: r = v; g = t; b = p; break; dev@301: case 1: r = q; g = v; b = p; break; dev@301: case 2: r = p; g = v; b = t; break; dev@301: case 3: r = p; g = q; b = v; break; dev@301: case 4: r = t; g = p; b = v; break; dev@301: case 5: r = v; g = p; b = q; break; cannam@118: } cannam@118: return [ r, g, b ]; cannam@118: } dev@122: cannam@118: greenMapper() { cannam@118: const blue = 0.6666; cannam@118: const pieslice = 0.3333; cannam@118: return (value => { cannam@118: const h = blue - value * 2.0 * pieslice; cannam@118: const s = 0.5 + value / 2.0; cannam@118: const v = value; cannam@118: return this.hsv2rgb(h, s, v); cannam@118: }); cannam@118: } cannam@118: cannam@118: sunsetMapper() { cannam@118: return (value => { dev@236: const r = (value - 0.24) * 2.38; dev@236: const g = (value - 0.64) * 2.777; cannam@118: let b = (3.6 * value); dev@236: if (value > 0.277) { dev@236: b = 2.0 - b; dev@236: } cannam@118: return [ r, g, b ]; cannam@118: }); cannam@118: } cannam@118: dev@122: clearTimeline(): void { dev@122: // loop through layers and remove them, waves-ui provides methods for this but it seems to not work properly dev@122: const timeContextChildren = this.timeline.timeContext._children; dev@236: for (const track of this.timeline.tracks) { dev@122: if (track.layers.length === 0) { continue; } dev@122: const trackLayers = Array.from(track.layers); dev@122: while (trackLayers.length) { dev@236: const layer: Layer = trackLayers.pop(); dev@185: if (this.layers.includes(layer)) { dev@185: track.remove(layer); dev@185: this.layers.splice(this.layers.indexOf(layer), 1); dev@185: const index = timeContextChildren.indexOf(layer.timeContext); dev@185: if (index >= 0) { dev@185: timeContextChildren.splice(index, 1); dev@185: } dev@185: layer.destroy(); dev@122: } dev@122: } dev@122: } dev@122: } dev@122: dev@54: renderWaveform(buffer: AudioBuffer): void { dev@341: const height = this.trackDiv.nativeElement.getBoundingClientRect().height; dev@54: if (this.timeline) { dev@54: // resize dev@54: const width = this.trackDiv.nativeElement.getBoundingClientRect().width; dev@55: dev@122: this.clearTimeline(); dev@59: dev@54: this.timeline.visibleWidth = width; dev@54: this.timeline.pixelsPerSecond = width / buffer.duration; dev@341: this.waveTrack.height = height; dev@54: } else { dev@236: this.renderTimeline(buffer.duration); dev@54: } dev@83: this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration; cannam@106: dev@18: // time axis dev@18: const timeAxis = new wavesUI.helpers.TimeAxisLayer({ dev@18: height: height, cannam@106: color: '#b0b0b0' dev@18: }); dev@341: this.addLayer(timeAxis, this.waveTrack, this.timeline.timeContext, true); dev@18: cannam@161: const nchannels = buffer.numberOfChannels; cannam@161: const totalWaveHeight = height * 0.9; cannam@161: const waveHeight = totalWaveHeight / nchannels; dev@189: cannam@161: for (let ch = 0; ch < nchannels; ++ch) { dev@236: console.log('about to construct a waveform layer for channel ' + ch); cannam@161: const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, { dev@236: top: (height - totalWaveHeight) / 2 + waveHeight * ch, dev@236: height: waveHeight, cannam@257: color: '#0868ac', dev@236: channel: ch cannam@161: }); dev@341: this.addLayer(waveformLayer, this.waveTrack, this.timeline.timeContext); cannam@161: } cannam@117: dev@53: this.cursorLayer = new wavesUI.helpers.CursorLayer({ cannam@257: height: height, cannam@257: color: '#c33c54' dev@31: }); dev@341: this.addLayer(this.cursorLayer, this.waveTrack, this.timeline.timeContext); dev@51: this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline); dev@341: this.waveTrack.render(); dev@341: this.waveTrack.update(); dev@81: dev@196: this.isLoading = false; dev@234: this.ref.markForCheck(); dev@53: this.animate(); dev@53: } dev@53: cannam@117: renderSpectrogram(buffer: AudioBuffer): void { cannam@117: const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2; dev@189: const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`); cannam@117: dev@129: const spectrogramLayer = new WavesSpectrogramLayer(buffer, { cannam@221: top: 0, cannam@221: height: height, cannam@117: stepSize: 512, dev@129: blockSize: 1024, cannam@118: normalise: 'none', cannam@118: mapper: this.sunsetMapper() cannam@117: }); cannam@117: this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext); cannam@117: cannam@117: this.timeline.tracks.update(); cannam@117: } cannam@117: cannam@308: private addLineLayers(features: VectorFeature[], cannam@313: unit: string, cannam@308: colour: Colour) { cannam@298: cannam@308: // Winnow out empty features cannam@308: features = features.filter(feature => (feature.data.length > 0)); dev@316: cannam@308: // First establish a [min,max] range across all of the features cannam@308: let [min, max] = features.reduce((acc, feature) => { cannam@308: return feature.data.reduce((acc, val) => { cannam@308: const [min, max] = acc; cannam@308: return [Math.min (min, val), Math.max (max, val)]; cannam@308: }, acc); cannam@308: }, [Infinity, -Infinity]); cannam@308: dev@316: console.log('addLineLayers: ' + features.length + ' non-empty features, overall min = ' + min + ', max = ' + max); cannam@308: cannam@298: if (min === Infinity) { cannam@298: min = 0; cannam@298: max = 1; cannam@298: } cannam@308: cannam@298: if (min !== min || max !== max) { dev@316: console.log('WARNING: min or max is NaN'); cannam@298: min = 0; cannam@298: max = 1; cannam@298: } cannam@298: cannam@298: const height = this.trackDiv.nativeElement.getBoundingClientRect().height; cannam@308: cannam@308: // Now add a line layer for each vector feature cannam@308: const lineLayers = features.map(feature => { cannam@308: cannam@309: let duration = 0; cannam@309: cannam@309: // Give the plot items positions relative to the start of the cannam@309: // line, rather than relative to absolute time 0. This is cannam@309: // because we'll be setting the layer timeline start property cannam@309: // later on and these will be positioned relative to that dev@316: cannam@308: const plotData = [...feature.data].map((val, i) => { cannam@309: const t = i * feature.stepDuration; cannam@309: duration = t + feature.stepDuration; cannam@308: return { cannam@309: cx: t, cannam@308: cy: val cannam@308: }; cannam@308: }); dev@316: cannam@308: const lineLayer = new wavesUI.helpers.LineLayer(plotData, { cannam@308: color: colour, cannam@308: height: height, cannam@308: yDomain: [ min, max ] cannam@308: }); cannam@308: this.addLayer( cannam@308: lineLayer, dev@341: this.waveTrack, cannam@308: this.timeline.timeContext cannam@308: ); cannam@308: cannam@309: // Set start and duration so that the highlight layer can use cannam@309: // them to determine which line to draw values from cannam@309: lineLayer.start = feature.startTime; cannam@309: lineLayer.duration = duration; dev@316: cannam@308: return lineLayer; cannam@298: }); cannam@309: cannam@309: // And a single scale layer at left dev@316: // !!! todo: unit in scale layer cannam@298: const scaleLayer = new wavesUI.helpers.ScaleLayer({ cannam@298: tickColor: colour, cannam@298: textColor: colour, cannam@298: height: height, cannam@298: yDomain: [ min, max ] cannam@298: }); cannam@298: this.addLayer( cannam@298: scaleLayer, dev@341: this.waveTrack, cannam@298: this.timeline.timeContext cannam@298: ); cannam@308: cannam@309: // And a single highlight layer which uses all of the line layers cannam@309: // as its source material cannam@308: this.highlightLayer = new wavesUI.helpers.HighlightLayer(lineLayers, { cannam@298: opacity: 0.7, cannam@298: height: height, cannam@298: color: '#c33c54', cannam@298: labelOffset: 38, cannam@313: yDomain: [ min, max ], cannam@313: unit cannam@298: }); cannam@298: this.addLayer( cannam@298: this.highlightLayer, dev@341: this.waveTrack, cannam@298: this.timeline.timeContext cannam@298: ); cannam@298: } dev@303: dev@53: // TODO refactor - this doesn't belong here dev@64: private renderFeatures(extracted: SimpleResponse, colour: Colour): void { dev@196: if (this.isOneShotExtractor && !this.hasShot) { dev@196: this.featureExtractionSubscription.unsubscribe(); dev@196: this.hasShot = true; dev@196: } dev@196: dev@236: if (!extracted.hasOwnProperty('features') cannam@296: || !extracted.hasOwnProperty('outputDescriptor')) { dev@236: return; dev@236: } dev@236: if (!extracted.features.hasOwnProperty('shape') dev@301: || !extracted.features.hasOwnProperty('collected')) { dev@236: return; dev@236: } dev@64: const features: FeatureCollection = (extracted.features as FeatureCollection); dev@64: const outputDescriptor = extracted.outputDescriptor; dev@196: const height = this.trackDiv.nativeElement.getBoundingClientRect().height; dev@64: dev@316: let unit = ''; cannam@313: if (outputDescriptor.configured.hasOwnProperty('unit')) { cannam@313: unit = outputDescriptor.configured.unit; cannam@313: } cannam@313: dev@64: // TODO refactor all of this dev@63: switch (features.shape) { cannam@298: cannam@298: case 'vector': { cannam@299: const collected = features.collected as VectorFeature; cannam@313: this.addLineLayers([collected], unit, colour); cannam@296: break; dev@64: } dev@303: cannam@308: case 'tracks': { cannam@308: const collected = features.collected as TracksFeature; cannam@313: this.addLineLayers(collected, unit, colour); cannam@308: break; cannam@308: } dev@316: dev@64: case 'list': { dev@301: const featureData = features.collected as FeatureList; dev@236: if (featureData.length === 0) { dev@236: return; dev@236: } dev@319: dev@64: // TODO refactor, this is incomprehensible dev@319: try { dev@319: const featureShape = deduceHigherLevelFeatureShape( dev@319: featureData, dev@319: outputDescriptor dev@122: ); dev@319: switch (featureShape) { dev@319: case 'instants': dev@319: const plotData = featureData.map(feature => ({ dev@319: time: toSeconds(feature.timestamp), dev@319: label: feature.label dev@319: })); dev@319: const featureLayer = new wavesUI.helpers.TickLayer(plotData, { dev@319: height: height, dev@319: color: colour, dev@319: labelPosition: 'bottom', dev@319: shadeSegments: true dev@67: }); dev@319: this.addLayer( dev@319: featureLayer, dev@341: this.waveTrack, dev@319: this.timeline.timeContext dev@319: ); dev@319: break; dev@319: case 'regions': dev@319: this.renderRegions( dev@319: featureData, dev@319: outputDescriptor, dev@341: this.waveTrack, dev@319: height, dev@319: colour dev@319: ); dev@319: break; dev@319: case 'notes': cannam@333: const notes = mapFeaturesToNotes(featureData, outputDescriptor); cannam@333: let [min, max] = notes.reduce((acc, note) => { cannam@333: const [min, max] = acc; cannam@333: return [Math.min (min, note.pitch), Math.max (max, note.pitch)]; cannam@333: }, [Infinity, -Infinity]); cannam@333: if (min === Infinity || min < 0 || max < 0) { cannam@333: min = 0; cannam@333: max = 127; cannam@333: } cannam@333: // round min and max to octave boundaries (starting at C as in MIDI) cannam@333: min = 12 * Math.floor(min / 12); cannam@333: max = 12 * Math.ceil(max / 12); dev@319: const pianoRollLayer = new wavesUI.helpers.PianoRollLayer( cannam@333: notes, cannam@333: {height: height, color: colour, yDomain: [min, max] } dev@319: ); dev@319: this.addLayer( dev@319: pianoRollLayer, dev@341: this.waveTrack, dev@319: this.timeline.timeContext dev@319: ); dev@319: break; dev@319: } dev@319: } catch (e) { dev@319: console.warn(e); // TODO display dev@319: break; dev@64: } dev@64: break; dev@64: } cannam@106: case 'matrix': { dev@303: const collected = features.collected as MatrixFeature; dev@316: const startTime = collected.startTime; // !!! + make use of cannam@296: const stepDuration = collected.stepDuration; cannam@296: const matrixData = collected.data; dev@335: dev@236: if (matrixData.length === 0) { dev@236: return; dev@236: } dev@236: dev@236: console.log('matrix data length = ' + matrixData.length); dev@236: console.log('height of first column = ' + matrixData[0].length); cannam@109: const targetValue = this.estimatePercentile(matrixData, 95); cannam@108: const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0); dev@236: console.log('setting gain to ' + gain); cannam@120: const matrixEntity = cannam@120: new wavesUI.utils.PrefilledMatrixEntity(matrixData, dev@301: 0, // startTime dev@303: stepDuration); dev@236: const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, { cannam@108: gain, cannam@221: top: 0, cannam@221: height: height, cannam@109: normalise: 'none', cannam@108: mapper: this.iceMapper() cannam@108: }); dev@122: this.addLayer( cannam@108: matrixLayer, dev@341: this.waveTrack, cannam@108: this.timeline.timeContext dev@122: ); cannam@108: break; cannam@106: } dev@67: default: dev@236: console.log( dev@236: `Cannot render an appropriate layer for feature shape '${features.shape}'` dev@236: ); dev@63: } dev@59: dev@196: this.isLoading = false; dev@234: this.ref.markForCheck(); dev@56: this.timeline.tracks.update(); dev@336: this.animate(); dev@53: } dev@53: dev@53: private animate(): void { dev@236: if (!this.isSeeking) { dev@236: return; dev@236: } dev@196: dev@31: this.ngZone.runOutsideAngular(() => { dev@31: // listen for time passing... dev@31: const updateSeekingCursor = () => { dev@53: const currentTime = this.audioService.getCurrentTime(); dev@53: this.cursorLayer.currentPosition = currentTime; dev@53: this.cursorLayer.update(); dev@53: dev@341: if (this.highlightLayer) { cannam@254: this.highlightLayer.currentPosition = currentTime; cannam@254: this.highlightLayer.update(); cannam@254: } cannam@254: dev@53: const currentOffset = this.timeline.timeContext.offset; dev@53: const offsetTimestamp = currentOffset dev@53: + currentTime; dev@53: dev@53: const visibleDuration = this.timeline.timeContext.visibleDuration; dev@53: // TODO reduce duplication between directions and make more declarative dev@53: // this kinda logic should also be tested dev@53: const mustPageForward = offsetTimestamp > visibleDuration; dev@53: const mustPageBackward = currentTime < -currentOffset; dev@53: dev@53: if (mustPageForward) { dev@53: const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration; dev@53: dev@301: this.timeline.timeContext.offset = hasSkippedMultiplePages ? dev@301: -currentTime + 0.5 * visibleDuration : dev@301: currentOffset - visibleDuration; dev@51: this.timeline.tracks.update(); dev@34: } dev@53: dev@53: if (mustPageBackward) { dev@53: const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset; dev@301: this.timeline.timeContext.offset = hasSkippedMultiplePages ? dev@301: -currentTime + 0.5 * visibleDuration : dev@301: currentOffset + visibleDuration; dev@51: this.timeline.tracks.update(); dev@34: } dev@53: dev@337: if (this.audioService.isPlaying()) { dev@53: requestAnimationFrame(updateSeekingCursor); dev@236: } dev@31: }; dev@31: updateSeekingCursor(); dev@31: }); dev@6: } dev@16: dev@319: // TODO not sure how much of the logic in here is actually sensible w.r.t dev@319: // what it functionally produces dev@319: private renderRegions(featureData: FeatureList, dev@319: outputDescriptor: OutputDescriptor, dev@319: waveTrack: any, dev@319: height: number, dev@319: colour: Colour) { dev@319: console.log('Output is of region type'); dev@319: const binCount = outputDescriptor.configured.binCount || 0; dev@319: const isBarRegion = featureData[0].featureValues.length >= 1 || binCount >= 1 ; dev@319: const getSegmentArgs = () => { dev@319: if (isBarRegion) { dev@319: dev@319: // TODO refactor - this is messy dev@319: interface FoldsToNumber { dev@319: reduce(fn: (previousValue: number, dev@319: currentValue: T, dev@319: currentIndex: number, dev@319: array: ArrayLike) => number, dev@319: initialValue?: number): number; dev@319: } dev@319: dev@319: // TODO potentially change impl., i.e avoid reduce dev@319: const findMin = (arr: FoldsToNumber, dev@319: getElement: (x: T) => number): number => { dev@319: return arr.reduce( dev@319: (min, val) => Math.min(min, getElement(val)), dev@319: Infinity dev@319: ); dev@319: }; dev@319: dev@319: const findMax = (arr: FoldsToNumber, dev@319: getElement: (x: T) => number): number => { dev@319: return arr.reduce( dev@319: (min, val) => Math.max(min, getElement(val)), dev@319: -Infinity dev@319: ); dev@319: }; dev@319: dev@319: const min = findMin(featureData, (x: Feature) => { dev@319: return findMin(x.featureValues, y => y); dev@319: }); dev@319: dev@319: const max = findMax(featureData, (x: Feature) => { dev@319: return findMax(x.featureValues, y => y); dev@319: }); dev@319: dev@319: const barHeight = 1.0 / height; dev@319: return [ dev@319: featureData.reduce((bars, feature) => { dev@319: const staticProperties = { dev@319: x: toSeconds(feature.timestamp), dev@319: width: toSeconds(feature.duration), dev@319: height: min + barHeight, dev@319: color: colour, dev@319: opacity: 0.8 dev@319: }; dev@319: // TODO avoid copying Float32Array to an array - map is problematic here dev@319: return bars.concat([...feature.featureValues] dev@319: .map(val => Object.assign({}, staticProperties, {y: val}))); dev@319: }, []), dev@319: {yDomain: [min, max + barHeight], height: height} as any dev@319: ]; dev@319: } else { dev@319: return [featureData.map(feature => ({ dev@319: x: toSeconds(feature.timestamp), dev@319: width: toSeconds(feature.duration), dev@319: color: colour, dev@319: opacity: 0.8 dev@319: })), {height: height}]; dev@319: } dev@319: }; dev@319: dev@319: const segmentLayer = new wavesUI.helpers.SegmentLayer( dev@319: ...getSegmentArgs() dev@319: ); dev@319: this.addLayer( dev@319: segmentLayer, dev@319: waveTrack, dev@319: this.timeline.timeContext dev@319: ); dev@319: } dev@319: dev@122: private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): void { dev@54: timeContext.zoom = 1.0; dev@54: if (!layer.timeContext) { dev@54: layer.setTimeContext(isAxis ? dev@54: timeContext : new wavesUI.core.LayerTimeContext(timeContext)); dev@54: } dev@54: track.add(layer); dev@185: this.layers.push(layer); dev@54: layer.render(); dev@54: layer.update(); dev@122: if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) { dev@112: track.$layout.appendChild(this.cursorLayer.$el); dev@112: } dev@59: } dev@59: dev@51: ngOnDestroy(): void { dev@236: if (this.featureExtractionSubscription) { dev@196: this.featureExtractionSubscription.unsubscribe(); dev@236: } dev@236: if (this.playingStateSubscription) { dev@196: this.playingStateSubscription.unsubscribe(); dev@236: } dev@236: if (this.seekedSubscription) { dev@196: this.seekedSubscription.unsubscribe(); dev@236: } dev@236: if (this.onAudioDataSubscription) { dev@196: this.onAudioDataSubscription.unsubscribe(); dev@236: } dev@51: } dev@154: dev@155: seekStart(): void { dev@155: this.zoomOnMouseDown = this.timeline.timeContext.zoom; dev@157: this.offsetOnMouseDown = this.timeline.timeContext.offset; dev@155: } dev@155: dev@155: seekEnd(x: number): void { dev@157: const hasSameZoom: boolean = this.zoomOnMouseDown === dev@157: this.timeline.timeContext.zoom; dev@157: const hasSameOffset: boolean = this.offsetOnMouseDown === dev@157: this.timeline.timeContext.offset; dev@157: if (hasSameZoom && hasSameOffset) { dev@155: this.seek(x); dev@155: } dev@155: } dev@155: dev@154: seek(x: number): void { dev@154: if (this.timeline) { dev@154: const timeContext: any = this.timeline.timeContext; dev@341: const timeX = timeContext.timeToPixel.invert(x) - timeContext.offset; dev@196: if (this.isSeeking) { dev@341: this.audioService.seekTo(timeX); dev@341: } else { dev@341: if (this.highlightLayer) { dev@341: this.highlightLayer.currentPosition = timeX; dev@341: this.highlightLayer.update(); dev@341: } dev@196: } dev@154: } dev@154: } dev@6: } dev@319: dev@319: function deduceHigherLevelFeatureShape(featureData: FeatureList, dev@319: descriptor: OutputDescriptor) dev@319: : HigherLevelFeatureShape { dev@319: // TODO look at output descriptor instead of directly inspecting features dev@319: const hasDuration = descriptor.configured.hasDuration; dev@319: const binCount = descriptor.configured.binCount; dev@319: const isMarker = !hasDuration dev@319: && binCount === 0 dev@319: && featureData[0].featureValues == null; dev@319: dev@319: const isMaybeNote = getCanonicalNoteLikeUnit(descriptor.configured.unit) dev@319: && [1, 2].find(nBins => nBins === binCount); dev@319: dev@319: const isRegionLike = hasDuration && featureData[0].timestamp != null; dev@319: dev@319: const isNote = isMaybeNote && isRegionLike; dev@319: const isRegion = !isMaybeNote && isRegionLike; dev@319: if (isMarker) { dev@319: return 'instants'; dev@319: } dev@319: if (isNote) { dev@319: return 'notes'; dev@319: } dev@319: if (isRegion) { dev@319: return 'regions'; dev@319: } dev@335: throw new Error('No shape could be deduced'); dev@319: } dev@319: dev@319: function getCanonicalNoteLikeUnit(unit: string): NoteLikeUnit | null { dev@319: const canonicalUnits: NoteLikeUnit[] = ['midi', 'hz']; dev@319: return canonicalUnits.find(canonicalUnit => { dev@335: return unit.toLowerCase().indexOf(canonicalUnit) >= 0; dev@319: }); dev@319: } dev@319: dev@319: function mapFeaturesToNotes(featureData: FeatureList, dev@319: descriptor: OutputDescriptor): Note[] { dev@319: const canonicalUnit = getCanonicalNoteLikeUnit(descriptor.configured.unit); dev@319: const isHz = canonicalUnit === 'hz'; dev@319: return featureData.map(feature => ({ dev@319: time: toSeconds(feature.timestamp), dev@319: duration: toSeconds(feature.duration), dev@319: pitch: isHz ? dev@319: frequencyToMidiNote(feature.featureValues[0]) : feature.featureValues[0] dev@319: })); dev@319: } dev@319: dev@319: function frequencyToMidiNote(frequency: number, dev@319: concertA: number = 440.0): number { dev@319: return 69 + 12 * Math.log2(frequency / concertA); dev@319: }