import {
  Component, OnInit, ViewChild, ElementRef, Input, AfterViewInit, NgZone,
  OnDestroy
} from '@angular/core';
import {AudioPlayerService} from "../services/audio-player/audio-player.service";
import wavesUI from 'waves-ui';
import {
  FeatureExtractionService
} from "../services/feature-extraction/feature-extraction.service";
import {Subscription} from "rxjs";
import {
  FeatureCollection,
  FixedSpacedFeatures, SimpleResponse
} from "piper/HigherLevelUtilities";
import {toSeconds} from "piper";
import {FeatureList, Feature} from "piper/Feature";
import * as Hammer from 'hammerjs';
import {WavesSpectrogramLayer} from "../spectrogram/Spectrogram";

type Timeline = any; // TODO what type is it actually? Start a .d.ts for waves-ui (see the sketch after ngAfterViewInit below)
type Layer = any;
type Track = any;
type Colour = string;

@Component({
  selector: 'app-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;

  private _audioBuffer: AudioBuffer;
  private timeline: Timeline;
  private cursorLayer: any;

  @Input()
  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    if (this.audioBuffer) {
      this.renderWaveform(this.audioBuffer);
      this.renderSpectrogram(this.audioBuffer);
    }
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private isPlaying: boolean;
  private offsetAtPanStart: number;
  private initialZoom: number;
  private initialDistance: number;
  private zoomOnMouseDown: number;
  private offsetOnMouseDown: number;

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              public ngZone: NgZone) {
    this._audioBuffer = undefined;
    this.timeline = undefined;
    this.cursorLayer = undefined;
    this.isPlaying = false;
    const colours = function* () {
      const circularColours = [
        'black',
        'red',
        'green',
        'purple',
        'orange'
      ];
      let index = 0;
      const nColours = circularColours.length;
      while (true) {
        yield circularColours[index = ++index % nColours];
      }
    }();

    this.featureExtractionSubscription = piperService.featuresExtracted$.subscribe(
      features => {
        this.renderFeatures(features, colours.next().value);
      });
    this.playingStateSubscription = audioService.playingStateChange$.subscribe(
      isPlaying => {
        this.isPlaying = isPlaying;
        if (this.isPlaying) {
          this.animate();
        }
      });
    this.seekedSubscription = audioService.seeked$.subscribe(() => {
      if (!this.isPlaying) {
        this.animate();
      }
    });
  }

  ngOnInit() {
  }

  ngAfterViewInit(): void {
    this.timeline = this.renderTimeline();
  }
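
  // The Timeline/Layer/Track aliases above are stopgaps. A hand-rolled
  // declaration file could start like the sketch below; this is an
  // assumption inferred purely from how waves-ui is used in this component,
  // not from documented API, so verify each member against the library
  // source before adopting it:
  //
  //   // waves-ui.d.ts (hypothetical, incomplete)
  //   declare module 'waves-ui' {
  //     export class Timeline {
  //       constructor(pixelsPerSecond: number, visibleWidth: number);
  //       visibleWidth: number;
  //       pixelsPerSecond: number;
  //       timeContext: any;
  //       tracks: any;
  //       state: any;
  //       createTrack(el: HTMLElement, height: number, id: string): any;
  //       getTrackById(id: string): any;
  //     }
  //     // core, helpers, states and utils namespaces would follow the
  //     // same pattern, typed incrementally as they are needed here.
  //   }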

  renderTimeline(duration: number = 1.0): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = "";
    const height: number = track.getBoundingClientRect().height;
    const width: number = track.getBoundingClientRect().width;
    const pixelsPerSecond = width / duration;
    const timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    timeline.createTrack(track, height / 2, 'wave');
    timeline.createTrack(track, height / 2, 'grid');
    return timeline;
  }

  estimatePercentile(matrix, percentile) {
    // Our sample is not evenly distributed across the whole data set:
    // it is guaranteed to include at least one sample from every
    // column, and could sample some values more than once. But it
    // should be good enough in most cases (todo: show this).
    if (matrix.length === 0) {
      return 0.0;
    }
    const w = matrix.length;
    const h = matrix[0].length;
    const n = w * h;
    const m = (n > 50000 ? 50000 : n); // should base that on the %ile
    let mPerColumn = Math.floor(m / w);
    if (mPerColumn < 1) mPerColumn = 1;
    const sample = [];
    for (let x = 0; x < w; ++x) {
      for (let i = 0; i < mPerColumn; ++i) {
        const y = Math.floor(Math.random() * h);
        const value = matrix[x][y];
        if (!isNaN(value) && value !== Infinity) {
          sample.push(value);
        }
      }
    }
    if (sample.length === 0) {
      console.warn("No samples gathered, even though we hoped for " +
                   (mPerColumn * w) + " of them");
      return 0.0;
    }
    sample.sort((a, b) => a - b);
    const ix = Math.floor((sample.length * percentile) / 100);
    console.log("Estimating " + percentile + "-%ile of " +
                n + "-sample dataset (" + w + " x " + h + ") as value " + ix +
                " of sorted " + sample.length + "-sample subset");
    const estimate = sample[ix];
    console.log("Estimate is: " + estimate + " (where min sampled value = " +
                sample[0] + " and max = " + sample[sample.length - 1] + ")");
    return estimate;
  }

  interpolatingMapper(hexColours) {
    const colours = hexColours.map(n => {
      const i = parseInt(n, 16);
      return [((i >> 16) & 255) / 255.0,
              ((i >> 8) & 255) / 255.0,
              (i & 255) / 255.0];
    });
    const last = colours.length - 1;
    return (value => {
      const m = value * last;
      if (m >= last) {
        return colours[last];
      }
      if (m <= 0) {
        return colours[0];
      }
      const base = Math.floor(m);
      const prop0 = base + 1.0 - m;
      const prop1 = m - base;
      const c0 = colours[base];
      const c1 = colours[base + 1];
      return [c0[0] * prop0 + c1[0] * prop1,
              c0[1] * prop0 + c1[1] * prop1,
              c0[2] * prop0 + c1[2] * prop1];
    });
  }
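
  // For illustration (hypothetical values, not used by the component): a
  // two-stop mapper built from black and white resolves the midpoint to
  // mid-grey, since each RGB channel is linearly interpolated in [0, 1],
  // and out-of-range inputs are clamped to the end colours:
  //
  //   const mapper = this.interpolatingMapper(["000000", "ffffff"]);
  //   mapper(0.5); // => [0.5, 0.5, 0.5]
  //   mapper(1.2); // => [1, 1, 1] (clamped)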

  iceMapper() {
    const hexColours = [
      // Based on ColorBrewer ylGnBu
      "ffffff", "ffff00", "f7fcf0", "e0f3db", "ccebc5", "a8ddb5",
      "7bccc4", "4eb3d3", "2b8cbe", "0868ac", "084081", "042040"
    ];
    hexColours.reverse();
    return this.interpolatingMapper(hexColours);
  }

  hsv2rgb(h, s, v) { // all values in range [0, 1]
    const i = Math.floor(h * 6);
    const f = h * 6 - i;
    const p = v * (1 - s);
    const q = v * (1 - f * s);
    const t = v * (1 - (1 - f) * s);
    let r = 0, g = 0, b = 0;
    switch (i % 6) {
      case 0: r = v; g = t; b = p; break;
      case 1: r = q; g = v; b = p; break;
      case 2: r = p; g = v; b = t; break;
      case 3: r = p; g = q; b = v; break;
      case 4: r = t; g = p; b = v; break;
      case 5: r = v; g = p; b = q; break;
    }
    return [r, g, b];
  }

  greenMapper() {
    const blue = 0.6666;
    const pieslice = 0.3333;
    return (value => {
      const h = blue - value * 2.0 * pieslice;
      const s = 0.5 + value / 2.0;
      const v = value;
      return this.hsv2rgb(h, s, v);
    });
  }

  sunsetMapper() {
    return (value => {
      const r = (value - 0.24) * 2.38;
      const g = (value - 0.64) * 2.777;
      let b = (3.6 * value);
      if (value > 0.277) b = 2.0 - b;
      return [r, g, b];
    });
  }

  clearTimeline(): void {
    // Loop through the layers and remove them manually: waves-ui provides
    // methods for this, but they do not seem to work reliably.
    const timeContextChildren = this.timeline.timeContext._children;
    for (const track of this.timeline.tracks) {
      if (track.layers.length === 0) { continue; }
      const trackLayers = Array.from(track.layers);
      while (trackLayers.length) {
        const layer: Layer = trackLayers.pop();
        track.remove(layer);

        const index = timeContextChildren.indexOf(layer.timeContext);
        if (index >= 0) {
          timeContextChildren.splice(index, 1);
        }
        layer.destroy();
      }
    }
  }

  renderWaveform(buffer: AudioBuffer): void {
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    if (this.timeline) {
      // resize
      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;

      this.clearTimeline();

      this.timeline.visibleWidth = width;
      this.timeline.pixelsPerSecond = width / buffer.duration;
      this.timeline.getTrackById('wave').height = height;
    } else {
      this.timeline = this.renderTimeline(buffer.duration);
    }
    // Look the track up only after the timeline is guaranteed to exist;
    // fetching it earlier would throw on the first render.
    const waveTrack = this.timeline.getTrackById('wave');
    this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: '#b0b0b0'
    });
    this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);

    const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
      top: 10,
      height: height * 0.9,
      color: 'darkblue'
    });
    this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);

    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height
    });
    this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    waveTrack.render();
    waveTrack.update();
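
    // Touch support follows. The pinch-zoom mapping is exponential: a 100px
    // change in pinch distance maps, via pixelToExponent, to one doubling or
    // halving of the zoom factor. So, as a worked example (illustrative
    // numbers only), fingers moving 200px further apart give
    // targetZoom = initialZoom * 2^2, a 4x zoom-in, while 100px closer
    // together gives initialZoom * 2^-1. This follows from the
    // linear().domain([0, 100]).range([0, 1]) scale configured below.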

    if ('ontouchstart' in window) {
      interface Point {
        x: number;
        y: number;
      }

      let zoomGestureJustEnded: boolean = false;

      const pixelToExponent: Function = wavesUI.utils.scales.linear()
        .domain([0, 100]) // 100px => factor 2
        .range([0, 1]);

      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
        return Math.pow(
          Math.pow(p2.x - p1.x, 2) +
          Math.pow(p2.y - p1.y, 2), 0.5);
      };

      const hammertime = new Hammer(this.trackDiv.nativeElement);
      const scroll = (ev) => {
        if (zoomGestureJustEnded) {
          zoomGestureJustEnded = false;
          console.log("Skipping this event: likely a single touch dangling from a pinch");
          return;
        }
        this.timeline.timeContext.offset = this.offsetAtPanStart +
          this.timeline.timeContext.timeToPixel.invert(ev.deltaX);
        this.timeline.tracks.update();
      };

      const zoom = (ev) => {
        const minZoom = this.timeline.state.minZoom;
        const maxZoom = this.timeline.state.maxZoom;
        const distance = calculateDistance({
          x: ev.pointers[0].clientX,
          y: ev.pointers[0].clientY
        }, {
          x: ev.pointers[1].clientX,
          y: ev.pointers[1].clientY
        });

        const lastCenterTime =
          this.timeline.timeContext.timeToPixel.invert(ev.center.x);

        const exponent = pixelToExponent(distance - this.initialDistance);
        const targetZoom = this.initialZoom * Math.pow(2, exponent);

        this.timeline.timeContext.zoom =
          Math.min(Math.max(targetZoom, minZoom), maxZoom);

        const newCenterTime =
          this.timeline.timeContext.timeToPixel.invert(ev.center.x);

        this.timeline.timeContext.offset += newCenterTime - lastCenterTime;
        this.timeline.tracks.update();
      };
      hammertime.get('pinch').set({ enable: true });
      hammertime.on('panstart', () => {
        this.offsetAtPanStart = this.timeline.timeContext.offset;
      });
      hammertime.on('panleft', scroll);
      hammertime.on('panright', scroll);
      hammertime.on('pinchstart', (e) => {
        this.initialZoom = this.timeline.timeContext.zoom;

        this.initialDistance = calculateDistance({
          x: e.pointers[0].clientX,
          y: e.pointers[0].clientY
        }, {
          x: e.pointers[1].clientX,
          y: e.pointers[1].clientY
        });
      });
      hammertime.on('pinch', zoom);
      hammertime.on('pinchend', () => {
        zoomGestureJustEnded = true;
      });
    }

    this.animate();
  }
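
  // A note on the spectrogram parameters used below: with blockSize 1024 and
  // stepSize 512 the analysis windows overlap by 50%, and at a 44100 Hz
  // sample rate one column spans 1024 / 44100 ≈ 23 ms, advancing by about
  // 11.6 ms per step. (Illustrative arithmetic only; the actual sample rate
  // comes from the AudioBuffer at runtime.)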

  renderSpectrogram(buffer: AudioBuffer): void {
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const gridTrack = this.timeline.getTrackById('grid');

    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
      top: height * 0.05,
      height: height * 0.9,
      stepSize: 512,
      blockSize: 1024,
      normalise: 'none',
      mapper: this.sunsetMapper()
    });
    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);

    this.timeline.tracks.update();
  }

  // TODO refactor - this doesn't belong here
  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
    if (!extracted.hasOwnProperty('features') || !extracted.hasOwnProperty('outputDescriptor')) return;
    if (!extracted.features.hasOwnProperty('shape') || !extracted.features.hasOwnProperty('data')) return;
    const features: FeatureCollection = (extracted.features as FeatureCollection);
    const outputDescriptor = extracted.outputDescriptor;
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const waveTrack = this.timeline.getTrackById('wave');

    // TODO refactor all of this
    switch (features.shape) {
      case 'vector': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        const featureData = (features.data as Float32Array);
        if (featureData.length === 0) return;
        const normalisationFactor = 1.0 /
          featureData.reduce(
            (currentMax, feature) => Math.max(currentMax, feature),
            -Infinity
          );

        const plotData = [...featureData].map((feature, i) => {
          return {
            cx: i * stepDuration,
            cy: feature * normalisationFactor
          };
        });

        const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
          color: colour,
          height: height
        });
        this.addLayer(
          lineLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      case 'list': {
        const featureData = (features.data as FeatureList);
        if (featureData.length === 0) return;
        // TODO look at the output descriptor instead of directly inspecting features
        const hasDuration = outputDescriptor.configured.hasDuration;
        const isMarker = !hasDuration
          && outputDescriptor.configured.binCount === 0
          && featureData[0].featureValues == null;
        const isRegion = hasDuration
          && featureData[0].timestamp != null;
        console.log("Have list features: length " + featureData.length +
                    ", isMarker " + isMarker + ", isRegion " + isRegion +
                    ", hasDuration " + hasDuration);
        // TODO refactor, this is incomprehensible
        if (isMarker) {
          const plotData = featureData.map(feature => {
            return {time: toSeconds(feature.timestamp)};
          });
          const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
            height: height,
            color: colour,
          });
          this.addLayer(
            featureLayer,
            waveTrack,
            this.timeline.timeContext
          );
        } else if (isRegion) {
          console.log("Output is of region type");
          const binCount = outputDescriptor.configured.binCount || 0;
          const isBarRegion = featureData[0].featureValues.length >= 1 || binCount >= 1;
          const getSegmentArgs = () => {
            if (isBarRegion) {

              // TODO refactor - this is messy
              interface FoldsToNumber<T> {
                reduce(fn: (previousValue: number,
                            currentValue: T,
                            currentIndex: number,
                            array: ArrayLike<T>) => number,
                       initialValue?: number): number;
              }

              // TODO potentially change the implementation, i.e. avoid reduce
              const findMin = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
                return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity);
              };

              const findMax = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
                return arr.reduce((max, val) => Math.max(max, getElement(val)), -Infinity);
              };

              const min = findMin(featureData, (x: Feature) => {
                return findMin(x.featureValues, y => y);
              });

              const max = findMax(featureData, (x: Feature) => {
                return findMax(x.featureValues, y => y);
              });

              const barHeight = 1.0 / height;
              return [
                featureData.reduce((bars, feature) => {
                  const staticProperties = {
                    x: toSeconds(feature.timestamp),
                    width: toSeconds(feature.duration),
                    height: min + barHeight,
                    color: colour,
                    opacity: 0.8
                  };
                  // TODO avoid copying the Float32Array to an array - map is problematic here
                  return bars.concat([...feature.featureValues]
                    .map(val => Object.assign({}, staticProperties, {y: val})));
                }, []),
                {yDomain: [min, max + barHeight], height: height} as any
              ];
            } else {
              return [featureData.map(feature => {
                return {
                  x: toSeconds(feature.timestamp),
                  width: toSeconds(feature.duration),
                  color: colour,
                  opacity: 0.8
                };
              }), {height: height}];
            }
          };

          const segmentLayer = new wavesUI.helpers.SegmentLayer(
            ...getSegmentArgs()
          );
          this.addLayer(
            segmentLayer,
            waveTrack,
            this.timeline.timeContext
          );
        }
        break;
      }
      case 'matrix': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        //!!! + start time
        const matrixData = (features.data as Float32Array[]);
        if (matrixData.length === 0) return;
        console.log("matrix data length = " + matrixData.length);
        console.log("height of first column = " + matrixData[0].length);
        const targetValue = this.estimatePercentile(matrixData, 95);
        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
        console.log("setting gain to " + gain);
        const matrixEntity =
          new wavesUI.utils.PrefilledMatrixEntity(matrixData,
                                                  0, // startTime
                                                  stepDuration);
        const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
          gain,
          height: height * 0.9,
          top: height * 0.05,
          normalise: 'none',
          mapper: this.iceMapper()
        });
        this.addLayer(
          matrixLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      default:
        console.log("Cannot render an appropriate layer for feature shape '" +
                    features.shape + "'");
    }

    this.timeline.tracks.update();
  }
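
  // The animation loop below runs outside Angular's zone deliberately:
  // requestAnimationFrame fires up to ~60 times per second during playback,
  // and running it inside the zone would trigger a change-detection pass on
  // every frame. Only the cursor layer needs repainting, so bypassing zone
  // bookkeeping here is the cheaper choice.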
  private animate(): void {
    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more
        // declarative; this kind of logic should also be tested
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration;

          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset - visibleDuration;
          this.timeline.tracks.update();
        }

        if (mustPageBackward) {
          const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.isPlaying) {
          requestAnimationFrame(updateSeekingCursor);
        }
      };
      updateSeekingCursor();
    });
  }

  private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): void {
    timeContext.zoom = 1.0;
    if (!layer.timeContext) {
      layer.setTimeContext(isAxis ?
        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
    }
    track.add(layer);
    layer.render();
    layer.update();
    // Keep the cursor drawn on top of any newly added layer by moving its
    // element back to the end of the track's layout.
    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
      track.$layout.appendChild(this.cursorLayer.$el);
    }
  }

  private static changeColour(layer: Layer, colour: string): void {
    const butcherShapes = (shape) => {
      shape.install({color: () => colour});
      shape.params.color = colour;
      shape.update(layer._renderingContext, layer.data);
    };

    layer._$itemCommonShapeMap.forEach(butcherShapes);
    layer._$itemShapeMap.forEach(butcherShapes);
    layer.render();
    layer.update();
  }

  ngOnDestroy(): void {
    this.featureExtractionSubscription.unsubscribe();
    this.playingStateSubscription.unsubscribe();
    this.seekedSubscription.unsubscribe();
  }

  seekStart(): void {
    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
    this.offsetOnMouseDown = this.timeline.timeContext.offset;
  }

  seekEnd(x: number): void {
    const hasSameZoom: boolean = this.zoomOnMouseDown ===
      this.timeline.timeContext.zoom;
    const hasSameOffset: boolean = this.offsetOnMouseDown ===
      this.timeline.timeContext.offset;
    if (hasSameZoom && hasSameOffset) {
      this.seek(x);
    }
  }

  seek(x: number): void {
    if (this.timeline) {
      const timeContext: any = this.timeline.timeContext;
      this.audioService.seekTo(
        timeContext.timeToPixel.invert(x) - timeContext.offset
      );
    }
  }
}