# HG changeset patch
# User Lucas Thompson
# Date 1495731329 -3600
# Node ID 82d476b976e0d2c73eb90a71b6635d9cf58a342e
# Parent  f87a96ab1e3ff262369782aee22ac2aab44d44d0
Move waveform.component and remove all logic not concerned with rendering
an audio buffer as a waveform. Wire it up in app.component.

diff -r f87a96ab1e3f -r 82d476b976e0 src/app/app.component.html
--- a/src/app/app.component.html	Thu May 25 17:52:46 2017 +0100
+++ b/src/app/app.component.html	Thu May 25 17:55:29 2017 +0100
@@ -34,7 +34,8 @@
+  [rootAudioUri]="rootAudioUri"
+  [onSeek]="onSeek">
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/app.component.ts
--- a/src/app/app.component.ts	Thu May 25 17:52:46 2017 +0100
+++ b/src/app/app.component.ts	Thu May 25 17:55:29 2017 +0100
@@ -9,6 +9,7 @@
 import {MdIconRegistry} from '@angular/material';
 import {Subscription} from 'rxjs/Subscription';
 import {AnalysisItem} from './analysis-item/analysis-item.component';
+import {OnSeekHandler} from './playhead/PlayHeadHelpers';
 
 class PersistentStack<T> {
   private stack: T[];
@@ -74,6 +75,7 @@
   private nRecordings: number; // TODO user control for naming a recording
   private countingId: number; // TODO improve uniquely identifying items
   private rootAudioUri: string;
+  private onSeek: OnSeekHandler;
 
   constructor(private audioService: AudioPlayerService,
               private featureService: FeatureExtractionService,
@@ -83,6 +85,7 @@
     this.canExtract = false;
     this.nRecordings = 0;
     this.countingId = 0;
+    this.onSeek = (time) => this.audioService.seekTo(time);
 
     iconRegistry.addSvgIcon(
       'duck',
@@ -99,6 +102,19 @@
           this.audioBuffer = (resource as AudioResource).samples;
           if (this.audioBuffer) {
             this.canExtract = true;
+            const currentRootIndex = this.analyses.findIndex(val => {
+              return val.rootAudioUri === this.rootAudioUri && val.isRoot;
+            });
+            if (currentRootIndex !== -1) {
+              this.analyses.set(
+                currentRootIndex,
+                Object.assign(
+                  {},
+                  this.analyses.get(currentRootIndex),
+                  {audioData: this.audioBuffer}
+                )
+              );
+            }
           }
         }
       }
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/app.module.ts
--- a/src/app/app.module.ts	Thu May 25 17:52:46 2017 +0100
+++ b/src/app/app.module.ts	Thu May 25 17:55:29 2017 +0100
@@ -4,7 +4,7 @@
 import { HttpModule } from '@angular/http';
 
 import { AppComponent } from './app.component';
-import { WaveformComponent } from './waveform/waveform.component';
+import { WaveformComponent } from './visualisations/waveform/waveform.component';
 import { AudioFileOpenComponent } from './audio-file-open/audio-file-open.component';
 import { PlaybackControlComponent } from './playback-control/playback-control.component';
 import {
@@ -29,6 +29,8 @@
 import {ProgressBarComponent} from './progress-bar/progress-bar';
 import {UglyMaterialModule} from './ugly-material.module';
 import {Observable} from 'rxjs/Observable';
+import {PlayHeadComponent} from './playhead/playhead.component';
+import {LivePlayHeadComponent} from './playhead/live-play-head.component';
 
 export function createAudioContext(): AudioContext {
   return new (
@@ -114,7 +116,9 @@
     ProgressSpinnerComponent,
     AnalysisItemComponent,
     NotebookFeedComponent,
-    ProgressBarComponent
+    ProgressBarComponent,
+    PlayHeadComponent,
+    LivePlayHeadComponent
   ],
   imports: [
     BrowserModule,
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/visualisations/WavesJunk.ts
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/app/visualisations/WavesJunk.ts	Thu May 25 17:55:29 2017 +0100
@@ -0,0 +1,163 @@
+/**
+ * Created by lucast on 24/05/2017.
+ */
+import wavesUI from 'waves-ui-piper';
+import * as Hammer from 'hammerjs';
+import {TimePixelMapper} from '../playhead/PlayHeadHelpers';
+
+// TODO this is named as such as a reminder that it needs to be refactored
+export function attachTouchHandlerBodges(element: HTMLElement,
+                                         timeline: Timeline) {
+  interface Point {
+    x: number;
+    y: number;
+  }
+
+  let zoomGestureJustEnded = false;
+
+  const pixelToExponent: Function = wavesUI.utils.scales.linear()
+    .domain([0, 100]) // 100px => factor 2
+    .range([0, 1]);
+
+  const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
+    return Math.pow(
+      Math.pow(p2.x - p1.x, 2) +
+      Math.pow(p2.y - p1.y, 2), 0.5);
+  };
+
+  const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
+    return {
+      x: 0.5 * (p1.x + p2.x),
+      y: 0.5 * (p1.y + p2.y)
+    };
+  };
+
+  const hammertime = new Hammer.Manager(element, {
+    recognizers: [
+      [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
+    ]
+  });
+
+  // it seems HammerJs binds the event to the window?
+  // causing these events to propagate to other components?
+  let initialZoom;
+  let initialDistance;
+  let offsetAtPanStart;
+  let startX;
+  let isZooming;
+
+  const scroll = (ev) => {
+    if (ev.center.x - startX === 0) {
+      return;
+    }
+
+    if (zoomGestureJustEnded) {
+      zoomGestureJustEnded = false;
+      console.log('Skip this event: likely a single touch dangling from pinch');
+      return;
+    }
+    timeline.timeContext.offset = offsetAtPanStart +
+      timeline.timeContext.timeToPixel.invert(ev.deltaX);
+    timeline.tracks.update();
+  };
+
+  const zoom = (ev) => {
+    if (ev.touches.length < 2) {
+      return;
+    }
+
+    ev.preventDefault();
+    const minZoom = timeline.state.minZoom;
+    const maxZoom = timeline.state.maxZoom;
+    const p1: Point = {
+      x: ev.touches[0].clientX,
+      y: ev.touches[0].clientY
+    };
+    const p2: Point = {
+      x: ev.touches[1].clientX,
+      y: ev.touches[1].clientY
+    };
+    const distance = calculateDistance(p1, p2);
+    const midPoint = calculateMidPoint(p1, p2);
+
+    const lastCenterTime =
+      timeline.timeContext.timeToPixel.invert(midPoint.x);
+
+    const exponent = pixelToExponent(distance - initialDistance);
+    const targetZoom = initialZoom * Math.pow(2, exponent);
+
+    timeline.timeContext.zoom =
+      Math.min(Math.max(targetZoom, minZoom), maxZoom);
+
+    const newCenterTime =
+      timeline.timeContext.timeToPixel.invert(midPoint.x);
+
+    timeline.timeContext.offset += newCenterTime - lastCenterTime;
+    timeline.tracks.update();
+  };
+  hammertime.on('panstart', (ev) => {
+    offsetAtPanStart = timeline.timeContext.offset;
+    startX = ev.center.x;
+  });
+  hammertime.on('panleft', scroll);
+  hammertime.on('panright', scroll);
+
+  element.addEventListener('touchstart', (e) => {
+    if (e.touches.length < 2) {
+      return;
+    }
+
+    isZooming = true;
+    initialZoom = timeline.timeContext.zoom;
+
+    initialDistance = calculateDistance({
+      x: e.touches[0].clientX,
+      y: e.touches[0].clientY
+    }, {
+      x: e.touches[1].clientX,
+      y: e.touches[1].clientY
+    });
+  });
+  element.addEventListener('touchend', () => {
+    if (isZooming) {
+      isZooming = false;
+      zoomGestureJustEnded = true;
+    }
+  });
+  element.addEventListener('touchmove', zoom);
+}
+
+export function naivePagingMapper(timeline: Timeline): TimePixelMapper {
+  return (currentTime: number) => {
+    const currentOffset = timeline.timeContext.offset;
+    const offsetTimestamp = currentOffset +
+      currentTime;
+
+    const visibleDuration = timeline.timeContext.visibleDuration;
+    const mustPageForward = offsetTimestamp > visibleDuration;
+    const mustPageBackward =
+      currentTime < -currentOffset;
+
+    if (mustPageForward) {
+      const hasSkippedMultiplePages =
+        offsetTimestamp - visibleDuration > visibleDuration;
+
+      timeline.timeContext.offset = hasSkippedMultiplePages ?
+        -currentTime + 0.5 * visibleDuration :
+        currentOffset - visibleDuration;
+    }
+
+    if (mustPageBackward) {
+      const hasSkippedMultiplePages =
+        currentTime + visibleDuration < -currentOffset;
+      timeline.timeContext.offset = hasSkippedMultiplePages ?
+        -currentTime + 0.5 * visibleDuration :
+        currentOffset + visibleDuration;
+    }
+
+    if (mustPageForward || mustPageBackward) {
+      timeline.tracks.update();
+    }
+
+    return timeline.timeContext.timeToPixel(timeline.offset + currentTime);
+  };
+}
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/visualisations/waveform/waveform.component.css
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/app/visualisations/waveform/waveform.component.css	Thu May 25 17:55:29 2017 +0100
@@ -0,0 +1,4 @@
+.track {
+  height: 100%;
+  width: 100%;
+}
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/visualisations/waveform/waveform.component.html
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/app/visualisations/waveform/waveform.component.html	Thu May 25 17:55:29 2017 +0100
@@ -0,0 +1,4 @@
+<div #track class="track"
+     (mousedown)="seekStart()"
+     (mouseup)="seekEnd($event.offsetX)">
+</div>
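The pinch-zoom handler in WavesJunk.ts keeps the time under the fingers' midpoint fixed while the zoom factor changes. The arithmetic is easier to see stripped of Hammer and waves-ui. Below is a minimal, self-contained sketch, assuming a simplified time context in which timeToPixel is a pure linear time-to-pixel scale with the pan offset applied separately; FakeTimeContext and applyPinch are illustrative names, not part of the patch or of waves-ui-piper.

interface FakeTimeContext {
  pixelsPerSecond: number;
  zoom: number;
  offset: number; // pan offset, in seconds
}

const pixelToTime = (ctx: FakeTimeContext, px: number): number =>
  px / (ctx.pixelsPerSecond * ctx.zoom);

// 100px of extra pinch spread doubles the zoom, mirroring the handler's
// pixelToExponent scale (.domain([0, 100]).range([0, 1])).
function applyPinch(ctx: FakeTimeContext,
                    initialZoom: number,
                    initialDistance: number,
                    distance: number,
                    midPointX: number): void {
  const lastCenterTime = pixelToTime(ctx, midPointX);
  const exponent = (distance - initialDistance) / 100;
  ctx.zoom = initialZoom * Math.pow(2, exponent);
  const newCenterTime = pixelToTime(ctx, midPointX);
  // Shift the offset so the time under the pinch midpoint stays put.
  ctx.offset += newCenterTime - lastCenterTime;
}

const ctx: FakeTimeContext = { pixelsPerSecond: 100, zoom: 1, offset: 0 };
applyPinch(ctx, 1, 50, 150, 200); // spread grew by 100px => zoom doubles
console.log(ctx.zoom);   // 2
console.log(ctx.offset); // -1: content shifts left so the 2s point stays under the midpoint

The offset correction is the same newCenterTime - lastCenterTime trick used in the zoom callback above; clamping to minZoom/maxZoom is omitted here for brevity.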
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/visualisations/waveform/waveform.component.ts
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/app/visualisations/waveform/waveform.component.ts	Thu May 25 17:55:29 2017 +0100
@@ -0,0 +1,177 @@
+import {
+  Component,
+  ViewChild,
+  ElementRef,
+  Input,
+  ChangeDetectorRef
+} from '@angular/core';
+import wavesUI from 'waves-ui-piper';
+import {attachTouchHandlerBodges} from '../WavesJunk';
+import {OnSeekHandler} from '../../playhead/PlayHeadHelpers';
+
+type Layer = any;
+type Track = any;
+
+@Component({
+  selector: 'ugly-waveform',
+  templateUrl: './waveform.component.html',
+  styleUrls: ['./waveform.component.css']
+})
+export class WaveformComponent {
+
+  @ViewChild('track') trackDiv: ElementRef;
+  @Input() set width(width: number) {
+    if (this.timeline) {
+      requestAnimationFrame(() => {
+        this.timeline.timeContext.visibleWidth = width;
+        this.timeline.tracks.update();
+      });
+    }
+  }
+  @Input() timeline: Timeline;
+  @Input() trackIdPrefix: string;
+  @Input() onSeek: OnSeekHandler;
+
+  @Input() set audioBuffer(buffer: AudioBuffer) {
+    this._audioBuffer = buffer || undefined;
+    if (this.audioBuffer) {
+      this.renderWaveform(this.audioBuffer);
+    }
+  }
+
+  get audioBuffer(): AudioBuffer {
+    return this._audioBuffer;
+  }
+
+  private _audioBuffer: AudioBuffer;
+  private layers: Layer[];
+  private zoomOnMouseDown: number;
+  private offsetOnMouseDown: number;
+  private waveTrack: Track;
+
+  constructor(private ref: ChangeDetectorRef) {
+    this.layers = [];
+  }
+
+  renderTimeline(duration: number = 1.0): Timeline {
+    const track: HTMLElement = this.trackDiv.nativeElement;
+    track.innerHTML = '';
+    const height: number = track.getBoundingClientRect().height;
+    const width: number = track.getBoundingClientRect().width;
+    this.timeline.pixelsPerSecond = width / duration;
+    this.timeline.visibleWidth = width;
+    this.waveTrack = this.timeline.createTrack(
+      track,
+      height,
+      `wave-${this.trackIdPrefix || 'default'}`
+    );
+
+    if ('ontouchstart' in window) {
+      attachTouchHandlerBodges(this.trackDiv.nativeElement, this.timeline);
+    }
+  }
+
+  // TODO can likely be removed, or use waves-ui methods
+  clearTimeline(): void {
+    // Loop through the layers and remove them; waves-ui provides methods
+    // for this, but they don't seem to work properly.
+    const timeContextChildren = this.timeline.timeContext._children;
+    for (const track of this.timeline.tracks) {
+      if (track.layers.length === 0) { continue; }
+      const trackLayers = Array.from(track.layers);
+      while (trackLayers.length) {
+        const layer: Layer = trackLayers.pop();
+        if (this.layers.includes(layer)) {
+          track.remove(layer);
+          this.layers.splice(this.layers.indexOf(layer), 1);
+          const index = timeContextChildren.indexOf(layer.timeContext);
+          if (index >= 0) {
+            timeContextChildren.splice(index, 1);
+          }
+          layer.destroy();
+        }
+      }
+    }
+  }
+
+  renderWaveform(buffer: AudioBuffer): void {
+    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
+    if (this.timeline && this.waveTrack) {
+      // resize
+      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;
+
+      this.clearTimeline();
+      this.timeline.visibleWidth = width;
+      this.timeline.pixelsPerSecond = width / buffer.duration;
+      this.waveTrack.height = height;
+    } else {
+      this.renderTimeline(buffer.duration);
+    }
+    this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;
+
+    // time axis
+    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
+      height: height,
+      color: '#b0b0b0'
+    });
+    this.addLayer(timeAxis, this.waveTrack, this.timeline.timeContext, true);
+
+    const nchannels = buffer.numberOfChannels;
+    const totalWaveHeight = height * 0.9;
+    const waveHeight = totalWaveHeight / nchannels;
+
+    for (let ch = 0; ch < nchannels; ++ch) {
+      const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
+        top: (height - totalWaveHeight) / 2 + waveHeight * ch,
+        height: waveHeight,
+        color: '#0868ac',
+        channel: ch
+      });
+      this.addLayer(waveformLayer, this.waveTrack, this.timeline.timeContext);
+    }
+
+    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
+    this.waveTrack.render();
+    this.waveTrack.update();
+    this.ref.markForCheck();
+  }
+
+  // TODO can likely use methods in waves-ui directly
+  private addLayer(layer: Layer,
+                   track: Track,
+                   timeContext: any,
+                   isAxis: boolean = false): void {
+    timeContext.zoom = 1.0;
+    if (!layer.timeContext) {
+      layer.setTimeContext(isAxis ?
+        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
+    }
+    track.add(layer);
+    this.layers.push(layer);
+    layer.render();
+    layer.update();
+  }
+
+  seekStart(): void {
+    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
+    this.offsetOnMouseDown = this.timeline.timeContext.offset;
+  }
+
+  seekEnd(x: number): void {
+    const hasSameZoom: boolean = this.zoomOnMouseDown ===
+      this.timeline.timeContext.zoom;
+    const hasSameOffset: boolean = this.offsetOnMouseDown ===
+      this.timeline.timeContext.offset;
+    if (hasSameZoom && hasSameOffset) {
+      this.seek(x);
+    }
+  }
+
+  seek(x: number): void {
+    if (this.timeline) {
+      const timeContext: any = this.timeline.timeContext;
+      if (this.onSeek) {
+        this.onSeek(timeContext.timeToPixel.invert(x) - timeContext.offset);
+      }
+    }
+  }
+}
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/visualisations/waveform/waves-ui-piper.d.ts
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/app/visualisations/waveform/waves-ui-piper.d.ts	Thu May 25 17:55:29 2017 +0100
@@ -0,0 +1,119 @@
+/// <reference types="node" />
+declare const AmbientInstance: WavesUserInterface;
+
+declare module 'waves-ui-piper' {
+  export default AmbientInstance;
+}
+
+interface WavesUserInterface {
+  core: Core;
+  helpers: any;
+  states: any;
+  utils: Utilities;
+  shapes: any;
+}
+
+interface MatrixEntity {
+  getColumnCount(): number;
+  getColumnHeight(): number;
+  getColumn(n: number): Float32Array | number[];
+  getStepDuration(): number;
+  getStartTime(): number;
+  dispose(): void;
+}
+
+type TimeContext = any; // TODO
+
+interface Area {
+  top: number;
+  left: number;
+  width: number;
+  height: number;
+}
+
+interface Layer extends NodeJS.EventEmitter {
+  start: number;
+  offset: number;
+  duration: number;
+  stretchRatio: number;
+  yDomain: number[];
+  opacity: number;
+  readonly timeToPixel: () => (time: number) => number;
+  readonly valueToPixel: () => (value: number) => number;
+  readonly items: Element[];
+  readonly selectedItems: Element[];
+  data: ArrayLike<number> | Object;
+  destroy(): void;
+  configureTimeContextBehaviour(ctor: ObjectConstructor): void;
+  setTimeContext(context: TimeContext): void;
+  configureShape(ctor: ObjectConstructor /* TODO BaseShape */,
+                 accessors: Object,
+                 options: Object): void;
+  configureCommonShape(ctor: ObjectConstructor /* TODO BaseShape */,
+                       accessors: Object,
+                       options: Object): void;
+  setBehaviour(behaviour: Object /* TODO BaseBehavior */): void;
+  select(...$items: Element[]);
+  unselect(...$items: Element[]);
+  toggleSelection(...$items: Element[]);
+  edit($items: Element[], dx: number, dy: number, $target: Element): void;
+  setContextEditable(bool: boolean): void;
+  editContext(dx: number, dy: number, $target: Element): void;
+  stretchContext(dx: number, dy: number, $target: Element): void;
+  getItemFromDOMElement($el: Element): Element | null;
+  getDatumFromItem($item: Element): Object | any[] | null;
+  getDatumFromDOMElement($item: Element): Object | any[] | null;
+  hasItem($item: Element): boolean;
+  hasElement($el: Element): boolean;
+  getItemsInArea(area: Area): Element[];
+  render(): void;
+  update(): void;
+  updateContainer(): void;
+  updateShapes(): void;
+}
+
+interface LayerConstructor {
+  new(dataType: 'entity' | 'collection',
+      data: ArrayLike<number> | Object,
+      options: Object): Layer;
+}
+
+interface MatrixEntityConstructor {
+  new(): MatrixEntity;
+}
+
+interface PrefilledMatrixEntityConstructor {
+  new(data: Float32Array[] | number[][],
+      startTime: number,
+      stepDuration: number): MatrixEntity;
+}
+
+interface Utilities {
+  MatrixEntity: MatrixEntityConstructor;
+  PrefilledMatrixEntity: PrefilledMatrixEntityConstructor;
+  scales: any;
+}
+
+type Timeline = any;
+
+interface Core {
+  Layer: LayerConstructor;
+  LayerTimeContext: any; // TODO
+  Timeline: Timeline; // TODO
+  TimelineTimeContext: TimelineTimeContextConstructor;
+}
+
+interface TimelineTimeContext {
+  pixelsPerSecond: number;
+  readonly computedPixelsPerSecond: number;
+  offset: number;
+  zoom: number;
+  visibleWidth: number;
+  readonly visibleDuration: number;
+  maintainVisibleDuration: boolean;
+  timeToPixel: (time: number) => number;
+}
+
+interface TimelineTimeContextConstructor {
+  new(pixelsPerSecond: number, visibleWidth: number): TimelineTimeContext;
+}
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/waveform/waveform.component.css
--- a/src/app/waveform/waveform.component.css	Thu May 25 17:52:46 2017 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,4 +0,0 @@
-.track {
-  height: 160px;
-  width: 100%;
-}
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/waveform/waveform.component.html
--- a/src/app/waveform/waveform.component.html	Thu May 25 17:52:46 2017 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/waveform/waveform.component.ts
--- a/src/app/waveform/waveform.component.ts	Thu May 25 17:52:46 2017 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1020 +0,0 @@
-import {
-  Component,
-  OnInit,
-  ViewChild,
-  ElementRef,
-  Input,
-  AfterViewInit,
-  NgZone,
-  OnDestroy,
-  ChangeDetectorRef
-} from '@angular/core';
-import {
-  AudioPlayerService, AudioResource,
-  AudioResourceError
-} from '../services/audio-player/audio-player.service';
-import wavesUI from 'waves-ui-piper';
-import {
-  FeatureExtractionService
-} from '../services/feature-extraction/feature-extraction.service';
-import {Subscription} from 'rxjs/Subscription';
-import {
-  FeatureCollection,
-  SimpleResponse,
-  VectorFeature,
-  MatrixFeature,
-  TracksFeature
-} from 'piper/HigherLevelUtilities';
-import {toSeconds, OutputDescriptor} from 'piper';
-import {FeatureList, Feature} from 'piper/Feature';
-import * as Hammer from 'hammerjs';
-import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';
-import {iceMapper, sunsetMapper} from 'app/spectrogram/ColourMap';
-import {estimatePercentile} from '../spectrogram/MatrixUtils';
-
-type Layer = any;
-type Track = any;
-type Colour = string;
-
-
-
-function* createColourGenerator(colours) {
-  let index = 0;
-  const nColours = colours.length;
-  while (true) {
-    yield colours[index = ++index % nColours];
-  }
-}
-
-const defaultColourGenerator = createColourGenerator([
-  '#0868ac', // "sapphire blue", our waveform / header colour
-  '#c33c54', // "brick red"
-  '#17bebb', // "tiffany blue"
-  '#001021', // "rich black"
-  '#fa8334', // "mango tango"
-  '#034748'  // "deep jungle green"
-]);
-
-type HigherLevelFeatureShape = 'regions' | 'instants' | 'notes';
-type NoteLikeUnit = 'midi' | 'hz';
-interface Note {
-  time: number;
-  duration: number;
-  pitch: number;
-  velocity?: number;
-}
-
-@Component({
-  selector: 'ugly-waveform',
-  templateUrl: './waveform.component.html',
-  styleUrls: ['./waveform.component.css']
-})
-
-export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {
-
-  @ViewChild('track') trackDiv: ElementRef;
-  @Input() set width(width: number) {
-    if (this.timeline) {
-      requestAnimationFrame(() => {
-        this.timeline.timeContext.visibleWidth = width;
-        this.timeline.tracks.update();
-      });
-    }
-  }
-  @Input() timeline: Timeline;
-  @Input() trackIdPrefix: string;
-  @Input() set isSubscribedToExtractionService(isSubscribed: boolean) {
-    if (isSubscribed) {
-      if (this.featureExtractionSubscription) {
-        return;
-      }
-
-      this.featureExtractionSubscription =
-        this.piperService.featuresExtracted$.subscribe(
-          features => {
-            this.renderFeatures(features, defaultColourGenerator.next().value);
-          });
-    } else {
-      if (this.featureExtractionSubscription) {
-        this.featureExtractionSubscription.unsubscribe();
-      }
-    }
-  }
-  @Input() set isSubscribedToAudioService(isSubscribed: boolean) {
-    this._isSubscribedToAudioService = isSubscribed;
-    if (isSubscribed) {
-      if (this.onAudioDataSubscription) {
-        return;
-      }
-
-      this.onAudioDataSubscription =
-        this.audioService.audioLoaded$.subscribe(res => {
-          const wasError = (res as AudioResourceError).message != null;
-
-          if (wasError) {
-            console.warn('No audio, display error?');
-          } else {
-            this.audioBuffer = (res as AudioResource).samples;
-          }
-        });
-    } else {
-      if (this.onAudioDataSubscription) {
-        this.onAudioDataSubscription.unsubscribe();
-      }
-    }
-  }
-
-  get isSubscribedToAudioService(): boolean {
-    return this._isSubscribedToAudioService;
-  }
-
-  @Input() set isOneShotExtractor(isOneShot: boolean) {
-    this._isOneShotExtractor = isOneShot;
-  }
-
-  get isOneShotExtractor(): boolean {
-    return this._isOneShotExtractor;
-  }
-
-  @Input() set isSeeking(isSeeking: boolean) {
-    this._isSeeking = isSeeking;
-    if (isSeeking) {
-      if (this.seekedSubscription) {
-        return;
-      }
-      if (this.playingStateSubscription) {
-        return;
-      }
-
-      this.seekedSubscription = this.audioService.seeked$.subscribe(() => {
-        if (!this.audioService.isPlaying()) {
-          this.animate();
-        }
-      });
-      this.playingStateSubscription =
-        this.audioService.playingStateChange$.subscribe(
-          isPlaying => {
-            if (isPlaying) {
-              this.animate();
-            }
-          });
-    } else {
-      if (this.cursorLayer && this.waveTrack) {
-        this.waveTrack.remove(this.cursorLayer);
-      }
-      if (this.playingStateSubscription) {
-        this.playingStateSubscription.unsubscribe();
-      }
-      if (this.seekedSubscription) {
-        this.seekedSubscription.unsubscribe();
-      }
-    }
-  }
-
-  get isSeeking(): boolean {
-    return this._isSeeking;
-  }
-
-  set audioBuffer(buffer: AudioBuffer) {
-    this._audioBuffer = buffer || undefined;
-    if (this.audioBuffer) {
-      this.renderWaveform(this.audioBuffer);
-      // this.renderSpectrogram(this.audioBuffer);
-    }
-  }
-
-  get audioBuffer(): AudioBuffer {
-    return this._audioBuffer;
-  }
-
-  private _audioBuffer: AudioBuffer;
-  private _isSubscribedToAudioService: boolean;
-  private _isOneShotExtractor: boolean;
-  private _isSeeking: boolean;
-  private cursorLayer: any;
-  private highlightLayer: any;
-  private layers: Layer[];
-  private featureExtractionSubscription: Subscription;
-  private playingStateSubscription: Subscription;
-  private seekedSubscription: Subscription;
-  private onAudioDataSubscription: Subscription;
-  private zoomOnMouseDown: number;
-  private offsetOnMouseDown: number;
-  private hasShot: boolean;
-  private isLoading: boolean;
-  private waveTrack: Track;
-
-  private static changeColour(layer: Layer, colour: string): void {
-    const butcherShapes = (shape) => {
-      shape.install({color: () => colour});
-      shape.params.color = colour;
-      shape.update(layer._renderingContext, layer.data);
-    };
-
-    layer._$itemCommonShapeMap.forEach(butcherShapes);
-    layer._$itemShapeMap.forEach(butcherShapes);
-    layer.render();
-    layer.update();
-  }
-
-  constructor(private audioService: AudioPlayerService,
-              private piperService: FeatureExtractionService,
-              private ngZone: NgZone,
-              private ref: ChangeDetectorRef) {
-    this.isSubscribedToAudioService = true;
-    this.isSeeking = true;
-    this.layers = [];
-    this.audioBuffer = undefined;
-    this.timeline = undefined;
-    this.cursorLayer = undefined;
-    this.highlightLayer = undefined;
-    this.isLoading = true;
-  }
-
-  ngOnInit() {
-  }
-
-  ngAfterViewInit(): void {
-    this.trackIdPrefix = this.trackIdPrefix || 'default';
-    if (this.timeline) {
-      this.renderTimeline(null, true, true);
-    } else {
-      this.renderTimeline();
-    }
-  }
-
-  renderTimeline(duration: number = 1.0,
-                 useExistingDuration: boolean = false,
-                 isInitialRender: boolean = false): Timeline {
-    const track: HTMLElement = this.trackDiv.nativeElement;
-    track.innerHTML = '';
-    const height: number = track.getBoundingClientRect().height;
-    const width: number = track.getBoundingClientRect().width;
-    const pixelsPerSecond = width / duration;
-    const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline;
-
-    if (hasExistingTimeline) {
-      if (!useExistingDuration) {
-        this.timeline.pixelsPerSecond = pixelsPerSecond;
-        this.timeline.visibleWidth = width;
-      }
-    } else {
-      this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
-    }
-    this.waveTrack = this.timeline.createTrack(
-      track,
-      height,
-      `wave-${this.trackIdPrefix}`
-    );
-    if (isInitialRender && hasExistingTimeline) {
-      // time axis
-      const timeAxis = new wavesUI.helpers.TimeAxisLayer({
-        height: height,
-        color: '#b0b0b0'
-      });
-      this.addLayer(timeAxis, this.waveTrack, this.timeline.timeContext, true);
-      this.cursorLayer = new wavesUI.helpers.CursorLayer({
-        height: height,
-        color: '#c33c54'
-      });
-      this.addLayer(
-        this.cursorLayer,
-        this.waveTrack,
-        this.timeline.timeContext
-      );
-    }
-    if ('ontouchstart' in window) {
-      interface Point {
-        x: number;
-        y: number;
-      }
-
-      let zoomGestureJustEnded = false;
-
-      const pixelToExponent: Function = wavesUI.utils.scales.linear()
-        .domain([0, 100]) // 100px => factor 2
-        .range([0, 1]);
-
-      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
-        return Math.pow(
-          Math.pow(p2.x - p1.x, 2) +
-          Math.pow(p2.y - p1.y, 2), 0.5);
-      };
-
-      const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
-        return {
-          x: 0.5 * (p1.x + p2.x),
-          y: 0.5 * (p1.y + p2.y)
-        };
-      };
-
-      const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, {
-        recognizers: [
-          [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
-        ]
-      });
-
-      // it seems HammerJs binds the event to the window?
-      // causing these events to propagate to other components?
-      const componentTimeline = this.timeline;
-      let initialZoom;
-      let initialDistance;
-      let offsetAtPanStart;
-      let startX;
-      let isZooming;
-
-      const scroll = (ev) => {
-        if (ev.center.x - startX === 0) {
-          return;
-        }
-
-        if (zoomGestureJustEnded) {
-          zoomGestureJustEnded = false;
-          console.log('Skip this event: likely a single touch dangling from pinch');
-          return;
-        }
-        componentTimeline.timeContext.offset = offsetAtPanStart +
-          componentTimeline.timeContext.timeToPixel.invert(ev.deltaX);
-        componentTimeline.tracks.update();
-      };
-
-      const zoom = (ev) => {
-        if (ev.touches.length < 2) {
-          return;
-        }
-
-        ev.preventDefault();
-        const minZoom = componentTimeline.state.minZoom;
-        const maxZoom = componentTimeline.state.maxZoom;
-        const p1: Point = {
-          x: ev.touches[0].clientX,
-          y: ev.touches[0].clientY
-        };
-        const p2: Point = {
-          x: ev.touches[1].clientX,
-          y: ev.touches[1].clientY
-        };
-        const distance = calculateDistance(p1, p2);
-        const midPoint = calculateMidPoint(p1, p2);
-
-        const lastCenterTime =
-          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);
-
-        const exponent = pixelToExponent(distance - initialDistance);
-        const targetZoom = initialZoom * Math.pow(2, exponent);
-
-        componentTimeline.timeContext.zoom =
-          Math.min(Math.max(targetZoom, minZoom), maxZoom);
-
-        const newCenterTime =
-          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);
-
-        componentTimeline.timeContext.offset += newCenterTime - lastCenterTime;
-        componentTimeline.tracks.update();
-      };
-      hammertime.on('panstart', (ev) => {
-        offsetAtPanStart = componentTimeline.timeContext.offset;
-        startX = ev.center.x;
-      });
-      hammertime.on('panleft', scroll);
-      hammertime.on('panright', scroll);
-
-
-      const element: HTMLElement = this.trackDiv.nativeElement;
-      element.addEventListener('touchstart', (e) => {
-        if (e.touches.length < 2) {
-          return;
-        }
-
-        isZooming = true;
-        initialZoom = componentTimeline.timeContext.zoom;
-
-        initialDistance = calculateDistance({
-          x: e.touches[0].clientX,
-          y: e.touches[0].clientY
-        }, {
-          x: e.touches[1].clientX,
-          y: e.touches[1].clientY
-        });
-      });
-      element.addEventListener('touchend', () => {
-        if (isZooming) {
-          isZooming = false;
-          zoomGestureJustEnded = true;
-        }
-      });
-      element.addEventListener('touchmove', zoom);
-    }
-    // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`);
-    // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`);
-  }
-
-  clearTimeline(): void {
-    // Loop through the layers and remove them; waves-ui provides methods
-    // for this, but they don't seem to work properly.
-    const timeContextChildren = this.timeline.timeContext._children;
-    for (const track of this.timeline.tracks) {
-      if (track.layers.length === 0) { continue; }
-      const trackLayers = Array.from(track.layers);
-      while (trackLayers.length) {
-        const layer: Layer = trackLayers.pop();
-        if (this.layers.includes(layer)) {
-          track.remove(layer);
-          this.layers.splice(this.layers.indexOf(layer), 1);
-          const index = timeContextChildren.indexOf(layer.timeContext);
-          if (index >= 0) {
-            timeContextChildren.splice(index, 1);
-          }
-          layer.destroy();
-        }
-      }
-    }
-  }
-
-  renderWaveform(buffer: AudioBuffer): void {
-    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
-    if (this.timeline) {
-      // resize
-      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;
-
-      this.clearTimeline();
-
-      this.timeline.visibleWidth = width;
-      this.timeline.pixelsPerSecond = width / buffer.duration;
-      this.waveTrack.height = height;
-    } else {
-      this.renderTimeline(buffer.duration);
-    }
-    this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;
-
-    // time axis
-    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
-      height: height,
-      color: '#b0b0b0'
-    });
-    this.addLayer(timeAxis, this.waveTrack, this.timeline.timeContext, true);
-
-    const nchannels = buffer.numberOfChannels;
-    const totalWaveHeight = height * 0.9;
-    const waveHeight = totalWaveHeight / nchannels;
-
-    for (let ch = 0; ch < nchannels; ++ch) {
-      console.log('about to construct a waveform layer for channel ' + ch);
-      const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
-        top: (height - totalWaveHeight) / 2 + waveHeight * ch,
-        height: waveHeight,
-        color: '#0868ac',
-        channel: ch
-      });
-      this.addLayer(waveformLayer, this.waveTrack, this.timeline.timeContext);
-    }
-
-    this.cursorLayer = new wavesUI.helpers.CursorLayer({
-      height: height,
-      color: '#c33c54'
-    });
-    this.addLayer(this.cursorLayer, this.waveTrack, this.timeline.timeContext);
-    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
-    this.waveTrack.render();
-    this.waveTrack.update();
-
-    this.isLoading = false;
-    this.ref.markForCheck();
-    this.animate();
-  }
-
-  renderSpectrogram(buffer: AudioBuffer): void {
-    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
-    const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`);
-
-    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
-      top: 0,
-      height: height,
-      stepSize: 512,
-      blockSize: 1024,
-      normalise: 'none',
-      mapper: sunsetMapper()
-    });
-    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);
-
-    this.timeline.tracks.update();
-  }
-
-  private addLineLayers(features: VectorFeature[],
-                        unit: string,
-                        colour: Colour) {
-
-    // Winnow out empty features
-    features = features.filter(feature => (feature.data.length > 0));
-
-    // First establish a [min,max] range across all of the features
-    let [min, max] = features.reduce((acc, feature) => {
-      return feature.data.reduce((acc, val) => {
-        const [min, max] = acc;
-        return [Math.min(min, val), Math.max(max, val)];
-      }, acc);
-    }, [Infinity, -Infinity]);
-
-    console.log('addLineLayers: ' + features.length + ' non-empty features, overall min = ' + min + ', max = ' + max);
-
-    if (min === Infinity) {
-      min = 0;
-      max = 1;
-    }
-
-    if (min !== min || max !== max) {
-      console.log('WARNING: min or max is NaN');
-      min = 0;
-      max = 1;
-    }
-
-    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
-
-    // Now add a line layer for each vector feature
-    const lineLayers = features.map(feature => {
-
-      let duration = 0;
-
-      // Give the plot items positions relative to the start of the
-      // line, rather than relative to absolute time 0. This is
-      // because we'll be setting the layer timeline start property
-      // later on and these will be positioned relative to that
-
-      const plotData = [...feature.data].map((val, i) => {
-        const t = i * feature.stepDuration;
-        duration = t + feature.stepDuration;
-        return {
-          cx: t,
-          cy: val
-        };
-      });
-
-      const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
-        color: colour,
-        height: height,
-        yDomain: [min, max]
-      });
-      this.addLayer(
-        lineLayer,
-        this.waveTrack,
-        this.timeline.timeContext
-      );
-
-      // Set start and duration so that the highlight layer can use
-      // them to determine which line to draw values from
-      lineLayer.start = feature.startTime;
-      lineLayer.duration = duration;
-
-      return lineLayer;
-    });
-
-    this.addScaleAndHighlight(this.waveTrack, lineLayers, unit, colour, min, max);
-  }
-
-  private addScaleAndHighlight(waveTrack,
-                               lineLayers,
-                               unit: string,
-                               colour: Colour,
-                               min: number,
-                               max: number) {
-
-    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
-
-    // And a single scale layer at left
-    // !!! todo: unit in scale layer
-    const scaleLayer = new wavesUI.helpers.ScaleLayer({
-      tickColor: colour,
-      textColor: colour,
-      height: height,
-      yDomain: [min, max]
-    });
-    this.addLayer(
-      scaleLayer,
-      waveTrack,
-      this.timeline.timeContext
-    );
-
-    // And a single highlight layer which uses all of the line layers
-    // as its source material
-    this.highlightLayer = new wavesUI.helpers.HighlightLayer(lineLayers, {
-      opacity: 0.7,
-      height: height,
-      color: '#c33c54',
-      labelOffset: 38,
-      yDomain: [min, max],
-      unit
-    });
-    this.addLayer(
-      this.highlightLayer,
-      waveTrack,
-      this.timeline.timeContext
-    );
-  }
-
-  // TODO refactor - this doesn't belong here
-  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
-    if (this.isOneShotExtractor && !this.hasShot) {
-      this.featureExtractionSubscription.unsubscribe();
-      this.hasShot = true;
-    }
-
-    if (!extracted.hasOwnProperty('features')
-        || !extracted.hasOwnProperty('outputDescriptor')) {
-      return;
-    }
-    if (!extracted.features.hasOwnProperty('shape')
-        || !extracted.features.hasOwnProperty('collected')) {
-      return;
-    }
-    const features: FeatureCollection = (extracted.features as FeatureCollection);
-    const outputDescriptor = extracted.outputDescriptor;
-    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
-
-    let unit = '';
-    if (outputDescriptor.configured.hasOwnProperty('unit')) {
-      unit = outputDescriptor.configured.unit;
-    }
-
-    // TODO refactor all of this
-    switch (features.shape) {
-
-      case 'vector': {
-        const collected = features.collected as VectorFeature;
-        this.addLineLayers([collected], unit, colour);
-        break;
-      }
-
-      case 'tracks': {
-        const collected = features.collected as TracksFeature;
-        this.addLineLayers(collected, unit, colour);
-        break;
-      }
-
-      case 'list': {
-        const featureData = features.collected as FeatureList;
-        if (featureData.length === 0) {
-          return;
-        }
-
-        // TODO refactor, this is incomprehensible
-        try {
-          const featureShape = deduceHigherLevelFeatureShape(
-            featureData,
-            outputDescriptor
-          );
-          switch (featureShape) {
-            case 'instants':
-              const plotData = featureData.map(feature => ({
-                time: toSeconds(feature.timestamp),
-                label: feature.label
-              }));
-              const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
-                height: height,
-                color: colour,
-                labelPosition: 'bottom',
-                shadeSegments: true
-              });
-              this.addLayer(
-                featureLayer,
-                this.waveTrack,
-                this.timeline.timeContext
-              );
-              break;
-            case 'regions':
-              this.renderRegions(
-                featureData,
-                outputDescriptor,
-                this.waveTrack,
-                height,
-                colour
-              );
-              break;
-            case 'notes':
-              const notes = mapFeaturesToNotes(featureData, outputDescriptor);
-              let [min, max] = notes.reduce((acc, note) => {
-                const [min, max] = acc;
-                return [Math.min(min, note.pitch), Math.max(max, note.pitch)];
-              }, [Infinity, -Infinity]);
-              if (min === Infinity || min < 0 || max < 0) {
-                min = 0;
-                max = 127;
-              }
-              // round min and max to octave boundaries (starting at C as in MIDI)
-              min = 12 * Math.floor(min / 12);
-              max = 12 * Math.ceil(max / 12);
-              const pianoRollLayer = new wavesUI.helpers.PianoRollLayer(
-                notes,
-                {height: height, color: colour, yDomain: [min, max]}
-              );
-              this.addLayer(
-                pianoRollLayer,
-                this.waveTrack,
-                this.timeline.timeContext
-              );
-              this.addScaleAndHighlight(
-                this.waveTrack,
-                [pianoRollLayer],
-                '',
-                colour,
-                min,
-                max
-              );
-              break;
-          }
-        } catch (e) {
-          console.warn(e); // TODO display
-          break;
-        }
-        break;
-      }
-      case 'matrix': {
-        const collected = features.collected as MatrixFeature;
-        const startTime = collected.startTime; // !!! + make use of
-        const stepDuration = collected.stepDuration;
-        const matrixData = collected.data;
-
-        if (matrixData.length === 0) {
-          return;
-        }
-
-        console.log('matrix data length = ' + matrixData.length);
-        console.log('height of first column = ' + matrixData[0].length);
-        const targetValue = estimatePercentile(matrixData, 95);
-        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
-        const matrixEntity =
-          new wavesUI.utils.PrefilledMatrixEntity(matrixData,
-                                                  0, // startTime
-                                                  stepDuration);
-        const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
-          gain,
-          top: 0,
-          height: height,
-          normalise: 'none',
-          mapper: iceMapper()
-        });
-        this.addLayer(
-          matrixLayer,
-          this.waveTrack,
-          this.timeline.timeContext
-        );
-        break;
-      }
-      default:
-        console.log(
-          `Cannot render an appropriate layer for feature shape '${features.shape}'`
-        );
-    }
-
-    this.isLoading = false;
-    this.ref.markForCheck();
-    this.timeline.tracks.update();
-    this.animate();
-  }
-
-  private animate(): void {
-    if (!this.isSeeking) {
-      return;
-    }
-
-    this.ngZone.runOutsideAngular(() => {
-      // listen for time passing...
-      const updateSeekingCursor = () => {
-        const currentTime = this.audioService.getCurrentTime();
-        this.cursorLayer.currentPosition = currentTime;
-        this.cursorLayer.update();
-
-        if (this.highlightLayer) {
-          this.highlightLayer.currentPosition = currentTime;
-          this.highlightLayer.update();
-        }
-
-        const currentOffset = this.timeline.timeContext.offset;
-        const offsetTimestamp = currentOffset
-          + currentTime;
-
-        const visibleDuration = this.timeline.timeContext.visibleDuration;
-        // TODO reduce duplication between directions and make more declarative
-        // this kinda logic should also be tested
-        const mustPageForward = offsetTimestamp > visibleDuration;
-        const mustPageBackward = currentTime < -currentOffset;
-
-        if (mustPageForward) {
-          const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration;
-
-          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
-            -currentTime + 0.5 * visibleDuration :
-            currentOffset - visibleDuration;
-          this.timeline.tracks.update();
-        }
-
-        if (mustPageBackward) {
-          const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset;
-          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
-            -currentTime + 0.5 * visibleDuration :
-            currentOffset + visibleDuration;
-          this.timeline.tracks.update();
-        }
-
-        if (this.audioService.isPlaying()) {
-          requestAnimationFrame(updateSeekingCursor);
-        }
-      };
-      updateSeekingCursor();
-    });
-  }
-
-  // TODO not sure how much of the logic in here is actually sensible w.r.t
-  // what it functionally produces
-  private renderRegions(featureData: FeatureList,
-                        outputDescriptor: OutputDescriptor,
-                        waveTrack: any,
-                        height: number,
-                        colour: Colour) {
-    console.log('Output is of region type');
-    const binCount = outputDescriptor.configured.binCount || 0;
-    const isBarRegion = featureData[0].featureValues.length >= 1 || binCount >= 1;
-    const getSegmentArgs = () => {
-      if (isBarRegion) {
-
-        // TODO refactor - this is messy
-        interface FoldsToNumber<T> {
-          reduce(fn: (previousValue: number,
-                      currentValue: T,
-                      currentIndex: number,
-                      array: ArrayLike<T>) => number,
-                 initialValue?: number): number;
-        }
-
-        // TODO potentially change impl., i.e avoid reduce
-        const findMin = <T>(arr: FoldsToNumber<T>,
-                            getElement: (x: T) => number): number => {
-          return arr.reduce(
-            (min, val) => Math.min(min, getElement(val)),
-            Infinity
-          );
-        };
-
-        const findMax = <T>(arr: FoldsToNumber<T>,
-                            getElement: (x: T) => number): number => {
-          return arr.reduce(
-            (min, val) => Math.max(min, getElement(val)),
-            -Infinity
-          );
-        };
-
-        const min = findMin(featureData, (x: Feature) => {
-          return findMin(x.featureValues, y => y);
-        });
-
-        const max = findMax(featureData, (x: Feature) => {
-          return findMax(x.featureValues, y => y);
-        });
-
-        const barHeight = 1.0 / height;
-        return [
-          featureData.reduce((bars, feature) => {
-            const staticProperties = {
-              x: toSeconds(feature.timestamp),
-              width: toSeconds(feature.duration),
-              height: min + barHeight,
-              color: colour,
-              opacity: 0.8
-            };
-            // TODO avoid copying Float32Array to an array - map is problematic here
-            return bars.concat([...feature.featureValues]
-              .map(val => Object.assign({}, staticProperties, {y: val})));
-          }, []),
-          {yDomain: [min, max + barHeight], height: height} as any
-        ];
-      } else {
-        return [featureData.map(feature => ({
-          x: toSeconds(feature.timestamp),
-          width: toSeconds(feature.duration),
-          color: colour,
-          opacity: 0.8
-        })), {height: height}];
-      }
-    };
-
-    const segmentLayer = new wavesUI.helpers.SegmentLayer(
-      ...getSegmentArgs()
-    );
-    this.addLayer(
-      segmentLayer,
-      waveTrack,
-      this.timeline.timeContext
-    );
-  }
-
-  private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): void {
-    timeContext.zoom = 1.0;
-    if (!layer.timeContext) {
-      layer.setTimeContext(isAxis ?
-        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
-    }
-    track.add(layer);
-    this.layers.push(layer);
-    layer.render();
-    layer.update();
-    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
-      track.$layout.appendChild(this.cursorLayer.$el);
-    }
-  }
-
-  ngOnDestroy(): void {
-    if (this.featureExtractionSubscription) {
-      this.featureExtractionSubscription.unsubscribe();
-    }
-    if (this.playingStateSubscription) {
-      this.playingStateSubscription.unsubscribe();
-    }
-    if (this.seekedSubscription) {
-      this.seekedSubscription.unsubscribe();
-    }
-    if (this.onAudioDataSubscription) {
-      this.onAudioDataSubscription.unsubscribe();
-    }
-  }
-
-  seekStart(): void {
-    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
-    this.offsetOnMouseDown = this.timeline.timeContext.offset;
-  }
-
-  seekEnd(x: number): void {
-    const hasSameZoom: boolean = this.zoomOnMouseDown ===
-      this.timeline.timeContext.zoom;
-    const hasSameOffset: boolean = this.offsetOnMouseDown ===
-      this.timeline.timeContext.offset;
-    if (hasSameZoom && hasSameOffset) {
-      this.seek(x);
-    }
-  }
-
-  seek(x: number): void {
-    if (this.timeline) {
-      const timeContext: any = this.timeline.timeContext;
-      const timeX = timeContext.timeToPixel.invert(x) - timeContext.offset;
-      if (this.isSeeking) {
-        this.audioService.seekTo(timeX);
-      } else {
-        if (this.highlightLayer) {
-          this.highlightLayer.currentPosition = timeX;
-          this.highlightLayer.update();
-        }
-      }
-    }
-  }
-}
-
-function deduceHigherLevelFeatureShape(featureData: FeatureList,
-                                       descriptor: OutputDescriptor)
-: HigherLevelFeatureShape {
-  // TODO look at output descriptor instead of directly inspecting features
-  const hasDuration = descriptor.configured.hasDuration;
-  const binCount = descriptor.configured.binCount;
-  const isMarker = !hasDuration
-    && binCount === 0
-    && featureData[0].featureValues == null;
-
-  const isMaybeNote = getCanonicalNoteLikeUnit(descriptor.configured.unit)
-    && [1, 2].find(nBins => nBins === binCount);
-
-  const isRegionLike = hasDuration && featureData[0].timestamp != null;
-
-  const isNote = isMaybeNote && isRegionLike;
-  const isRegion = !isMaybeNote && isRegionLike;
-  if (isMarker) {
-    return 'instants';
-  }
-  if (isNote) {
-    return 'notes';
-  }
-  if (isRegion) {
-    return 'regions';
-  }
-  throw new Error('No shape could be deduced');
-}
-
-function getCanonicalNoteLikeUnit(unit: string): NoteLikeUnit | null {
-  const canonicalUnits: NoteLikeUnit[] = ['midi', 'hz'];
-  return canonicalUnits.find(canonicalUnit => {
-    return unit.toLowerCase().indexOf(canonicalUnit) >= 0;
-  });
-}
-
-function mapFeaturesToNotes(featureData: FeatureList,
-                            descriptor: OutputDescriptor): Note[] {
-  const canonicalUnit = getCanonicalNoteLikeUnit(descriptor.configured.unit);
-  const isHz = canonicalUnit === 'hz';
-  return featureData.map(feature => ({
-    time: toSeconds(feature.timestamp),
-    duration: toSeconds(feature.duration),
-    pitch: isHz ?
-      frequencyToMidiNote(feature.featureValues[0]) : feature.featureValues[0]
-  }));
-}
-
-function frequencyToMidiNote(frequency: number,
-                             concertA: number = 440.0): number {
-  return 69 + 12 * Math.log2(frequency / concertA);
-}
diff -r f87a96ab1e3f -r 82d476b976e0 src/app/waveform/waves-ui-piper.d.ts
--- a/src/app/waveform/waves-ui-piper.d.ts	Thu May 25 17:52:46 2017 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,119 +0,0 @@
-/// <reference types="node" />
-declare const AmbientInstance: WavesUserInterface;
-
-declare module 'waves-ui-piper' {
-  export default AmbientInstance;
-}
-
-interface WavesUserInterface {
-  core: Core;
-  helpers: any;
-  states: any;
-  utils: Utilities;
-  shapes: any;
-}
-
-interface MatrixEntity {
-  getColumnCount(): number;
-  getColumnHeight(): number;
-  getColumn(n: number): Float32Array | number[];
-  getStepDuration(): number;
-  getStartTime(): number;
-  dispose(): void;
-}
-
-type TimeContext = any; // TODO
-
-interface Area {
-  top: number;
-  left: number;
-  width: number;
-  height: number;
-}
-
-interface Layer extends NodeJS.EventEmitter {
-  start: number;
-  offset: number;
-  duration: number;
-  stretchRatio: number;
-  yDomain: number[];
-  opacity: number;
-  readonly timeToPixel: () => (time: number) => number;
-  readonly valueToPixel: () => (value: number) => number;
-  readonly items: Element[];
-  readonly selectedItems: Element[];
-  data: ArrayLike<number> | Object;
-  destroy(): void;
-  configureTimeContextBehaviour(ctor: ObjectConstructor): void;
-  setTimeContext(context: TimeContext): void;
-  configureShape(ctor: ObjectConstructor /* TODO BaseShape */,
-                 accessors: Object,
-                 options: Object): void;
-  configureCommonShape(ctor: ObjectConstructor /* TODO BaseShape */,
-                       accessors: Object,
-                       options: Object): void;
-  setBehaviour(behaviour: Object /* TODO BaseBehavior */): void;
-  select(...$items: Element[]);
-  unselect(...$items: Element[]);
-  toggleSelection(...$items: Element[]);
-  edit($items: Element[], dx: number, dy: number, $target: Element): void;
-  setContextEditable(bool: boolean): void;
-  editContext(dx: number, dy: number, $target: Element): void;
-  stretchContext(dx: number, dy: number, $target: Element): void;
-  getItemFromDOMElement($el: Element): Element | null;
-  getDatumFromItem($item: Element): Object | any[] | null;
-  getDatumFromDOMElement($item: Element): Object | any[] | null;
-  hasItem($item: Element): boolean;
-  hasElement($el: Element): boolean;
-  getItemsInArea(area: Area): Element[];
-  render(): void;
-  update(): void;
-  updateContainer(): void;
-  updateShapes(): void;
-}
-
-interface LayerConstructor {
-  new(dataType: 'entity' | 'collection',
-      data: ArrayLike<number> | Object,
-      options: Object): Layer;
-}
-
-interface MatrixEntityConstructor {
-  new(): MatrixEntity;
-}
-
-interface PrefilledMatrixEntityConstructor {
-  new(data: Float32Array[] | number[][],
-      startTime: number,
-      stepDuration: number): MatrixEntity;
-}
-
-interface Utilities {
-  MatrixEntity: MatrixEntityConstructor;
-  PrefilledMatrixEntity: PrefilledMatrixEntityConstructor;
-  scales: any;
-}
-
-interface Core {
-  Layer: LayerConstructor;
-  LayerTimeContext: any; // TODO
-  Timeline: any; // TODO
-  TimelineTimeContext: TimelineTimeContextConstructor;
-}
-
-type Timeline = any;
-
-interface TimelineTimeContext {
-  pixelsPerSecond: number;
-  readonly computedPixelsPerSecond: number;
-  offset: number;
-  zoom: number;
-  visibleWidth: number;
-  readonly visibleDuration: number;
-  maintainVisibleDuration: boolean;
-  timeToPixel: (time: number) => number;
-}
-
-interface TimelineTimeContextConstructor {
-  new(pixelsPerSecond: number, visibleWidth: number): TimelineTimeContext;
-}
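
With rendering-only responsibilities, the component no longer talks to AudioPlayerService itself: app.component injects an OnSeekHandler, and WaveformComponent.seek() converts the clicked pixel back to a time before calling it. The round trip is easy to see in isolation. A minimal sketch, assuming OnSeekHandler's shape as inferred from its call sites above (its definition lives in playhead/PlayHeadHelpers, which this changeset does not include), and mimicking waves-ui's linear timeToPixel scale with plain arithmetic:

type OnSeekHandler = (time: number) => void; // assumed shape, inferred from usage

// A stand-in for timeContext.timeToPixel: a plain linear time<->pixel scale.
const makeTimeToPixel = (pixelsPerSecond: number) => ({
  map: (t: number) => t * pixelsPerSecond,
  invert: (px: number) => px / pixelsPerSecond
});

// WaveformComponent.seek(x): the pixel under the click, inverted to a time
// and corrected for the pan offset, becomes the seek target.
function seek(x: number,
              offset: number,
              pixelsPerSecond: number,
              onSeek: OnSeekHandler): void {
  const timeToPixel = makeTimeToPixel(pixelsPerSecond);
  onSeek(timeToPixel.invert(x) - offset);
}

// app.component supplies the handler, delegating to the audio service,
// just as `this.onSeek = (time) => this.audioService.seekTo(time)` does above.
const audioService = { seekTo: (t: number) => console.log(`seek to ${t}s`) };
const onSeek: OnSeekHandler = (time) => audioService.seekTo(time);

seek(150, 0.5, 100, onSeek); // click at 150px with a 0.5s offset => "seek to 1s"

Keeping the handler injected rather than calling the service directly is what lets the same component serve both live playback and purely visual contexts.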