annotate src/app/waveform/waveform.component.ts @ 110:9890436bcc9a

Bodge in okay-ish pan and zoom, with a lot of dupe from CenteredZoomState and state flying about everywhere.
author Lucas Thompson <dev@lucas.im>
date Fri, 10 Mar 2017 18:24:52 +0000
parents 68fe21cfda2a
children 689c1bfe8e68
rev   line source
dev@10 1 import {
dev@51 2 Component, OnInit, ViewChild, ElementRef, Input, AfterViewInit, NgZone,
dev@51 3 OnDestroy
dev@10 4 } from '@angular/core';
dev@39 5 import {AudioPlayerService} from "../services/audio-player/audio-player.service";
dev@36 6 import wavesUI from 'waves-ui';
dev@63 7 import {
dev@64 8 FeatureExtractionService
dev@63 9 } from "../services/feature-extraction/feature-extraction.service";
dev@51 10 import {Subscription} from "rxjs";
dev@63 11 import {
dev@63 12 FeatureCollection,
dev@64 13 FixedSpacedFeatures, SimpleResponse
dev@63 14 } from "piper/HigherLevelUtilities";
dev@53 15 import {toSeconds} from "piper";
dev@67 16 import {FeatureList, Feature} from "piper/Feature";
dev@81 17 import * as Hammer from 'hammerjs';
dev@8 18
dev@20 19 type Timeline = any; // TODO what type is this actually? Start a .d.ts for waves-ui
dev@54 20 type Layer = any;
dev@54 21 type Track = any;
dev@59 22 type DisposableIndex = number;
dev@59 23 type Colour = string;
dev@6 24
dev@6 25 @Component({
dev@6 26 selector: 'app-waveform',
dev@6 27 templateUrl: './waveform.component.html',
dev@6 28 styleUrls: ['./waveform.component.css']
dev@6 29 })
dev@51 30 export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {
dev@20 31
dev@8 32 @ViewChild('track') trackDiv: ElementRef;
dev@6 33
dev@54 34 private _audioBuffer: AudioBuffer;
dev@54 35 private timeline: Timeline;
dev@54 36 private cursorLayer: any;
dev@54 37 private disposableLayers: Layer[];
dev@59 38 private colouredLayers: Map<DisposableIndex, Colour>;
dev@16 39
dev@16 40 @Input()
dev@16 41 set audioBuffer(buffer: AudioBuffer) {
dev@16 42 this._audioBuffer = buffer || undefined;
dev@20 43 if (this.audioBuffer)
dev@20 44 this.renderWaveform(this.audioBuffer);
dev@16 45 }
dev@16 46
dev@16 47 get audioBuffer(): AudioBuffer {
dev@16 48 return this._audioBuffer;
dev@16 49 }
dev@16 50
dev@51 51 private featureExtractionSubscription: Subscription;
dev@53 52 private playingStateSubscription: Subscription;
dev@53 53 private seekedSubscription: Subscription;
dev@53 54 private isPlaying: boolean;
dev@110 55 private offsetAtPanStart: number;
dev@110 56 private initialZoom: number;
dev@110 57 private initialDistance: number;
dev@51 58
dev@31 59 constructor(private audioService: AudioPlayerService,
dev@51 60 private piperService: FeatureExtractionService,
dev@51 61 public ngZone: NgZone) {
dev@59 62 this.colouredLayers = new Map();
dev@54 63 this.disposableLayers = [];
dev@54 64 this._audioBuffer = undefined;
dev@54 65 this.timeline = undefined;
dev@54 66 this.cursorLayer = undefined;
dev@53 67 this.isPlaying = false;
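// Infinite generator cycling through a small fixed palette, so each newly
// extracted set of features is rendered with the next colour in the sequence.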
dev@59 68 const colours = function* () {
dev@59 69 const circularColours = [
dev@59 70 'black',
dev@59 71 'red',
dev@59 72 'green',
dev@59 73 'purple',
dev@59 74 'orange'
dev@59 75 ];
dev@59 76 let index = 0;
dev@59 77 const nColours = circularColours.length;
dev@59 78 while (true) {
dev@59 79 yield circularColours[index = ++index % nColours];
dev@59 80 }
dev@59 81 }();
dev@59 82
dev@51 83 this.featureExtractionSubscription = piperService.featuresExtracted$.subscribe(
dev@51 84 features => {
dev@59 85 this.renderFeatures(features, colours.next().value);
dev@51 86 });
dev@53 87 this.playingStateSubscription = audioService.playingStateChange$.subscribe(
dev@53 88 isPlaying => {
dev@53 89 this.isPlaying = isPlaying;
dev@53 90 if (this.isPlaying)
dev@53 91 this.animate();
dev@53 92 });
dev@53 93 this.seekedSubscription = audioService.seeked$.subscribe(() => {
dev@53 94 if (!this.isPlaying)
dev@53 95 this.animate();
dev@53 96 });
dev@51 97 }
dev@51 98
dev@53 99 ngOnInit() {
dev@53 100 }
dev@10 101
dev@10 102 ngAfterViewInit(): void {
dev@51 103 this.timeline = this.renderTimeline();
dev@20 104 }
dev@20 105
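// Creates a fresh waves-ui Timeline inside the track element, sized so that
// the given duration spans the full track width, and adds a 'main' track to it.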
dev@20 106 renderTimeline(duration: number = 1.0): Timeline {
dev@18 107 const track: HTMLElement = this.trackDiv.nativeElement;
dev@20 108 track.innerHTML = "";
dev@18 109 const height: number = track.getBoundingClientRect().height;
dev@18 110 const width: number = track.getBoundingClientRect().width;
dev@18 111 const pixelsPerSecond = width / duration;
dev@18 112 const timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
dev@18 113 timeline.createTrack(track, height, 'main');
dev@54 114 return timeline;
dev@54 115 }
dev@18 116
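// Estimates the given percentile of the matrix values from a bounded random
// sample (at most ~50000 values, at least one from every column); used below
// to derive a display gain for matrix features.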
cannam@108 117 estimatePercentile(matrix, percentile) {
cannam@108 118 // our sample is not evenly distributed across the whole data set:
cannam@108 119 // it is guaranteed to include at least one sample from every
cannam@108 120 // column, and could sample some values more than once. But it
cannam@108 121 // should be good enough in most cases (todo: show this)
cannam@109 122 if (matrix.length === 0) {
cannam@109 123 return 0.0;
cannam@109 124 }
cannam@108 125 const w = matrix.length;
cannam@108 126 const h = matrix[0].length;
cannam@108 127 const n = w * h;
cannam@109 128 const m = (n > 50000 ? 50000 : n); // should base that on the %ile
cannam@108 129 let m_per = Math.floor(m / w);
cannam@108 130 if (m_per < 1) m_per = 1;
cannam@108 131 let sample = [];
cannam@108 132 for (let x = 0; x < w; ++x) {
cannam@108 133 for (let i = 0; i < m_per; ++i) {
cannam@108 134 const y = Math.floor(Math.random() * h);
cannam@109 135 const value = matrix[x][y];
cannam@109 136 if (!isNaN(value) && value !== Infinity) {
cannam@109 137 sample.push(value);
cannam@109 138 }
cannam@108 139 }
cannam@108 140 }
cannam@109 141 if (sample.length === 0) {
cannam@109 142 console.log("WARNING: No samples gathered, even though we hoped for " +
cannam@109 143 (m_per * w) + " of them");
cannam@109 144 return 0.0;
cannam@109 145 }
cannam@108 146 sample.sort((a,b) => { return a - b; });
cannam@108 147 const ix = Math.floor((sample.length * percentile) / 100);
cannam@108 148 console.log("Estimating " + percentile + "-%ile of " +
cannam@108 149 n + "-sample dataset (" + w + " x " + h + ") as value " + ix +
cannam@108 150 " of sorted " + sample.length + "-sample subset");
cannam@108 151 const estimate = sample[ix];
cannam@108 152 console.log("Estimate is: " + estimate + " (where min sampled value = " +
cannam@108 153 sample[0] + " and max = " + sample[sample.length-1] + ")");
cannam@108 154 return estimate;
cannam@108 155 }
cannam@108 156
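// Builds a mapper from a value in [0, 1] to an RGBA quadruple by linearly
// interpolating between the supplied hex colour stops.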
cannam@108 157 interpolatingMapper(hexColours) {
cannam@108 158 const colours = hexColours.map(n => {
cannam@108 159 const i = parseInt(n, 16);
cannam@108 160 return [ (i >> 16) & 255, (i >> 8) & 255, i & 255, 255 ];
cannam@108 161 });
cannam@108 162 const last = colours.length - 1;
cannam@108 163 return (value => {
cannam@108 164 // value must be in the range [0,1]. We quantize to 256 levels,
cannam@108 165 // as the PNG encoder deep inside uses a limited palette for
cannam@108 166 // simplicity. Should document this for the mapper. Also that
cannam@108 167 // individual colour values should be integers
cannam@108 168 value = Math.round(value * 255) / 255;
cannam@108 169 const m = value * last;
cannam@108 170 if (m >= last) {
cannam@108 171 return colours[last];
cannam@108 172 }
cannam@108 173 if (m <= 0) {
cannam@108 174 return colours[0];
cannam@108 175 }
cannam@108 176 const base = Math.floor(m);
cannam@108 177 const prop0 = base + 1.0 - m;
cannam@108 178 const prop1 = m - base;
cannam@108 179 const c0 = colours[base];
cannam@108 180 const c1 = colours[base+1];
cannam@108 181 return [ Math.round(c0[0] * prop0 + c1[0] * prop1),
cannam@108 182 Math.round(c0[1] * prop0 + c1[1] * prop1),
cannam@108 183 Math.round(c0[2] * prop0 + c1[2] * prop1),
cannam@108 184 255 ];
cannam@108 185 });
cannam@108 186 }
dev@110 187
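// Reversed ColorBrewer YlGnBu ramp, giving a dark-blue-to-white "ice"
// colour mapper for matrix layers.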
cannam@108 188 iceMapper() {
dev@110 189 let hexColours = [
cannam@108 190 // Based on ColorBrewer ylGnBu
cannam@108 191 "ffffff", "ffff00", "f7fcf0", "e0f3db", "ccebc5", "a8ddb5",
cannam@108 192 "7bccc4", "4eb3d3", "2b8cbe", "0868ac", "084081", "042040"
cannam@108 193 ];
cannam@108 194 hexColours.reverse();
cannam@108 195 return this.interpolatingMapper(hexColours);
cannam@108 196 }
dev@110 197
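// Renders the given buffer into the main track: clears any previously added
// layers, resizes the timeline, then adds time axis, waveform and cursor
// layers and, on touch devices, wires up pan/zoom/seek gestures.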
dev@54 198 renderWaveform(buffer: AudioBuffer): void {
dev@54 199 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
dev@54 200 let mainTrack = this.timeline ? this.timeline.getTrackById('main') : undefined; // timeline may not exist yet
dev@54 201 if (this.timeline) {
dev@54 202 // resize
dev@54 203 const width = this.trackDiv.nativeElement.getBoundingClientRect().width;
dev@55 204
dev@54 205 // loop through layers and remove them; waves-ui provides methods for this, but they don't seem to work properly
dev@55 206 const timeContextChildren = this.timeline.timeContext._children;
dev@55 207
dev@60 208 for (let i = 0, length = this.disposableLayers.length; i < length; ++i) {
dev@54 209 let layer = this.disposableLayers.pop();
dev@54 210 mainTrack.remove(layer);
dev@55 211
dev@55 212 const index = timeContextChildren.indexOf(layer.timeContext);
dev@55 213 if (index >= 0)
dev@55 214 timeContextChildren.splice(index, 1);
dev@54 215 layer.destroy();
dev@54 216 }
dev@59 217 this.colouredLayers.clear();
dev@59 218
dev@54 219 this.timeline.visibleWidth = width;
dev@54 220 this.timeline.pixelsPerSecond = width / buffer.duration;
dev@54 221 mainTrack.height = height;
dev@54 222 } else {
dev@54 223 this.timeline = this.renderTimeline(buffer.duration);
mainTrack = this.timeline.getTrackById('main');
dev@54 224 }
dev@83 225 this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;
cannam@106 226
dev@18 227 // time axis
dev@18 228 const timeAxis = new wavesUI.helpers.TimeAxisLayer({
dev@18 229 height: height,
cannam@106 230 color: '#b0b0b0'
dev@18 231 });
dev@54 232 this.addLayer(timeAxis, mainTrack, this.timeline.timeContext, true);
dev@18 233
dev@20 234 const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
dev@10 235 top: 10,
dev@20 236 height: height * 0.9,
dev@16 237 color: 'darkblue'
dev@16 238 });
dev@54 239 this.addLayer(waveformLayer, mainTrack, this.timeline.timeContext);
cannam@106 240 /*
cannam@106 241 const spectrogramLayer = new wavesUI.helpers.SpectrogramLayer(buffer, {
cannam@106 242 top: 10,
cannam@106 243 height: height * 0.9,
cannam@106 244 stepSize: 512,
cannam@106 245 fftSize: 1024
cannam@106 246 });
cannam@106 247 this.addLayer(spectrogramLayer, mainTrack, this.timeline.timeContext);
dev@110 248 */
dev@53 249 this.cursorLayer = new wavesUI.helpers.CursorLayer({
dev@31 250 height: height
dev@31 251 });
dev@54 252 this.addLayer(this.cursorLayer, mainTrack, this.timeline.timeContext);
dev@51 253 this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
dev@54 254 mainTrack.render();
dev@54 255 mainTrack.update();
dev@81 256
dev@81 257
dev@81 258 if ('ontouchstart' in window) {
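// Touch support via HammerJS: panning scrolls the timeline, pinching zooms
// around the gesture centre, and tapping seeks the audio player.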
dev@110 259 interface Point {
dev@110 260 x: number;
dev@110 261 y: number;
dev@110 262 }
dev@110 263
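// A change in pinch distance is converted into an exponent for the zoom
// factor: spreading the fingers 100px further apart doubles the zoom.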
dev@110 264 const pixelToExponent: Function = wavesUI.utils.scales.linear()
dev@110 265 .domain([0, 100]) // 100px => factor 2
dev@110 266 .range([0, 1]);
dev@110 267
dev@110 268 const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
dev@110 269 return Math.sqrt(
dev@110 270 Math.pow(p2.x - p1.x, 2) +
dev@110 271 Math.pow(p2.y - p1.y, 2));
dev@110 272 };
dev@110 273
dev@84 274 const hammertime = new Hammer(this.trackDiv.nativeElement);
dev@81 275 const scroll = (ev) => {
dev@110 276 this.timeline.timeContext.offset = this.offsetAtPanStart +
dev@110 277 this.timeline.timeContext.timeToPixel.invert(ev.deltaX);
dev@81 278 this.timeline.tracks.update();
dev@81 279 };
dev@84 280
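// Pinch zoom: scale the zoom captured at pinchstart by 2^exponent (clamped to
// the state's limits), then shift the offset so the time under the gesture
// centre stays fixed on screen.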
dev@81 281 const zoom = (ev) => {
dev@81 282 const minZoom = this.timeline.state.minZoom;
dev@81 283 const maxZoom = this.timeline.state.maxZoom;
dev@110 284 const distance = calculateDistance({
dev@110 285 x: ev.pointers[0].clientX,
dev@110 286 y: ev.pointers[0].clientY
dev@110 287 }, {
dev@110 288 x: ev.pointers[1].clientX,
dev@110 289 y: ev.pointers[1].clientY
dev@110 290 });
dev@110 291
dev@110 292 const lastCenterTime =
dev@110 293 this.timeline.timeContext.timeToPixel.invert(ev.center.x);
dev@110 294
dev@110 295 const exponent = pixelToExponent(distance - this.initialDistance);
dev@110 296 const targetZoom = this.initialZoom * Math.pow(2, exponent);
dev@110 297
dev@110 298 this.timeline.timeContext.zoom =
dev@110 299 Math.min(Math.max(targetZoom, minZoom), maxZoom);
dev@110 300
dev@110 301 const newCenterTime =
dev@110 302 this.timeline.timeContext.timeToPixel.invert(ev.center.x);
dev@110 303
dev@96 304 this.timeline.timeContext.offset += newCenterTime - lastCenterTime;
dev@81 305 this.timeline.tracks.update();
dev@81 306 };
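// Tap to seek: convert the tapped x position back to a time relative to the
// current offset and seek the audio player there.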
dev@84 307 const seek = (ev) => {
dev@84 308 this.audioService.seekTo(
dev@84 309 this.timeline.timeContext.timeToPixel.invert(ev.center.x) - this.timeline.timeContext.offset
dev@84 310 );
dev@84 311 };
dev@81 312 hammertime.get('pinch').set({ enable: true });
dev@110 313 hammertime.on('panstart', () => {
dev@110 314 this.offsetAtPanStart = this.timeline.timeContext.offset;
dev@110 315 });
dev@81 316 hammertime.on('panleft', scroll);
dev@81 317 hammertime.on('panright', scroll);
dev@110 318 hammertime.on('pinchstart', (e) => {
dev@110 319 this.initialZoom = this.timeline.timeContext.zoom;
dev@110 320
dev@110 321 this.initialDistance = calculateDistance({
dev@110 322 x: e.pointers[0].clientX,
dev@110 323 y: e.pointers[0].clientY
dev@110 324 }, {
dev@110 325 x: e.pointers[1].clientX,
dev@110 326 y: e.pointers[1].clientY
dev@110 327 });
dev@110 328 });
dev@81 329 hammertime.on('pinch', zoom);
dev@84 330 hammertime.on('tap', seek);
dev@81 331 }
dev@81 332
dev@53 333 this.animate();
dev@53 334 }
dev@53 335
dev@53 336 // TODO refactor - this doesn't belong here
dev@64 337 private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
dev@64 338 if (!extracted.hasOwnProperty('features') || !extracted.hasOwnProperty('outputDescriptor')) return;
dev@64 339 if (!extracted.features.hasOwnProperty('shape') || !extracted.features.hasOwnProperty('data')) return;
dev@64 340 const features: FeatureCollection = (extracted.features as FeatureCollection);
dev@64 341 const outputDescriptor = extracted.outputDescriptor;
dev@64 342 const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
dev@64 343 const mainTrack = this.timeline.getTrackById('main');
dev@64 344
dev@64 345 // TODO refactor all of this
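// 'vector' features are drawn as a line layer, 'list' features as markers or
// (bar) regions depending on the output descriptor, and 'matrix' features as
// a spectrogram-style matrix layer.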
dev@63 346 switch (features.shape) {
dev@64 347 case 'vector': {
dev@63 348 const stepDuration = (features as FixedSpacedFeatures).stepDuration;
dev@63 349 const featureData = (features.data as Float32Array);
dev@68 350 if (featureData.length === 0) return;
dev@63 351 const normalisationFactor = 1.0 /
dev@63 352 featureData.reduce(
dev@63 353 (currentMax, feature) => Math.max(currentMax, feature),
dev@63 354 -Infinity
dev@63 355 );
dev@67 356
dev@63 357 const plotData = [...featureData].map((feature, i) => {
dev@63 358 return {
dev@63 359 cx: i * stepDuration,
dev@63 360 cy: feature * normalisationFactor
dev@63 361 };
dev@63 362 });
dev@67 363
dev@105 364 let lineLayer = new wavesUI.helpers.LineLayer(plotData, {
dev@63 365 color: colour,
dev@64 366 height: height
dev@63 367 });
dev@63 368 this.colouredLayers.set(this.addLayer(
dev@105 369 lineLayer,
dev@64 370 mainTrack,
dev@63 371 this.timeline.timeContext
dev@63 372 ), colour);
dev@63 373 break;
dev@64 374 }
dev@64 375 case 'list': {
dev@64 376 const featureData = (features.data as FeatureList);
dev@68 377 if (featureData.length === 0) return;
dev@64 378 // TODO look at output descriptor instead of directly inspecting features
dev@64 379 const hasDuration = outputDescriptor.configured.hasDuration;
dev@64 380 const isMarker = !hasDuration
dev@64 381 && outputDescriptor.configured.binCount === 0
dev@64 382 && featureData[0].featureValues == null;
dev@64 383 const isRegion = hasDuration
dev@64 384 && featureData[0].timestamp != null;
dev@64 385 // TODO refactor, this is incomprehensible
dev@64 386 if (isMarker) {
dev@64 387 const plotData = featureData.map(feature => {
dev@64 388 return {x: toSeconds(feature.timestamp)}
dev@64 389 });
dev@64 390 let markerLayer = new wavesUI.helpers.MarkerLayer(plotData, {
dev@64 391 height: height,
dev@64 392 color: colour,
dev@64 393 });
dev@64 394 this.colouredLayers.set(this.addLayer(
dev@64 395 markerLayer,
dev@64 396 mainTrack,
dev@64 397 this.timeline.timeContext
dev@64 398 ), colour);
dev@64 399 } else if (isRegion) {
dev@67 400 const binCount = outputDescriptor.configured.binCount || 0;
dev@67 401 const isBarRegion = binCount >= 1 || (featureData[0].featureValues || []).length >= 1;
dev@64 402 const getSegmentArgs = () => {
dev@64 403 if (isBarRegion) {
dev@64 404
dev@67 405 // TODO refactor - this is messy
dev@67 406 interface FoldsToNumber<T> {
dev@67 407 reduce(fn: (previousValue: number,
dev@67 408 currentValue: T,
dev@67 409 currentIndex: number,
dev@67 410 array: ArrayLike<T>) => number,
dev@67 411 initialValue?: number): number;
dev@67 412 }
dev@64 413
dev@67 414 // TODO potentially change impl., i.e. avoid reduce
dev@67 415 const findMin = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
dev@67 416 return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity);
dev@67 417 };
dev@67 418
dev@67 419 const findMax = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
dev@67 420 return arr.reduce((max, val) => Math.max(max, getElement(val)), -Infinity);
dev@67 421 };
dev@67 422
dev@67 423 const min = findMin<Feature>(featureData, (x: Feature) => {
dev@67 424 return findMin<number>(x.featureValues, y => y);
dev@67 425 });
dev@67 426
dev@67 427 const max = findMax<Feature>(featureData, (x: Feature) => {
dev@67 428 return findMax<number>(x.featureValues, y => y);
dev@67 429 });
dev@67 430
dev@67 431 const barHeight = 1.0 / height;
dev@64 432 return [
dev@67 433 featureData.reduce((bars, feature) => {
dev@67 434 const staticProperties = {
dev@64 435 x: toSeconds(feature.timestamp),
dev@64 436 width: toSeconds(feature.duration),
dev@67 437 height: min + barHeight,
dev@64 438 color: colour,
dev@64 439 opacity: 0.8
dev@67 440 };
dev@67 441 // TODO avoid copying Float32Array to an array - map is problematic here
dev@67 442 return bars.concat([...feature.featureValues]
dev@67 443 .map(val => Object.assign({}, staticProperties, {y: val})))
dev@67 444 }, []),
dev@67 445 {yDomain: [min, max + barHeight], height: height} as any
dev@67 446 ];
dev@64 447 } else {
dev@64 448 return [featureData.map(feature => {
dev@64 449 return {
dev@64 450 x: toSeconds(feature.timestamp),
dev@64 451 width: toSeconds(feature.duration),
dev@64 452 color: colour,
dev@64 453 opacity: 0.8
dev@64 454 }
dev@64 455 }), {height: height}];
dev@64 456 }
dev@64 457 };
dev@64 458
dev@64 459 let segmentLayer = new wavesUI.helpers.SegmentLayer(
dev@64 460 ...getSegmentArgs()
dev@64 461 );
dev@64 462 this.colouredLayers.set(this.addLayer(
dev@64 463 segmentLayer,
dev@64 464 mainTrack,
dev@64 465 this.timeline.timeContext
dev@64 466 ), colour);
dev@64 467 }
dev@64 468 break;
dev@64 469 }
cannam@106 470 case 'matrix': {
cannam@108 471 const stepDuration = (features as FixedSpacedFeatures).stepDuration;
cannam@108 472 const matrixData = (features.data as Float32Array[]);
cannam@108 473 if (matrixData.length === 0) return;
cannam@109 474 console.log("matrix data length = " + matrixData.length);
cannam@109 475 console.log("height of first column = " + matrixData[0].length);
cannam@109 476 const targetValue = this.estimatePercentile(matrixData, 95);
cannam@108 477 const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
cannam@108 478 console.log("setting gain to " + gain);
cannam@108 479 const matrixEntity = new wavesUI.utils.PrefilledMatrixEntity(matrixData);
cannam@108 480 let matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
cannam@108 481 gain,
cannam@109 482 height: height * 0.8,
cannam@109 483 top: height * 0.1,
cannam@109 484 normalise: 'none',
cannam@108 485 mapper: this.iceMapper()
cannam@108 486 });
cannam@108 487 this.colouredLayers.set(this.addLayer(
cannam@108 488 matrixLayer,
cannam@108 489 mainTrack,
cannam@108 490 this.timeline.timeContext
cannam@108 491 ), colour);
cannam@108 492 break;
cannam@106 493 }
dev@67 494 default:
cannam@106 495 console.log("Cannot render an appropriate layer for feature shape '" +
cannam@106 496 features.shape + "'");
dev@63 497 }
dev@59 498
dev@56 499 this.timeline.tracks.update();
dev@53 500 }
dev@53 501
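// Runs the playback-cursor animation loop outside Angular's zone so that
// per-frame updates don't trigger change detection.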
dev@53 502 private animate(): void {
dev@31 503 this.ngZone.runOutsideAngular(() => {
dev@31 504 // listen for time passing...
dev@31 505 const updateSeekingCursor = () => {
dev@53 506 const currentTime = this.audioService.getCurrentTime();
dev@53 507 this.cursorLayer.currentPosition = currentTime;
dev@53 508 this.cursorLayer.update();
dev@53 509
dev@53 510 const currentOffset = this.timeline.timeContext.offset;
dev@53 511 const offsetTimestamp = currentOffset + currentTime;
dev@53 513
dev@53 514 const visibleDuration = this.timeline.timeContext.visibleDuration;
dev@53 515 // TODO reduce duplication between directions and make more declarative
dev@53 516 // this kinda logic should also be tested
dev@53 517 const mustPageForward = offsetTimestamp > visibleDuration;
dev@53 518 const mustPageBackward = currentTime < -currentOffset;
dev@53 519
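// Page the view when the playhead leaves the visible region: step one page in
// the direction of travel, or recentre on the playhead if it has skipped more
// than a page.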
dev@53 520 if (mustPageForward) {
dev@53 521 const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration;
dev@53 522
cannam@106 523 this.timeline.timeContext.offset = hasSkippedMultiplePages ?
cannam@106 524 -currentTime + 0.5 * visibleDuration :
cannam@106 525 currentOffset - visibleDuration;
dev@51 526 this.timeline.tracks.update();
dev@34 527 }
dev@53 528
dev@53 529 if (mustPageBackward) {
dev@53 530 const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset;
cannam@106 531 this.timeline.timeContext.offset = hasSkippedMultiplePages ?
cannam@106 532 -currentTime + 0.5 * visibleDuration :
cannam@106 533 currentOffset + visibleDuration;
dev@51 534 this.timeline.tracks.update();
dev@34 535 }
dev@53 536
dev@53 537 if (this.isPlaying)
dev@53 538 requestAnimationFrame(updateSeekingCursor);
dev@31 539 };
dev@31 540 updateSeekingCursor();
dev@31 541 });
dev@6 542 }
dev@16 543
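// Attaches a layer to the given track with a suitable time context and returns
// its index into disposableLayers, so the layer can later be removed or
// recoloured.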
dev@59 544 private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): DisposableIndex {
dev@54 545 timeContext.zoom = 1.0;
dev@54 546 if (!layer.timeContext) {
dev@54 547 layer.setTimeContext(isAxis ?
dev@54 548 timeContext : new wavesUI.core.LayerTimeContext(timeContext));
dev@54 549 }
dev@54 550 track.add(layer);
dev@54 551 layer.render();
dev@54 552 layer.update();
dev@59 553 return this.disposableLayers.push(layer) - 1;
dev@59 554 }
dev@59 555
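// Recolours an existing layer by reaching into waves-ui's internal shape maps
// (hence "butcherShapes"), then re-renders and updates it.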
dev@59 556 private static changeColour(layer: Layer, colour: string): void {
dev@59 557 const butcherShapes = (shape) => {
dev@59 558 shape.install({color: () => colour});
dev@59 559 shape.params.color = colour;
dev@59 560 shape.update(layer._renderingContext, layer.data);
dev@59 561 };
dev@59 562
dev@59 563 layer._$itemCommonShapeMap.forEach(butcherShapes);
dev@59 564 layer._$itemShapeMap.forEach(butcherShapes);
dev@59 565 layer.render();
dev@59 566 layer.update();
dev@54 567 }
dev@54 568
dev@51 569 ngOnDestroy(): void {
dev@51 570 this.featureExtractionSubscription.unsubscribe();
dev@53 571 this.playingStateSubscription.unsubscribe();
dev@53 572 this.seekedSubscription.unsubscribe();
dev@51 573 }
dev@6 574 }