annotate src/app/waveform/waveform.component.ts @ 157:3f54066a0e01

Prevent seeking when panning with the mouse. This approach can still go wrong, for both zooming and panning: if the offset or zoom is returned to its starting value while the button is held down, releasing the mouse will still seek to its current x position.
author Lucas Thompson <dev@lucas.im>
date Tue, 21 Mar 2017 17:34:48 +0000
parents 53ddfd792844
children 28cb8530300b
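
The guard this revision adds records the zoom and offset on mousedown and only seeks on mouseup if both are unchanged (see seekStart/seekEnd below). One way to close the remaining hole would be a flag set by the pan and zoom handlers; the sketch below is illustrative, with hypothetical names, and is not part of this revision:

    // Hypothetical stricter guard: remember whether any pan or zoom occurred
    // during the click, rather than comparing start and end values, so a view
    // that is dragged away and back again still suppresses the seek.
    private viewChangedDuringMouseDown = false; // set true in pan/zoom handlers

    seekEnd(x: number): void {
      if (!this.viewChangedDuringMouseDown) {
        this.seek(x);
      }
      this.viewChangedDuringMouseDown = false;
    }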
rev   line source
dev@10 1 import {
dev@51 2 Component, OnInit, ViewChild, ElementRef, Input, AfterViewInit, NgZone,
dev@51 3 OnDestroy
dev@10 4 } from '@angular/core';
dev@39 5 import {AudioPlayerService} from "../services/audio-player/audio-player.service";
dev@36 6 import wavesUI from 'waves-ui';
dev@63 7 import {
dev@64 8 FeatureExtractionService
dev@63 9 } from "../services/feature-extraction/feature-extraction.service";
dev@51 10 import {Subscription} from "rxjs";
dev@63 11 import {
dev@63 12 FeatureCollection,
dev@64 13 FixedSpacedFeatures, SimpleResponse
dev@63 14 } from "piper/HigherLevelUtilities";
dev@53 15 import {toSeconds} from "piper";
dev@67 16 import {FeatureList, Feature} from "piper/Feature";
dev@81 17 import * as Hammer from 'hammerjs';
dev@129 18 import {WavesSpectrogramLayer} from "../spectrogram/Spectrogram";
dev@8 19
dev@20 20 type Timeline = any; // TODO what type is this actually? Start a .d.ts for waves-ui?
dev@54 21 type Layer = any;
dev@54 22 type Track = any;
dev@59 23 type Colour = string;
dev@6 24
dev@6 25 @Component({
dev@6 26 selector: 'app-waveform',
dev@6 27 templateUrl: './waveform.component.html',
dev@6 28 styleUrls: ['./waveform.component.css']
dev@6 29 })
dev@51 30 export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {
dev@20 31
dev@8 32 @ViewChild('track') trackDiv: ElementRef;
dev@6 33
dev@54 34 private _audioBuffer: AudioBuffer;
dev@54 35 private timeline: Timeline;
dev@54 36 private cursorLayer: any;
dev@16 37
dev@16 38 @Input()
dev@16 39 set audioBuffer(buffer: AudioBuffer) {
dev@16 40 this._audioBuffer = buffer || undefined;
cannam@117 41 if (this.audioBuffer) {
dev@20 42 this.renderWaveform(this.audioBuffer);
cannam@117 43 this.renderSpectrogram(this.audioBuffer);
cannam@117 44 }
dev@16 45 }
dev@16 46
dev@16 47 get audioBuffer(): AudioBuffer {
dev@16 48 return this._audioBuffer;
dev@16 49 }
dev@16 50
dev@51 51 private featureExtractionSubscription: Subscription;
dev@53 52 private playingStateSubscription: Subscription;
dev@53 53 private seekedSubscription: Subscription;
dev@53 54 private isPlaying: boolean;
dev@110 55 private offsetAtPanStart: number;
dev@110 56 private initialZoom: number;
dev@110 57 private initialDistance: number;
dev@155 58 private zoomOnMouseDown: number;
dev@157 59 private offsetOnMouseDown: number;
dev@51 60
dev@31 61 constructor(private audioService: AudioPlayerService,
dev@51 62 private piperService: FeatureExtractionService,
dev@51 63 public ngZone: NgZone) {
dev@54 64 this._audioBuffer = undefined;
dev@54 65 this.timeline = undefined;
dev@54 66 this.cursorLayer = undefined;
dev@53 67 this.isPlaying = false;
dev@59 68 const colours = function* () {
dev@59 69 const circularColours = [
dev@59 70 'black',
dev@59 71 'red',
dev@59 72 'green',
dev@59 73 'purple',
dev@59 74 'orange'
dev@59 75 ];
dev@59 76 let index = 0;
dev@59 77 const nColours = circularColours.length;
dev@59 78 while (true) {
dev@59 79 yield circularColours[index = ++index % nColours];
dev@59 80 }
dev@59 81 }();
dev@59 82
dev@51 83 this.featureExtractionSubscription = piperService.featuresExtracted$.subscribe(
dev@51 84 features => {
dev@59 85 this.renderFeatures(features, colours.next().value);
dev@51 86 });
dev@53 87 this.playingStateSubscription = audioService.playingStateChange$.subscribe(
dev@53 88 isPlaying => {
dev@53 89 this.isPlaying = isPlaying;
dev@53 90 if (this.isPlaying)
dev@53 91 this.animate();
dev@53 92 });
dev@53 93 this.seekedSubscription = audioService.seeked$.subscribe(() => {
dev@53 94 if (!this.isPlaying)
dev@53 95 this.animate();
dev@53 96 });
dev@51 97 }
dev@51 98
dev@53 99 ngOnInit() {
dev@53 100 }
dev@10 101
dev@10 102 ngAfterViewInit(): void {
dev@51 103 this.timeline = this.renderTimeline();
dev@20 104 }
dev@20 105
dev@20 106 renderTimeline(duration: number = 1.0): Timeline {
dev@18 107 const track: HTMLElement = this.trackDiv.nativeElement;
dev@20 108 track.innerHTML = "";
dev@18 109 const height: number = track.getBoundingClientRect().height;
dev@18 110 const width: number = track.getBoundingClientRect().width;
dev@18 111 const pixelsPerSecond = width / duration;
dev@18 112 const timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
cannam@117 113 timeline.createTrack(track, height/2, 'wave');
cannam@117 114 timeline.createTrack(track, height/2, 'grid');
dev@54 115 return timeline;
dev@54 116 }
dev@18 117
cannam@108 118 estimatePercentile(matrix, percentile) {
cannam@108 119 // our sample is not evenly distributed across the whole data set:
cannam@108 120 // it is guaranteed to include at least one sample from every
cannam@108 121 // column, and could sample some values more than once. But it
cannam@108 122 // should be good enough in most cases (todo: show this)
cannam@109 123 if (matrix.length === 0) {
cannam@109 124 return 0.0;
cannam@109 125 }
cannam@108 126 const w = matrix.length;
cannam@108 127 const h = matrix[0].length;
cannam@108 128 const n = w * h;
cannam@109 129 const m = (n > 50000 ? 50000 : n); // should base that on the %ile
cannam@108 130 let m_per = Math.floor(m / w);
cannam@108 131 if (m_per < 1) m_per = 1;
cannam@108 132 let sample = [];
cannam@108 133 for (let x = 0; x < w; ++x) {
cannam@108 134 for (let i = 0; i < m_per; ++i) {
cannam@108 135 const y = Math.floor(Math.random() * h);
cannam@109 136 const value = matrix[x][y];
cannam@109 137 if (!isNaN(value) && value !== Infinity) {
cannam@109 138 sample.push(value);
cannam@109 139 }
cannam@108 140 }
cannam@108 141 }
cannam@109 142 if (sample.length === 0) {
cannam@109 143 console.log("WARNING: No samples gathered, even though we hoped for " +
cannam@109 144 (m_per * w) + " of them");
cannam@109 145 return 0.0;
cannam@109 146 }
cannam@108 147 sample.sort((a,b) => { return a - b; });
cannam@108 148 const ix = Math.floor((sample.length * percentile) / 100);
cannam@108 149 console.log("Estimating " + percentile + "-%ile of " +
cannam@108 150 n + "-sample dataset (" + w + " x " + h + ") as value " + ix +
cannam@108 151 " of sorted " + sample.length + "-sample subset");
cannam@108 152 const estimate = sample[ix];
cannam@108 153 console.log("Estimate is: " + estimate + " (where min sampled value = " +
cannam@108 154 sample[0] + " and max = " + sample[sample.length-1] + ")");
cannam@108 155 return estimate;
cannam@108 156 }
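  // Usage sketch (illustrative): the matrix is an array of columns, so for
  //
  //   const m = [Float32Array.from([1, 2, 3]), Float32Array.from([4, 5, 6])];
  //   this.estimatePercentile(m, 95); // usually 6; an estimate, not exact
  //
  // three values are drawn at random from each column, the pooled sample is
  // sorted, and the value at the 95th-percentile index of the sample is
  // returned.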
cannam@108 157
cannam@108 158 interpolatingMapper(hexColours) {
cannam@108 159 const colours = hexColours.map(n => {
cannam@108 160 const i = parseInt(n, 16);
cannam@118 161 return [ ((i >> 16) & 255) / 255.0,
cannam@118 162 ((i >> 8) & 255) / 255.0,
cannam@118 163 ((i) & 255) / 255.0 ];
cannam@108 164 });
cannam@108 165 const last = colours.length - 1;
cannam@108 166 return (value => {
cannam@108 167 const m = value * last;
cannam@108 168 if (m >= last) {
cannam@108 169 return colours[last];
cannam@108 170 }
cannam@108 171 if (m <= 0) {
cannam@108 172 return colours[0];
cannam@108 173 }
cannam@108 174 const base = Math.floor(m);
cannam@108 175 const prop0 = base + 1.0 - m;
cannam@108 176 const prop1 = m - base;
cannam@108 177 const c0 = colours[base];
cannam@108 178 const c1 = colours[base+1];
cannam@118 179 return [ c0[0] * prop0 + c1[0] * prop1,
cannam@118 180 c0[1] * prop0 + c1[1] * prop1,
cannam@118 181 c0[2] * prop0 + c1[2] * prop1 ];
cannam@108 182 });
cannam@108 183 }
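  // Worked example (illustrative): interpolatingMapper(['000000', 'ffffff'])
  // maps 0 to [0, 0, 0], 1 to [1, 1, 1] and 0.5 to [0.5, 0.5, 0.5]: a value
  // in [0, 1] is scaled onto the colour index range and linearly blended
  // between the two nearest stops.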
dev@110 184
cannam@108 185 iceMapper() {
dev@110 186 let hexColours = [
cannam@108 187 // Based on ColorBrewer ylGnBu
cannam@108 188 "ffffff", "ffff00", "f7fcf0", "e0f3db", "ccebc5", "a8ddb5",
cannam@108 189 "7bccc4", "4eb3d3", "2b8cbe", "0868ac", "084081", "042040"
cannam@108 190 ];
cannam@108 191 hexColours.reverse();
cannam@108 192 return this.interpolatingMapper(hexColours);
cannam@108 193 }
dev@110 194
cannam@118 195 hsv2rgb(h, s, v) { // all values in range [0, 1]
cannam@118 196 const i = Math.floor(h * 6);
cannam@118 197 const f = h * 6 - i;
cannam@118 198 const p = v * (1 - s);
cannam@118 199 const q = v * (1 - f * s);
cannam@118 200 const t = v * (1 - (1 - f) * s);
cannam@118 201 let r = 0, g = 0, b = 0;
cannam@118 202 switch (i % 6) {
cannam@118 203 case 0: r = v, g = t, b = p; break;
cannam@118 204 case 1: r = q, g = v, b = p; break;
cannam@118 205 case 2: r = p, g = v, b = t; break;
cannam@118 206 case 3: r = p, g = q, b = v; break;
cannam@118 207 case 4: r = t, g = p, b = v; break;
cannam@118 208 case 5: r = v, g = p, b = q; break;
cannam@118 209 }
cannam@118 210 return [ r, g, b ];
cannam@118 211 }
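  // Sanity checks (illustrative): hsv2rgb(0, 1, 1) = [1, 0, 0] (red),
  // hsv2rgb(1 / 3, 1, 1) = [0, 1, 0] (green), and any hue with s = 0
  // collapses to the grey [v, v, v].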
dev@122 212
cannam@118 213 greenMapper() {
cannam@118 214 const blue = 0.6666;
cannam@118 215 const pieslice = 0.3333;
cannam@118 216 return (value => {
cannam@118 217 const h = blue - value * 2.0 * pieslice;
cannam@118 218 const s = 0.5 + value / 2.0;
cannam@118 219 const v = value;
cannam@118 220 return this.hsv2rgb(h, s, v);
cannam@118 221 });
cannam@118 222 }
cannam@118 223
cannam@118 224 sunsetMapper() {
cannam@118 225 return (value => {
cannam@118 226 let r = (value - 0.24) * 2.38;
cannam@118 227 let g = (value - 0.64) * 2.777;
cannam@118 228 let b = (3.6 * value);
cannam@118 229 if (value > 0.277) b = 2.0 - b;
cannam@118 230 return [ r, g, b ];
cannam@118 231 });
cannam@118 232 }
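  // Note: this mapper is not clamped -- at value = 1.0 the red channel is
  // (1.0 - 0.24) * 2.38 ~= 1.81, and values below 0.24 give negative red.
  // Presumably the renderer clamps components to [0, 1] before drawing.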
cannam@118 233
dev@122 234 clearTimeline(): void {
dev@122 235 // Loop through the layers and remove them manually; waves-ui provides methods for this, but they do not seem to work properly.
dev@122 236 const timeContextChildren = this.timeline.timeContext._children;
dev@122 237 for (let track of this.timeline.tracks) {
dev@122 238 if (track.layers.length === 0) { continue; }
dev@122 239 const trackLayers = Array.from(track.layers);
dev@122 240 while (trackLayers.length) {
dev@122 241 let layer: Layer = trackLayers.pop();
dev@122 242 track.remove(layer);
dev@122 243
dev@122 244 const index = timeContextChildren.indexOf(layer.timeContext);
dev@122 245 if (index >= 0) {
dev@122 246 timeContextChildren.splice(index, 1);
dev@122 247 }
dev@122 248 layer.destroy();
dev@122 249 }
dev@122 250 }
dev@122 251 }
dev@122 252
dev@54 253 renderWaveform(buffer: AudioBuffer): void {
cannam@117 254 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
dev@54 256 if (this.timeline) {
dev@54 257 // resize
dev@54 258 const width = this.trackDiv.nativeElement.getBoundingClientRect().width;
dev@55 259
dev@122 260 this.clearTimeline();
dev@59 261
dev@54 262 this.timeline.visibleWidth = width;
dev@54 263 this.timeline.pixelsPerSecond = width / buffer.duration;
dev@54 265 } else {
dev@54 266 this.timeline = this.renderTimeline(buffer.duration);
dev@54 267 }
cannam@117 255 // Fetch the track only once the timeline is guaranteed to exist: the
cannam@117 255 // @Input setter can fire before ngAfterViewInit creates the timeline.
cannam@117 255 const waveTrack = this.timeline.getTrackById('wave');
cannam@117 264 waveTrack.height = height;
dev@83 268 this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;
cannam@106 269
dev@18 270 // time axis
dev@18 271 const timeAxis = new wavesUI.helpers.TimeAxisLayer({
dev@18 272 height: height,
cannam@106 273 color: '#b0b0b0'
dev@18 274 });
cannam@117 275 this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);
dev@18 276
dev@20 277 const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
dev@10 278 top: 10,
dev@20 279 height: height * 0.9,
dev@16 280 color: 'darkblue'
dev@16 281 });
cannam@117 282 this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);
cannam@117 283
dev@53 284 this.cursorLayer = new wavesUI.helpers.CursorLayer({
dev@31 285 height: height
dev@31 286 });
cannam@117 287 this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
dev@51 288 this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
cannam@117 289 waveTrack.render();
cannam@117 290 waveTrack.update();
dev@81 291
dev@81 292
dev@81 293 if ('ontouchstart' in window) {
dev@110 294 interface Point {
dev@110 295 x: number;
dev@110 296 y: number;
dev@110 297 }
dev@110 298
dev@113 299 let zoomGestureJustEnded: boolean = false;
dev@113 300
dev@110 301 const pixelToExponent: Function = wavesUI.utils.scales.linear()
dev@110 302 .domain([0, 100]) // 100px => factor 2
dev@110 303 .range([0, 1]);
dev@110 304
dev@110 305 const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
dev@110 306 return Math.hypot(p2.x - p1.x, p2.y - p1.y);
dev@110 309 };
dev@110 310
dev@84 311 const hammertime = new Hammer(this.trackDiv.nativeElement);
dev@81 312 const scroll = (ev) => {
dev@113 313 if (zoomGestureJustEnded) {
dev@113 314 zoomGestureJustEnded = false;
dev@113 315 console.log("Skip this event: likely a single touch dangling from pinch");
dev@113 316 return;
dev@113 317 }
dev@110 318 this.timeline.timeContext.offset = this.offsetAtPanStart +
dev@110 319 this.timeline.timeContext.timeToPixel.invert(ev.deltaX);
dev@81 320 this.timeline.tracks.update();
dev@81 321 };
dev@84 322
dev@81 323 const zoom = (ev) => {
dev@81 324 const minZoom = this.timeline.state.minZoom;
dev@81 325 const maxZoom = this.timeline.state.maxZoom;
dev@110 326 const distance = calculateDistance({
dev@110 327 x: ev.pointers[0].clientX,
dev@110 328 y: ev.pointers[0].clientY
dev@110 329 }, {
dev@110 330 x: ev.pointers[1].clientX,
dev@110 331 y: ev.pointers[1].clientY
dev@110 332 });
dev@110 333
dev@110 334 const lastCenterTime =
dev@110 335 this.timeline.timeContext.timeToPixel.invert(ev.center.x);
dev@110 336
dev@110 337 const exponent = pixelToExponent(distance - this.initialDistance);
dev@110 338 const targetZoom = this.initialZoom * Math.pow(2, exponent);
dev@110 339
dev@110 340 this.timeline.timeContext.zoom =
dev@110 341 Math.min(Math.max(targetZoom, minZoom), maxZoom);
dev@110 342
dev@110 343 const newCenterTime =
dev@110 344 this.timeline.timeContext.timeToPixel.invert(ev.center.x);
dev@110 345
dev@96 346 this.timeline.timeContext.offset += newCenterTime - lastCenterTime;
dev@81 347 this.timeline.tracks.update();
dev@81 348 };
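      // Worked example (illustrative): spreading the pointers 100px further
      // apart than at pinchstart gives pixelToExponent(100) = 1, so targetZoom
      // is initialZoom * 2^1 -- each additional 100px of spread doubles the
      // zoom. Re-reading the centre time after the zoom change and adding the
      // difference to the offset keeps the time under the gesture centre fixed.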
dev@81 349 hammertime.get('pinch').set({ enable: true });
dev@110 350 hammertime.on('panstart', () => {
dev@110 351 this.offsetAtPanStart = this.timeline.timeContext.offset;
dev@110 352 });
dev@81 353 hammertime.on('panleft', scroll);
dev@81 354 hammertime.on('panright', scroll);
dev@110 355 hammertime.on('pinchstart', (e) => {
dev@110 356 this.initialZoom = this.timeline.timeContext.zoom;
dev@110 357
dev@110 358 this.initialDistance = calculateDistance({
dev@110 359 x: e.pointers[0].clientX,
dev@110 360 y: e.pointers[0].clientY
dev@110 361 }, {
dev@110 362 x: e.pointers[1].clientX,
dev@110 363 y: e.pointers[1].clientY
dev@110 364 });
dev@110 365 });
dev@81 366 hammertime.on('pinch', zoom);
dev@113 367 hammertime.on('pinchend', () => {
dev@113 368 zoomGestureJustEnded = true;
dev@113 369 });
dev@81 370 }
dev@81 371
dev@53 372 this.animate();
dev@53 373 }
dev@53 374
cannam@117 375 renderSpectrogram(buffer: AudioBuffer): void {
cannam@117 376 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
cannam@117 377 const gridTrack = this.timeline.getTrackById('grid');
cannam@117 378
dev@129 379 const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
cannam@118 380 top: height * 0.05,
cannam@117 381 height: height * 0.9,
cannam@117 382 stepSize: 512,
dev@129 383 blockSize: 1024,
cannam@118 384 normalise: 'none',
cannam@118 385 mapper: this.sunsetMapper()
cannam@117 386 });
cannam@117 387 this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);
cannam@117 388
cannam@117 389 this.timeline.tracks.update();
cannam@117 390 }
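  // Resolution sketch (illustrative, assuming a 44100 Hz buffer): with
  // stepSize 512 and blockSize 1024, each spectrogram column advances
  // 512 / 44100 ~= 11.6 ms and each bin spans 44100 / 1024 ~= 43 Hz.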
cannam@117 391
dev@53 392 // TODO refactor - this doesn't belong here
dev@64 393 private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
dev@64 394 if (!extracted.hasOwnProperty('features') || !extracted.hasOwnProperty('outputDescriptor')) return;
dev@64 395 if (!extracted.features.hasOwnProperty('shape') || !extracted.features.hasOwnProperty('data')) return;
dev@64 396 const features: FeatureCollection = (extracted.features as FeatureCollection);
dev@64 397 const outputDescriptor = extracted.outputDescriptor;
cannam@118 398 const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
cannam@118 399 const waveTrack = this.timeline.getTrackById('wave');
dev@64 400
dev@64 401 // TODO refactor all of this
dev@63 402 switch (features.shape) {
dev@64 403 case 'vector': {
dev@63 404 const stepDuration = (features as FixedSpacedFeatures).stepDuration;
dev@63 405 const featureData = (features.data as Float32Array);
dev@68 406 if (featureData.length === 0) return;
dev@63 407 const normalisationFactor = 1.0 /
dev@63 408 featureData.reduce(
dev@63 409 (currentMax, feature) => Math.max(currentMax, feature),
dev@63 410 -Infinity
dev@63 411 );
dev@67 412
dev@63 413 const plotData = [...featureData].map((feature, i) => {
dev@63 414 return {
dev@63 415 cx: i * stepDuration,
dev@63 416 cy: feature * normalisationFactor
dev@63 417 };
dev@63 418 });
dev@67 419
dev@105 420 let lineLayer = new wavesUI.helpers.LineLayer(plotData, {
dev@63 421 color: colour,
dev@64 422 height: height
dev@63 423 });
dev@122 424 this.addLayer(
dev@105 425 lineLayer,
cannam@117 426 waveTrack,
dev@63 427 this.timeline.timeContext
dev@122 428 );
dev@63 429 break;
dev@64 430 }
dev@64 431 case 'list': {
dev@64 432 const featureData = (features.data as FeatureList);
dev@68 433 if (featureData.length === 0) return;
dev@64 434 // TODO look at output descriptor instead of directly inspecting features
dev@64 435 const hasDuration = outputDescriptor.configured.hasDuration;
dev@64 436 const isMarker = !hasDuration
dev@64 437 && outputDescriptor.configured.binCount === 0
dev@64 438 && featureData[0].featureValues == null;
dev@64 439 const isRegion = hasDuration
dev@64 440 && featureData[0].timestamp != null;
cannam@149 441 console.log("Have list features: length " + featureData.length +
cannam@149 442 ", isMarker " + isMarker + ", isRegion " + isRegion +
cannam@149 443 ", hasDuration " + hasDuration);
dev@64 444 // TODO refactor, this is incomprehensible
dev@64 445 if (isMarker) {
dev@64 446 const plotData = featureData.map(feature => {
cannam@149 447 return {time: toSeconds(feature.timestamp)};
dev@64 448 });
cannam@149 449 let featureLayer = new wavesUI.helpers.TickLayer(plotData, {
dev@64 450 height: height,
dev@64 451 color: colour,
dev@64 452 });
dev@122 453 this.addLayer(
cannam@149 454 featureLayer,
cannam@117 455 waveTrack,
dev@64 456 this.timeline.timeContext
dev@122 457 );
dev@64 458 } else if (isRegion) {
cannam@149 459 console.log("Output is of region type");
dev@67 460 const binCount = outputDescriptor.configured.binCount || 0;
dev@67 461 const isBarRegion = binCount >= 1 || (featureData[0].featureValues || []).length >= 1;
dev@64 462 const getSegmentArgs = () => {
dev@64 463 if (isBarRegion) {
dev@64 464
dev@67 465 // TODO refactor - this is messy
dev@67 466 interface FoldsToNumber<T> {
dev@67 467 reduce(fn: (previousValue: number,
dev@67 468 currentValue: T,
dev@67 469 currentIndex: number,
dev@67 470 array: ArrayLike<T>) => number,
dev@67 471 initialValue?: number): number;
dev@67 472 }
dev@64 473
dev@67 474 // TODO potentially change impl., i.e avoid reduce
dev@67 475 const findMin = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
dev@67 476 return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity);
dev@67 477 };
dev@67 478
dev@67 479 const findMax = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
dev@67 480 return arr.reduce((max, val) => Math.max(max, getElement(val)), -Infinity);
dev@67 481 };
dev@67 482
dev@67 483 const min = findMin<Feature>(featureData, (x: Feature) => {
dev@67 484 return findMin<number>(x.featureValues, y => y);
dev@67 485 });
dev@67 486
dev@67 487 const max = findMax<Feature>(featureData, (x: Feature) => {
dev@67 488 return findMax<number>(x.featureValues, y => y);
dev@67 489 });
dev@67 490
dev@67 491 const barHeight = 1.0 / height;
dev@64 492 return [
dev@67 493 featureData.reduce((bars, feature) => {
dev@67 494 const staticProperties = {
dev@64 495 x: toSeconds(feature.timestamp),
dev@64 496 width: toSeconds(feature.duration),
dev@67 497 height: min + barHeight,
dev@64 498 color: colour,
dev@64 499 opacity: 0.8
dev@67 500 };
dev@67 501 // TODO avoid copying Float32Array to an array - map is problematic here
dev@67 502 return bars.concat([...feature.featureValues]
dev@67 503 .map(val => Object.assign({}, staticProperties, {y: val})))
dev@67 504 }, []),
dev@67 505 {yDomain: [min, max + barHeight], height: height} as any
dev@67 506 ];
dev@64 507 } else {
dev@64 508 return [featureData.map(feature => {
dev@64 509 return {
dev@64 510 x: toSeconds(feature.timestamp),
dev@64 511 width: toSeconds(feature.duration),
dev@64 512 color: colour,
dev@64 513 opacity: 0.8
dev@64 514 }
dev@64 515 }), {height: height}];
dev@64 516 }
dev@64 517 };
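        // Shape sketch (illustrative): a bar region such as
        // { timestamp: 2s, duration: 1s, featureValues: [0.2, 0.5] } yields
        // two stacked segments at x = 2, width = 1, one per value (y = 0.2
        // and 0.5), with a yDomain spanning all values; a value-less region
        // yields a single full-height segment per feature.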
dev@64 518
dev@64 519 let segmentLayer = new wavesUI.helpers.SegmentLayer(
dev@64 520 ...getSegmentArgs()
dev@64 521 );
dev@122 522 this.addLayer(
dev@64 523 segmentLayer,
cannam@117 524 waveTrack,
dev@64 525 this.timeline.timeContext
dev@122 526 );
dev@64 527 }
dev@64 528 break;
dev@64 529 }
cannam@106 530 case 'matrix': {
cannam@108 531 const stepDuration = (features as FixedSpacedFeatures).stepDuration;
cannam@120 532 //!!! + start time
cannam@108 533 const matrixData = (features.data as Float32Array[]);
cannam@108 534 if (matrixData.length === 0) return;
cannam@109 535 console.log("matrix data length = " + matrixData.length);
cannam@109 536 console.log("height of first column = " + matrixData[0].length);
cannam@109 537 const targetValue = this.estimatePercentile(matrixData, 95);
cannam@108 538 const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
cannam@108 539 console.log("setting gain to " + gain);
cannam@120 540 const matrixEntity =
cannam@120 541 new wavesUI.utils.PrefilledMatrixEntity(matrixData,
cannam@120 542 0, // startTime
cannam@120 543 stepDuration);
cannam@108 544 let matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
cannam@108 545 gain,
cannam@118 546 height: height * 0.9,
cannam@118 547 top: height * 0.05,
cannam@109 548 normalise: 'none',
cannam@108 549 mapper: this.iceMapper()
cannam@108 550 });
dev@122 551 this.addLayer(
cannam@108 552 matrixLayer,
cannam@117 553 waveTrack,
cannam@108 554 this.timeline.timeContext
dev@122 555 );
cannam@108 556 break;
cannam@106 557 }
dev@67 558 default:
cannam@106 559 console.log("Cannot render an appropriate layer for feature shape '" +
cannam@106 560 features.shape + "'");
dev@63 561 }
dev@59 562
dev@56 563 this.timeline.tracks.update();
dev@53 564 }
dev@53 565
dev@53 566 private animate(): void {
dev@31 567 this.ngZone.runOutsideAngular(() => {
dev@31 568 // listen for time passing...
dev@31 569 const updateSeekingCursor = () => {
dev@53 570 const currentTime = this.audioService.getCurrentTime();
dev@53 571 this.cursorLayer.currentPosition = currentTime;
dev@53 572 this.cursorLayer.update();
dev@53 573
dev@53 574 const currentOffset = this.timeline.timeContext.offset;
dev@53 575 const offsetTimestamp = currentOffset
dev@53 576 + currentTime;
dev@53 577
dev@53 578 const visibleDuration = this.timeline.timeContext.visibleDuration;
dev@53 579 // TODO reduce duplication between directions and make more declarative
dev@53 580 // this kinda logic should also be tested
dev@53 581 const mustPageForward = offsetTimestamp > visibleDuration;
dev@53 582 const mustPageBackward = currentTime < -currentOffset;
dev@53 583
dev@53 584 if (mustPageForward) {
dev@53 585 const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration;
dev@53 586
cannam@106 587 this.timeline.timeContext.offset = hasSkippedMultiplePages ?
cannam@106 588 -currentTime + 0.5 * visibleDuration :
cannam@106 589 currentOffset - visibleDuration;
dev@51 590 this.timeline.tracks.update();
dev@34 591 }
dev@53 592
dev@53 593 if (mustPageBackward) {
dev@53 594 const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset;
cannam@106 595 this.timeline.timeContext.offset = hasSkippedMultiplePages ?
cannam@106 596 -currentTime + 0.5 * visibleDuration :
cannam@106 597 currentOffset + visibleDuration;
dev@51 598 this.timeline.tracks.update();
dev@34 599 }
dev@53 600
dev@53 601 if (this.isPlaying)
dev@53 602 requestAnimationFrame(updateSeekingCursor);
dev@31 603 };
dev@31 604 updateSeekingCursor();
dev@31 605 });
dev@6 606 }
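  // Paging sketch (illustrative): with visibleDuration = 10s and offset = 0,
  // playback reaching currentTime = 10.5s gives offsetTimestamp = 10.5 >
  // visibleDuration, so the view pages forward to offset = -10s; a seek that
  // skips several pages instead re-centres the cursor at half the visible
  // duration.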
dev@16 607
dev@122 608 private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): void {
dev@54 609 timeContext.zoom = 1.0;
dev@54 610 if (!layer.timeContext) {
dev@54 611 layer.setTimeContext(isAxis ?
dev@54 612 timeContext : new wavesUI.core.LayerTimeContext(timeContext));
dev@54 613 }
dev@54 614 track.add(layer);
dev@54 615 layer.render();
dev@54 616 layer.update();
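    // Keep the cursor topmost: re-appending an element that is already in
    // the layout moves it to the end, so it is drawn above the new layer.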
dev@122 617 if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
dev@112 618 track.$layout.appendChild(this.cursorLayer.$el);
dev@112 619 }
dev@59 620 }
dev@59 621
dev@59 622 private static changeColour(layer: Layer, colour: string): void {
dev@59 623 const butcherShapes = (shape) => {
dev@59 624 shape.install({color: () => colour});
dev@59 625 shape.params.color = colour;
dev@59 626 shape.update(layer._renderingContext, layer.data);
dev@59 627 };
dev@59 628
dev@59 629 layer._$itemCommonShapeMap.forEach(butcherShapes);
dev@59 630 layer._$itemShapeMap.forEach(butcherShapes);
dev@59 631 layer.render();
dev@59 632 layer.update();
dev@54 633 }
dev@54 634
dev@51 635 ngOnDestroy(): void {
dev@51 636 this.featureExtractionSubscription.unsubscribe();
dev@53 637 this.playingStateSubscription.unsubscribe();
dev@53 638 this.seekedSubscription.unsubscribe();
dev@51 639 }
dev@154 640
dev@155 641 seekStart(): void {
dev@155 642 this.zoomOnMouseDown = this.timeline.timeContext.zoom;
dev@157 643 this.offsetOnMouseDown = this.timeline.timeContext.offset;
dev@155 644 }
dev@155 645
dev@155 646 seekEnd(x: number): void {
dev@157 647 const hasSameZoom: boolean = this.zoomOnMouseDown ===
dev@157 648 this.timeline.timeContext.zoom;
dev@157 649 const hasSameOffset: boolean = this.offsetOnMouseDown ===
dev@157 650 this.timeline.timeContext.offset;
dev@157 651 if (hasSameZoom && hasSameOffset) {
dev@155 652 this.seek(x);
dev@155 653 }
dev@155 654 }
dev@155 655
dev@154 656 seek(x: number): void {
dev@154 657 if (this.timeline) {
dev@154 658 const timeContext: any = this.timeline.timeContext;
dev@154 659 this.audioService.seekTo(
dev@154 660 timeContext.timeToPixel.invert(x) - timeContext.offset
dev@154 661 );
dev@154 662 }
dev@154 663 }
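  // Conversion sketch (illustrative): timeToPixel.invert(x) maps the clicked
  // x position back to a time on the current scale, and subtracting the view
  // offset (which is negative once the view has scrolled forward) yields the
  // absolute track time passed to the audio player.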
dev@6 664 }