annotate src/app/waveform/waveform.component.ts @ 343:8bfd9586c78a

Move some functions out of waveform and into appropriately named modules.
author Lucas Thompson <dev@lucas.im>
date Tue, 23 May 2017 10:41:36 +0100
parents b5f2ee789fb3
children
import {
  Component,
  OnInit,
  ViewChild,
  ElementRef,
  Input,
  AfterViewInit,
  NgZone,
  OnDestroy,
  ChangeDetectorRef
} from '@angular/core';
import {
  AudioPlayerService, AudioResource,
  AudioResourceError
} from '../services/audio-player/audio-player.service';
import wavesUI from 'waves-ui-piper';
import {
  FeatureExtractionService
} from '../services/feature-extraction/feature-extraction.service';
import {Subscription} from 'rxjs/Subscription';
import {
  FeatureCollection,
  SimpleResponse,
  VectorFeature,
  MatrixFeature,
  TracksFeature
} from 'piper/HigherLevelUtilities';
import {toSeconds, OutputDescriptor} from 'piper';
import {FeatureList, Feature} from 'piper/Feature';
import * as Hammer from 'hammerjs';
import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';
import {iceMapper, sunsetMapper} from '../spectrogram/ColourMap';
import {estimatePercentile} from '../spectrogram/MatrixUtils';

type Layer = any;
type Track = any;
type Timeline = any;
type Colour = string;

function* createColourGenerator(colours: string[]): IterableIterator<string> {
  let index = 0;
  const nColours = colours.length;
  while (true) {
    index = (index + 1) % nColours;
    yield colours[index];
  }
}

const defaultColourGenerator = createColourGenerator([
  '#0868ac', // "sapphire blue", our waveform / header colour
  '#c33c54', // "brick red"
  '#17bebb', // "tiffany blue"
  '#001021', // "rich black"
  '#fa8334', // "mango tango"
  '#034748'  // "deep jungle green"
]);
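
// An illustrative note on the cycle above: because the index is incremented
// before the first yield, the first colour handed out is colours[1]
// ('#c33c54'), not the sapphire blue at colours[0], which the waveform
// itself already uses. Typical usage:
//   const colour = defaultColourGenerator.next().value;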

type HigherLevelFeatureShape = 'regions' | 'instants' | 'notes';
type NoteLikeUnit = 'midi' | 'hz';
interface Note {
  time: number;
  duration: number;
  pitch: number;
  velocity?: number;
}

@Component({
  selector: 'ugly-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;
  @Input() set width(width: number) {
    if (this.timeline) {
      requestAnimationFrame(() => {
        this.timeline.timeContext.visibleWidth = width;
        this.timeline.tracks.update();
      });
    }
  }
  @Input() timeline: Timeline;
  @Input() trackIdPrefix: string;
  @Input() set isSubscribedToExtractionService(isSubscribed: boolean) {
    if (isSubscribed) {
      if (this.featureExtractionSubscription) {
        return;
      }

      this.featureExtractionSubscription =
        this.piperService.featuresExtracted$.subscribe(
          features => {
            this.renderFeatures(features, defaultColourGenerator.next().value);
          });
    } else {
      if (this.featureExtractionSubscription) {
        this.featureExtractionSubscription.unsubscribe();
      }
    }
  }
  @Input() set isSubscribedToAudioService(isSubscribed: boolean) {
    this._isSubscribedToAudioService = isSubscribed;
    if (isSubscribed) {
      if (this.onAudioDataSubscription) {
        return;
      }

      this.onAudioDataSubscription =
        this.audioService.audioLoaded$.subscribe(res => {
          const wasError = (res as AudioResourceError).message != null;

          if (wasError) {
            console.warn('No audio, display error?');
          } else {
            this.audioBuffer = (res as AudioResource).samples;
          }
        });
    } else {
      if (this.onAudioDataSubscription) {
        this.onAudioDataSubscription.unsubscribe();
      }
    }
  }

  get isSubscribedToAudioService(): boolean {
    return this._isSubscribedToAudioService;
  }

  @Input() set isOneShotExtractor(isOneShot: boolean) {
    this._isOneShotExtractor = isOneShot;
  }

  get isOneShotExtractor(): boolean {
    return this._isOneShotExtractor;
  }

  @Input() set isSeeking(isSeeking: boolean) {
    this._isSeeking = isSeeking;
    if (isSeeking) {
      if (this.seekedSubscription) {
        return;
      }
      if (this.playingStateSubscription) {
        return;
      }

      this.seekedSubscription = this.audioService.seeked$.subscribe(() => {
        if (!this.audioService.isPlaying()) {
          this.animate();
        }
      });
      this.playingStateSubscription =
        this.audioService.playingStateChange$.subscribe(
          isPlaying => {
            if (isPlaying) {
              this.animate();
            }
          });
    } else {
      if (this.cursorLayer && this.waveTrack) {
        this.waveTrack.remove(this.cursorLayer);
      }
      if (this.playingStateSubscription) {
        this.playingStateSubscription.unsubscribe();
      }
      if (this.seekedSubscription) {
        this.seekedSubscription.unsubscribe();
      }
    }
  }

  get isSeeking(): boolean {
    return this._isSeeking;
  }

  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    if (this.audioBuffer) {
      this.renderWaveform(this.audioBuffer);
      // this.renderSpectrogram(this.audioBuffer);
    }
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private _audioBuffer: AudioBuffer;
  private _isSubscribedToAudioService: boolean;
  private _isOneShotExtractor: boolean;
  private _isSeeking: boolean;
  private cursorLayer: any;
  private highlightLayer: any;
  private layers: Layer[];
  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private onAudioDataSubscription: Subscription;
  private zoomOnMouseDown: number;
  private offsetOnMouseDown: number;
  private hasShot: boolean;
  private isLoading: boolean;
  private waveTrack: Track;

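  // Note: changeColour below reaches into waves-ui private internals
  // (_$itemCommonShapeMap and _$itemShapeMap are underscore-prefixed,
  // i.e. not public API), so it may break with other versions of
  // waves-ui-piper. It also appears to be unused at this revision.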
  private static changeColour(layer: Layer, colour: string): void {
    const butcherShapes = (shape) => {
      shape.install({color: () => colour});
      shape.params.color = colour;
      shape.update(layer._renderingContext, layer.data);
    };

    layer._$itemCommonShapeMap.forEach(butcherShapes);
    layer._$itemShapeMap.forEach(butcherShapes);
    layer.render();
    layer.update();
  }

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              private ngZone: NgZone,
              private ref: ChangeDetectorRef) {
    this.isSubscribedToAudioService = true;
    this.isSeeking = true;
    this.layers = [];
    this.audioBuffer = undefined;
    this.timeline = undefined;
    this.cursorLayer = undefined;
    this.highlightLayer = undefined;
    this.isLoading = true;
  }

  ngOnInit() {
  }

  ngAfterViewInit(): void {
    this.trackIdPrefix = this.trackIdPrefix || 'default';
    if (this.timeline) {
      // the duration argument is unused when useExistingDuration is true
      this.renderTimeline(undefined, true, true);
    } else {
      this.renderTimeline();
    }
  }

  renderTimeline(duration: number = 1.0,
                 useExistingDuration: boolean = false,
                 isInitialRender: boolean = false): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = '';
    const height: number = track.getBoundingClientRect().height;
    const width: number = track.getBoundingClientRect().width;
    const pixelsPerSecond = width / duration;
    const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline;

    if (hasExistingTimeline) {
      if (!useExistingDuration) {
        this.timeline.pixelsPerSecond = pixelsPerSecond;
        this.timeline.visibleWidth = width;
      }
    } else {
      this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    }
    this.waveTrack = this.timeline.createTrack(
      track,
      height,
      `wave-${this.trackIdPrefix}`
    );
    if (isInitialRender && hasExistingTimeline) {
      // time axis
      const timeAxis = new wavesUI.helpers.TimeAxisLayer({
        height: height,
        color: '#b0b0b0'
      });
      this.addLayer(timeAxis, this.waveTrack, this.timeline.timeContext, true);
      this.cursorLayer = new wavesUI.helpers.CursorLayer({
        height: height,
        color: '#c33c54'
      });
      this.addLayer(
        this.cursorLayer,
        this.waveTrack,
        this.timeline.timeContext
      );
    }
    if ('ontouchstart' in window) {
      interface Point {
        x: number;
        y: number;
      }

      let zoomGestureJustEnded = false;

      const pixelToExponent: Function = wavesUI.utils.scales.linear()
        .domain([0, 100]) // 100px => factor 2
        .range([0, 1]);

      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
        return Math.sqrt(
          Math.pow(p2.x - p1.x, 2) + Math.pow(p2.y - p1.y, 2)
        );
      };

      const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
        return {
          x: 0.5 * (p1.x + p2.x),
          y: 0.5 * (p1.y + p2.y)
        };
      };

      const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, {
        recognizers: [
          [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
        ]
      });

      // HammerJS appears to bind its events to the window, which lets them
      // propagate to other components, so capture this component's timeline
      // in a local rather than relying on `this` inside the handlers.
      const componentTimeline = this.timeline;
      let initialZoom;
      let initialDistance;
      let offsetAtPanStart;
      let startX;
      let isZooming;

      const scroll = (ev) => {
        if (ev.center.x - startX === 0) {
          return;
        }

        if (zoomGestureJustEnded) {
          zoomGestureJustEnded = false;
          console.log('Skip this event: likely a single touch dangling from pinch');
          return;
        }
        componentTimeline.timeContext.offset = offsetAtPanStart +
          componentTimeline.timeContext.timeToPixel.invert(ev.deltaX);
        componentTimeline.tracks.update();
      };

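      // How the pinch maps to zoom (a reading note, not a behaviour change):
      // the change in finger distance is converted to an exponent via the
      // linear scale above (100px of spread is roughly one doubling), so
      // targetZoom = initialZoom * 2^exponent; the offset is then nudged so
      // that the time under the pinch midpoint stays fixed on screen.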
      const zoom = (ev) => {
        if (ev.touches.length < 2) {
          return;
        }

        ev.preventDefault();
        const minZoom = componentTimeline.state.minZoom;
        const maxZoom = componentTimeline.state.maxZoom;
        const p1: Point = {
          x: ev.touches[0].clientX,
          y: ev.touches[0].clientY
        };
        const p2: Point = {
          x: ev.touches[1].clientX,
          y: ev.touches[1].clientY
        };
        const distance = calculateDistance(p1, p2);
        const midPoint = calculateMidPoint(p1, p2);

        const lastCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        const exponent = pixelToExponent(distance - initialDistance);
        const targetZoom = initialZoom * Math.pow(2, exponent);

        componentTimeline.timeContext.zoom =
          Math.min(Math.max(targetZoom, minZoom), maxZoom);

        const newCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        componentTimeline.timeContext.offset += newCenterTime - lastCenterTime;
        componentTimeline.tracks.update();
      };
      hammertime.on('panstart', (ev) => {
        offsetAtPanStart = componentTimeline.timeContext.offset;
        startX = ev.center.x;
      });
      hammertime.on('panleft', scroll);
      hammertime.on('panright', scroll);

      const element: HTMLElement = this.trackDiv.nativeElement;
      element.addEventListener('touchstart', (e) => {
        if (e.touches.length < 2) {
          return;
        }

        isZooming = true;
        initialZoom = componentTimeline.timeContext.zoom;

        initialDistance = calculateDistance({
          x: e.touches[0].clientX,
          y: e.touches[0].clientY
        }, {
          x: e.touches[1].clientX,
          y: e.touches[1].clientY
        });
      });
      element.addEventListener('touchend', () => {
        if (isZooming) {
          isZooming = false;
          zoomGestureJustEnded = true;
        }
      });
      element.addEventListener('touchmove', zoom);
    }
    // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`);
    // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`);
  }

  clearTimeline(): void {
    // Loop through the layers and remove them one by one; waves-ui provides
    // methods for this, but they don't appear to work reliably.
    const timeContextChildren = this.timeline.timeContext._children;
    for (const track of this.timeline.tracks) {
      if (track.layers.length === 0) { continue; }
      const trackLayers = Array.from(track.layers);
      while (trackLayers.length) {
        const layer: Layer = trackLayers.pop();
        if (this.layers.includes(layer)) {
          track.remove(layer);
          this.layers.splice(this.layers.indexOf(layer), 1);
          const index = timeContextChildren.indexOf(layer.timeContext);
          if (index >= 0) {
            timeContextChildren.splice(index, 1);
          }
          layer.destroy();
        }
      }
    }
  }

  renderWaveform(buffer: AudioBuffer): void {
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
    if (this.timeline) {
      // resize
      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;

      this.clearTimeline();

      this.timeline.visibleWidth = width;
      this.timeline.pixelsPerSecond = width / buffer.duration;
      this.waveTrack.height = height;
    } else {
      this.renderTimeline(buffer.duration);
    }
    this.timeline.timeContext.offset =
      0.5 * this.timeline.timeContext.visibleDuration;

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: '#b0b0b0'
    });
    this.addLayer(timeAxis, this.waveTrack, this.timeline.timeContext, true);

    const nchannels = buffer.numberOfChannels;
    const totalWaveHeight = height * 0.9;
    const waveHeight = totalWaveHeight / nchannels;

    for (let ch = 0; ch < nchannels; ++ch) {
      console.log('about to construct a waveform layer for channel ' + ch);
      const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
        top: (height - totalWaveHeight) / 2 + waveHeight * ch,
        height: waveHeight,
        color: '#0868ac',
        channel: ch
      });
      this.addLayer(waveformLayer, this.waveTrack, this.timeline.timeContext);
    }

    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height,
      color: '#c33c54'
    });
    this.addLayer(this.cursorLayer, this.waveTrack, this.timeline.timeContext);
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    this.waveTrack.render();
    this.waveTrack.update();

    this.isLoading = false;
    this.ref.markForCheck();
    this.animate();
  }

  renderSpectrogram(buffer: AudioBuffer): void {
    const height: number =
      this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`);

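    // A note on the analysis parameters below: stepSize and blockSize are
    // the STFT hop and window sizes in samples, so with 44.1 kHz audio a
    // 512-sample hop yields a spectrogram column roughly every 11.6 ms and
    // a 1024-sample window gives bins about 43 Hz apart.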
    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
      top: 0,
      height: height,
      stepSize: 512,
      blockSize: 1024,
      normalise: 'none',
      mapper: sunsetMapper()
    });
    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);

    this.timeline.tracks.update();
  }

  private addLineLayers(features: VectorFeature[],
                        unit: string,
                        colour: Colour) {

    // Winnow out empty features
    features = features.filter(feature => (feature.data.length > 0));

    // First establish a [min, max] range across all of the features
    let [min, max] = features.reduce((range, feature) => {
      return feature.data.reduce((acc, val) => {
        const [accMin, accMax] = acc;
        return [Math.min(accMin, val), Math.max(accMax, val)];
      }, range);
    }, [Infinity, -Infinity]);

    console.log(`addLineLayers: ${features.length} non-empty features, ` +
                `overall min = ${min}, max = ${max}`);

    if (min === Infinity) {
      min = 0;
      max = 1;
    }

    if (isNaN(min) || isNaN(max)) {
      console.warn('addLineLayers: min or max is NaN');
      min = 0;
      max = 1;
    }

    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;

    // Now add a line layer for each vector feature
    const lineLayers = features.map(feature => {

      let duration = 0;

      // Give the plot items positions relative to the start of the
      // line, rather than relative to absolute time 0. This is
      // because we'll be setting the layer timeline start property
      // later on and these will be positioned relative to that

      const plotData = [...feature.data].map((val, i) => {
        const t = i * feature.stepDuration;
        duration = t + feature.stepDuration;
        return {
          cx: t,
          cy: val
        };
      });

      const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
        color: colour,
        height: height,
        yDomain: [min, max]
      });
      this.addLayer(
        lineLayer,
        this.waveTrack,
        this.timeline.timeContext
      );

      // Set start and duration so that the highlight layer can use
      // them to determine which line to draw values from
      lineLayer.start = feature.startTime;
      lineLayer.duration = duration;

      return lineLayer;
    });

    this.addScaleAndHighlight(
      this.waveTrack, lineLayers, unit, colour, min, max
    );
  }

  private addScaleAndHighlight(waveTrack: Track,
                               lineLayers: Layer[],
                               unit: string,
                               colour: Colour,
                               min: number,
                               max: number) {

    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;

    // And a single scale layer at left
    // TODO: display the unit in the scale layer
    const scaleLayer = new wavesUI.helpers.ScaleLayer({
      tickColor: colour,
      textColor: colour,
      height: height,
      yDomain: [min, max]
    });
    this.addLayer(
      scaleLayer,
      waveTrack,
      this.timeline.timeContext
    );

    // And a single highlight layer which uses all of the line layers
    // as its source material
    this.highlightLayer = new wavesUI.helpers.HighlightLayer(lineLayers, {
      opacity: 0.7,
      height: height,
      color: '#c33c54',
      labelOffset: 38,
      yDomain: [min, max],
      unit
    });
    this.addLayer(
      this.highlightLayer,
      waveTrack,
      this.timeline.timeContext
    );
  }

  // TODO refactor - this doesn't belong here
  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
    if (this.isOneShotExtractor && !this.hasShot) {
      this.featureExtractionSubscription.unsubscribe();
      this.hasShot = true;
    }

    if (!extracted.hasOwnProperty('features')
      || !extracted.hasOwnProperty('outputDescriptor')) {
      return;
    }
    if (!extracted.features.hasOwnProperty('shape')
      || !extracted.features.hasOwnProperty('collected')) {
      return;
    }
    const features = extracted.features as FeatureCollection;
    const outputDescriptor = extracted.outputDescriptor;
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;

    let unit = '';
    if (outputDescriptor.configured.hasOwnProperty('unit')) {
      unit = outputDescriptor.configured.unit;
    }

    // TODO refactor all of this
    switch (features.shape) {

      case 'vector': {
        const collected = features.collected as VectorFeature;
        this.addLineLayers([collected], unit, colour);
        break;
      }

      case 'tracks': {
        const collected = features.collected as TracksFeature;
        this.addLineLayers(collected, unit, colour);
        break;
      }

      case 'list': {
        const featureData = features.collected as FeatureList;
        if (featureData.length === 0) {
          return;
        }

        // TODO refactor, this is incomprehensible
        try {
          const featureShape = deduceHigherLevelFeatureShape(
            featureData,
            outputDescriptor
          );
          switch (featureShape) {
            case 'instants': {
              const plotData = featureData.map(feature => ({
                time: toSeconds(feature.timestamp),
                label: feature.label
              }));
              const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
                height: height,
                color: colour,
                labelPosition: 'bottom',
                shadeSegments: true
              });
              this.addLayer(
                featureLayer,
                this.waveTrack,
                this.timeline.timeContext
              );
              break;
            }
            case 'regions': {
              this.renderRegions(
                featureData,
                outputDescriptor,
                this.waveTrack,
                height,
                colour
              );
              break;
            }
            case 'notes': {
              const notes = mapFeaturesToNotes(featureData, outputDescriptor);
              let [min, max] = notes.reduce((acc, note) => {
                const [accMin, accMax] = acc;
                return [
                  Math.min(accMin, note.pitch),
                  Math.max(accMax, note.pitch)
                ];
              }, [Infinity, -Infinity]);
              if (min === Infinity || min < 0 || max < 0) {
                min = 0;
                max = 127;
              }
              // round min and max to octave boundaries (starting at C,
              // as in MIDI)
              min = 12 * Math.floor(min / 12);
              max = 12 * Math.ceil(max / 12);
              const pianoRollLayer = new wavesUI.helpers.PianoRollLayer(
                notes,
                {height: height, color: colour, yDomain: [min, max]}
              );
              this.addLayer(
                pianoRollLayer,
                this.waveTrack,
                this.timeline.timeContext
              );
              this.addScaleAndHighlight(
                this.waveTrack,
                [pianoRollLayer],
                '',
                colour,
                min,
                max
              );
              break;
            }
          }
        } catch (e) {
          console.warn(e); // TODO display the error to the user
        }
        break;
      }
      case 'matrix': {
        const collected = features.collected as MatrixFeature;
        const startTime = collected.startTime; // TODO: make use of this
        const stepDuration = collected.stepDuration;
        const matrixData = collected.data;

        if (matrixData.length === 0) {
          return;
        }

        console.log('matrix data length = ' + matrixData.length);
        console.log('height of first column = ' + matrixData[0].length);
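        // Scale brightness against (roughly) the 95th-percentile magnitude,
        // so that a handful of outlier bins don't wash out the whole plot.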
        const targetValue = estimatePercentile(matrixData, 95);
        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
        const matrixEntity =
          new wavesUI.utils.PrefilledMatrixEntity(matrixData,
                                                  0, // startTime
                                                  stepDuration);
        const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
          gain,
          top: 0,
          height: height,
          normalise: 'none',
          mapper: iceMapper()
        });
        this.addLayer(
          matrixLayer,
          this.waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      default:
        console.warn(
          `Cannot render an appropriate layer for feature shape '${features.shape}'`
        );
    }

    this.isLoading = false;
    this.ref.markForCheck();
    this.timeline.tracks.update();
    this.animate();
  }

  private animate(): void {
    if (!this.isSeeking) {
      return;
    }

    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        if (this.highlightLayer) {
          this.highlightLayer.currentPosition = currentTime;
          this.highlightLayer.update();
        }

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more declarative
        // this kind of logic should also be tested
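        // Paging behaviour: when the playhead passes the right edge of the
        // view, jump forward one page; when it falls off the left edge (e.g.
        // after a backwards seek), jump back. If it has moved by more than a
        // whole page, re-centre the view on the playhead instead.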
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          const hasSkippedMultiplePages =
            offsetTimestamp - visibleDuration > visibleDuration;

          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset - visibleDuration;
          this.timeline.tracks.update();
        }

        if (mustPageBackward) {
          const hasSkippedMultiplePages =
            currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.audioService.isPlaying()) {
          requestAnimationFrame(updateSeekingCursor);
        }
      };
      updateSeekingCursor();
    });
  }

  // TODO not sure how much of the logic in here is actually sensible w.r.t.
  // what it functionally produces
  private renderRegions(featureData: FeatureList,
                        outputDescriptor: OutputDescriptor,
                        waveTrack: any,
                        height: number,
                        colour: Colour) {
    console.log('Output is of region type');
    const binCount = outputDescriptor.configured.binCount || 0;
    const isBarRegion = (featureData[0].featureValues != null &&
                         featureData[0].featureValues.length >= 1) ||
                        binCount >= 1;
    const getSegmentArgs = () => {
      if (isBarRegion) {

        // TODO refactor - this is messy
        interface FoldsToNumber<T> {
          reduce(fn: (previousValue: number,
                      currentValue: T,
                      currentIndex: number,
                      array: ArrayLike<T>) => number,
                 initialValue?: number): number;
        }

        // TODO potentially change impl., i.e. avoid reduce
        const findMin = <T>(arr: FoldsToNumber<T>,
                            getElement: (x: T) => number): number => {
          return arr.reduce(
            (min, val) => Math.min(min, getElement(val)),
            Infinity
          );
        };

        const findMax = <T>(arr: FoldsToNumber<T>,
                            getElement: (x: T) => number): number => {
          return arr.reduce(
            (max, val) => Math.max(max, getElement(val)),
            -Infinity
          );
        };

        const min = findMin<Feature>(featureData, (x: Feature) => {
          return findMin<number>(x.featureValues, y => y);
        });

        const max = findMax<Feature>(featureData, (x: Feature) => {
          return findMax<number>(x.featureValues, y => y);
        });

        const barHeight = 1.0 / height;
        return [
          featureData.reduce((bars, feature) => {
            const staticProperties = {
              x: toSeconds(feature.timestamp),
              width: toSeconds(feature.duration),
              height: min + barHeight,
              color: colour,
              opacity: 0.8
            };
            // TODO avoid copying Float32Array to an array - map is problematic here
            return bars.concat([...feature.featureValues]
              .map(val => Object.assign({}, staticProperties, {y: val})));
          }, []),
          {yDomain: [min, max + barHeight], height: height} as any
        ];
      } else {
        return [featureData.map(feature => ({
          x: toSeconds(feature.timestamp),
          width: toSeconds(feature.duration),
          color: colour,
          opacity: 0.8
        })), {height: height}];
      }
    };

    const segmentLayer = new wavesUI.helpers.SegmentLayer(
      ...getSegmentArgs()
    );
    this.addLayer(
      segmentLayer,
      waveTrack,
      this.timeline.timeContext
    );
  }

  private addLayer(layer: Layer,
                   track: Track,
                   timeContext: any,
                   isAxis: boolean = false): void {
    timeContext.zoom = 1.0;
    if (!layer.timeContext) {
      layer.setTimeContext(isAxis ?
        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
    }
    track.add(layer);
    this.layers.push(layer);
    layer.render();
    layer.update();
    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
      // re-append the cursor's element so it stays on top of the new layer
      track.$layout.appendChild(this.cursorLayer.$el);
    }
  }

  ngOnDestroy(): void {
    if (this.featureExtractionSubscription) {
      this.featureExtractionSubscription.unsubscribe();
    }
    if (this.playingStateSubscription) {
      this.playingStateSubscription.unsubscribe();
    }
    if (this.seekedSubscription) {
      this.seekedSubscription.unsubscribe();
    }
    if (this.onAudioDataSubscription) {
      this.onAudioDataSubscription.unsubscribe();
    }
  }

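  // Click-to-seek: seekStart records the view state on mousedown, and
  // seekEnd only treats the mouseup as a seek when neither zoom nor offset
  // has changed in between, so drags and pinch-zooms don't also jump the
  // playhead.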
  seekStart(): void {
    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
    this.offsetOnMouseDown = this.timeline.timeContext.offset;
  }

  seekEnd(x: number): void {
    const hasSameZoom: boolean = this.zoomOnMouseDown ===
      this.timeline.timeContext.zoom;
    const hasSameOffset: boolean = this.offsetOnMouseDown ===
      this.timeline.timeContext.offset;
    if (hasSameZoom && hasSameOffset) {
      this.seek(x);
    }
  }

  seek(x: number): void {
    if (this.timeline) {
      const timeContext: any = this.timeline.timeContext;
      const timeX = timeContext.timeToPixel.invert(x) - timeContext.offset;
      if (this.isSeeking) {
        this.audioService.seekTo(timeX);
      } else {
        if (this.highlightLayer) {
          this.highlightLayer.currentPosition = timeX;
          this.highlightLayer.update();
        }
      }
    }
  }
}

function deduceHigherLevelFeatureShape(featureData: FeatureList,
                                       descriptor: OutputDescriptor)
  : HigherLevelFeatureShape {
  // TODO look at the output descriptor instead of directly inspecting features
  const hasDuration = descriptor.configured.hasDuration;
  const binCount = descriptor.configured.binCount;
  const isMarker = !hasDuration
    && binCount === 0
    && featureData[0].featureValues == null;

  const isMaybeNote =
    getCanonicalNoteLikeUnit(descriptor.configured.unit) != null &&
    (binCount === 1 || binCount === 2);

  const isRegionLike = hasDuration && featureData[0].timestamp != null;

  const isNote = isMaybeNote && isRegionLike;
  const isRegion = !isMaybeNote && isRegionLike;
  if (isMarker) {
    return 'instants';
  }
  if (isNote) {
    return 'notes';
  }
  if (isRegion) {
    return 'regions';
  }
  throw new Error('No shape could be deduced');
}
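
// Illustrative examples of the deduction above (hypothetical descriptors):
//  - no duration, binCount 0, no feature values -> 'instants' (e.g. a beat
//    tracker)
//  - duration + timestamp, unit 'Hz' or 'MIDI', 1-2 bins -> 'notes' (e.g. a
//    note transcriber)
//  - duration + timestamp, any other unit or bin count -> 'regions' (e.g. a
//    segmenter)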

function getCanonicalNoteLikeUnit(unit: string): NoteLikeUnit | null {
  if (!unit) {
    return null;
  }
  const canonicalUnits: NoteLikeUnit[] = ['midi', 'hz'];
  return canonicalUnits.find(canonicalUnit => {
    return unit.toLowerCase().indexOf(canonicalUnit) >= 0;
  }) || null;
}

function mapFeaturesToNotes(featureData: FeatureList,
                            descriptor: OutputDescriptor): Note[] {
  const canonicalUnit = getCanonicalNoteLikeUnit(descriptor.configured.unit);
  const isHz = canonicalUnit === 'hz';
  return featureData.map(feature => ({
    time: toSeconds(feature.timestamp),
    duration: toSeconds(feature.duration),
    pitch: isHz ?
      frequencyToMidiNote(feature.featureValues[0]) : feature.featureValues[0]
  }));
}

function frequencyToMidiNote(frequency: number,
                             concertA: number = 440.0): number {
  return 69 + 12 * Math.log2(frequency / concertA);
}
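
// Sanity examples (not executed): frequencyToMidiNote(440) === 69, i.e.
// concert A, and frequencyToMidiNote(261.63) is approximately 60, middle C.
// The result is fractional for out-of-tune input; the piano-roll layer is
// assumed to cope with fractional pitch values.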