annotate src/app/waveform/waveform.component.ts @ 341:684619d3fad5

Allow interacting with the highlight layer regardless of whether playback / seeking is supported. Also remove the cursor layer when component seeking is disabled.
author Lucas Thompson <dev@lucas.im>
date Fri, 19 May 2017 16:18:34 +0100
parents 097d93b11445
children b5f2ee789fb3
rev   line source
dev@10 1 import {
dev@236 2 Component,
dev@236 3 OnInit,
dev@236 4 ViewChild,
dev@236 5 ElementRef,
dev@236 6 Input,
dev@236 7 AfterViewInit,
dev@236 8 NgZone,
dev@236 9 OnDestroy,
dev@236 10 ChangeDetectorRef
dev@10 11 } from '@angular/core';
dev@196 12 import {
dev@196 13 AudioPlayerService, AudioResource,
dev@196 14 AudioResourceError
dev@236 15 } from '../services/audio-player/audio-player.service';
dev@289 16 import wavesUI from 'waves-ui-piper';
dev@63 17 import {
dev@64 18 FeatureExtractionService
dev@236 19 } from '../services/feature-extraction/feature-extraction.service';
dev@236 20 import {Subscription} from 'rxjs/Subscription';
dev@63 21 import {
dev@63 22 FeatureCollection,
cannam@296 23 SimpleResponse,
cannam@299 24 VectorFeature,
cannam@299 25 MatrixFeature,
cannam@299 26 TracksFeature
dev@236 27 } from 'piper/HigherLevelUtilities';
dev@319 28 import {toSeconds, OutputDescriptor} from 'piper';
dev@236 29 import {FeatureList, Feature} from 'piper/Feature';
dev@81 30 import * as Hammer from 'hammerjs';
dev@236 31 import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';
dev@8 32
dev@54 33 type Layer = any;
dev@54 34 type Track = any;
dev@59 35 type Colour = string;
dev@6 36
dev@268 37
dev@268 38
dev@268 39 function* createColourGenerator(colours) {
cannam@257 40 let index = 0;
dev@268 41 const nColours = colours.length;
cannam@257 42 while (true) {
dev@268 43 yield colours[index = ++index % nColours];
cannam@257 44 }
dev@268 45 }
dev@268 46
dev@268 47 const defaultColourGenerator = createColourGenerator([
dev@268 48 '#0868ac', // "sapphire blue", our waveform / header colour
dev@268 49 '#c33c54', // "brick red"
dev@268 50 '#17bebb', // "tiffany blue"
dev@268 51 '#001021', // "rich black"
dev@268 52 '#fa8334', // "mango tango"
dev@268 53 '#034748' // "deep jungle green"
dev@268 54 ]);
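// Note: because the generator pre-increments its index, the first call to
// defaultColourGenerator.next().value yields '#c33c54' ("brick red"), not
// '#0868ac'; the sapphire blue already used by the waveform only comes round
// sixth in each cycle.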
cannam@257 55
dev@319 56 type HigherLevelFeatureShape = 'regions' | 'instants' | 'notes';
dev@319 57 type NoteLikeUnit = 'midi' | 'hz';
dev@319 58 interface Note {
dev@319 59 time: number;
dev@319 60 duration: number;
dev@319 61 pitch: number;
dev@319 62 velocity?: number;
dev@319 63 }
dev@319 64
dev@6 65 @Component({
dev@236 66 selector: 'ugly-waveform',
dev@6 67 templateUrl: './waveform.component.html',
dev@6 68 styleUrls: ['./waveform.component.css']
dev@6 69 })
cannam@257 70
dev@51 71 export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {
dev@20 72
dev@8 73 @ViewChild('track') trackDiv: ElementRef;
dev@285 74 @Input() set width(width: number) {
dev@285 75 if (this.timeline) {
dev@285 76 requestAnimationFrame(() => {
dev@285 77 this.timeline.timeContext.visibleWidth = width;
dev@285 78 this.timeline.tracks.update();
dev@285 79 });
dev@285 80 }
dev@285 81 }
dev@189 82 @Input() timeline: Timeline;
dev@189 83 @Input() trackIdPrefix: string;
dev@196 84 @Input() set isSubscribedToExtractionService(isSubscribed: boolean) {
dev@196 85 if (isSubscribed) {
dev@196 86 if (this.featureExtractionSubscription) {
dev@196 87 return;
dev@196 88 }
dev@268 89
dev@196 90 this.featureExtractionSubscription =
dev@196 91 this.piperService.featuresExtracted$.subscribe(
dev@196 92 features => {
dev@268 93 this.renderFeatures(features, defaultColourGenerator.next().value);
dev@196 94 });
dev@196 95 } else {
dev@196 96 if (this.featureExtractionSubscription) {
dev@196 97 this.featureExtractionSubscription.unsubscribe();
dev@196 98 }
dev@196 99 }
dev@196 100 }
dev@196 101 @Input() set isSubscribedToAudioService(isSubscribed: boolean) {
dev@196 102 this._isSubscribedToAudioService = isSubscribed;
dev@196 103 if (isSubscribed) {
dev@196 104 if (this.onAudioDataSubscription) {
dev@196 105 return;
dev@196 106 }
dev@196 107
dev@196 108 this.onAudioDataSubscription =
dev@196 109 this.audioService.audioLoaded$.subscribe(res => {
dev@196 110 const wasError = (res as AudioResourceError).message != null;
dev@196 111
dev@196 112 if (wasError) {
dev@196 113 console.warn('No audio, display error?');
dev@196 114 } else {
dev@196 115 this.audioBuffer = (res as AudioResource).samples;
dev@196 116 }
dev@196 117 });
dev@196 118 } else {
dev@196 119 if (this.onAudioDataSubscription) {
dev@196 120 this.onAudioDataSubscription.unsubscribe();
dev@196 121 }
dev@196 122 }
dev@196 123 }
dev@196 124
dev@196 125 get isSubscribedToAudioService(): boolean {
dev@196 126 return this._isSubscribedToAudioService;
dev@196 127 }
dev@196 128
dev@196 129 @Input() set isOneShotExtractor(isOneShot: boolean) {
dev@196 130 this._isOneShotExtractor = isOneShot;
dev@196 131 }
dev@196 132
dev@196 133 get isOneShotExtractor(): boolean {
dev@196 134 return this._isOneShotExtractor;
dev@196 135 }
dev@196 136
dev@196 137 @Input() set isSeeking(isSeeking: boolean) {
dev@196 138 this._isSeeking = isSeeking;
dev@196 139 if (isSeeking) {
dev@196 140 if (this.seekedSubscription) {
dev@196 141 return;
dev@196 142 }
dev@236 143 if (this.playingStateSubscription) {
dev@196 144 return;
dev@196 145 }
dev@196 146
dev@196 147 this.seekedSubscription = this.audioService.seeked$.subscribe(() => {
dev@337 148 if (!this.audioService.isPlaying()) {
dev@196 149 this.animate();
dev@236 150 }
dev@196 151 });
dev@196 152 this.playingStateSubscription =
dev@196 153 this.audioService.playingStateChange$.subscribe(
dev@196 154 isPlaying => {
dev@337 155 if (isPlaying) {
dev@196 156 this.animate();
dev@236 157 }
dev@196 158 });
dev@196 159 } else {
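// seeking disabled: remove the cursor layer from the track and drop the
// playback/seek subscriptions that drive it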
dev@341 160 if (this.cursorLayer && this.waveTrack) {
dev@341 161 this.waveTrack.remove(this.cursorLayer);
dev@341 162 }
dev@196 163 if (this.playingStateSubscription) {
dev@196 164 this.playingStateSubscription.unsubscribe();
dev@196 165 }
dev@196 166 if (this.seekedSubscription) {
dev@196 167 this.seekedSubscription.unsubscribe();
dev@196 168 }
dev@196 169 }
dev@196 170 }
dev@196 171
dev@196 172 get isSeeking(): boolean {
dev@196 173 return this._isSeeking;
dev@196 174 }
dev@196 175
dev@16 176 set audioBuffer(buffer: AudioBuffer) {
dev@16 177 this._audioBuffer = buffer || undefined;
cannam@117 178 if (this.audioBuffer) {
dev@20 179 this.renderWaveform(this.audioBuffer);
dev@180 180 // this.renderSpectrogram(this.audioBuffer);
cannam@117 181 }
dev@16 182 }
dev@16 183
dev@16 184 get audioBuffer(): AudioBuffer {
dev@16 185 return this._audioBuffer;
dev@16 186 }
dev@16 187
dev@196 188 private _audioBuffer: AudioBuffer;
dev@196 189 private _isSubscribedToAudioService: boolean;
dev@196 190 private _isOneShotExtractor: boolean;
dev@196 191 private _isSeeking: boolean;
dev@196 192 private cursorLayer: any;
cannam@254 193 private highlightLayer: any;
dev@196 194 private layers: Layer[];
dev@51 195 private featureExtractionSubscription: Subscription;
dev@53 196 private playingStateSubscription: Subscription;
dev@53 197 private seekedSubscription: Subscription;
dev@196 198 private onAudioDataSubscription: Subscription;
dev@155 199 private zoomOnMouseDown: number;
dev@157 200 private offsetOnMouseDown: number;
dev@196 201 private hasShot: boolean;
dev@196 202 private isLoading: boolean;
dev@341 203 private waveTrack: Track;
dev@51 204
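// changeColour reaches into waves-ui private state (_$itemCommonShapeMap,
// _$itemShapeMap, _renderingContext) to recolour a layer's shapes in place;
// waves-ui does not appear to expose a public API for this, hence the
// "butcher" naming below.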
dev@236 205 private static changeColour(layer: Layer, colour: string): void {
dev@236 206 const butcherShapes = (shape) => {
dev@236 207 shape.install({color: () => colour});
dev@236 208 shape.params.color = colour;
dev@236 209 shape.update(layer._renderingContext, layer.data);
dev@236 210 };
dev@236 211
dev@236 212 layer._$itemCommonShapeMap.forEach(butcherShapes);
dev@236 213 layer._$itemShapeMap.forEach(butcherShapes);
dev@236 214 layer.render();
dev@236 215 layer.update();
dev@236 216 }
dev@236 217
dev@31 218 constructor(private audioService: AudioPlayerService,
dev@51 219 private piperService: FeatureExtractionService,
dev@234 220 private ngZone: NgZone,
dev@234 221 private ref: ChangeDetectorRef) {
dev@196 222 this.isSubscribedToAudioService = true;
dev@196 223 this.isSeeking = true;
dev@185 224 this.layers = [];
dev@196 225 this.audioBuffer = undefined;
dev@54 226 this.timeline = undefined;
dev@54 227 this.cursorLayer = undefined;
cannam@254 228 this.highlightLayer = undefined;
dev@196 229 this.isLoading = true;
dev@51 230 }
dev@51 231
dev@53 232 ngOnInit() {
dev@53 233 }
dev@10 234
dev@10 235 ngAfterViewInit(): void {
dev@236 236 this.trackIdPrefix = this.trackIdPrefix || 'default';
dev@196 237 if (this.timeline) {
dev@196 238 this.renderTimeline(null, true, true);
dev@196 239 } else {
dev@196 240 this.renderTimeline();
dev@196 241 }
dev@20 242 }
dev@20 243
dev@196 244 renderTimeline(duration: number = 1.0,
dev@196 245 useExistingDuration: boolean = false,
dev@196 246 isInitialRender: boolean = false): Timeline {
dev@18 247 const track: HTMLElement = this.trackDiv.nativeElement;
dev@236 248 track.innerHTML = '';
dev@18 249 const height: number = track.getBoundingClientRect().height;
dev@18 250 const width: number = track.getBoundingClientRect().width;
dev@18 251 const pixelsPerSecond = width / duration;
dev@196 252 const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline;
dev@196 253
dev@196 254 if (hasExistingTimeline) {
dev@196 255 if (!useExistingDuration) {
dev@196 256 this.timeline.pixelsPerSecond = pixelsPerSecond;
dev@196 257 this.timeline.visibleWidth = width;
dev@196 258 }
dev@180 259 } else {
dev@180 260 this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
dev@180 261 }
dev@341 262 this.waveTrack = this.timeline.createTrack(
dev@196 263 track,
dev@196 264 height,
dev@196 265 `wave-${this.trackIdPrefix}`
dev@196 266 );
dev@196 267 if (isInitialRender && hasExistingTimeline) {
dev@196 268 // time axis
dev@196 269 const timeAxis = new wavesUI.helpers.TimeAxisLayer({
dev@196 270 height: height,
dev@196 271 color: '#b0b0b0'
dev@196 272 });
dev@341 273 this.addLayer(timeAxis, this.waveTrack, this.timeline.timeContext, true);
dev@196 274 this.cursorLayer = new wavesUI.helpers.CursorLayer({
cannam@257 275 height: height,
cannam@257 276 color: '#c33c54'
dev@196 277 });
dev@341 278 this.addLayer(
dev@341 279 this.cursorLayer,
dev@341 280 this.waveTrack,
dev@341 281 this.timeline.timeContext
dev@341 282 );
dev@196 283 }
dev@196 284 if ('ontouchstart' in window) {
dev@196 285 interface Point {
dev@196 286 x: number;
dev@196 287 y: number;
dev@196 288 }
dev@196 289
dev@236 290 let zoomGestureJustEnded = false;
dev@196 291
dev@196 292 const pixelToExponent: Function = wavesUI.utils.scales.linear()
dev@196 293 .domain([0, 100]) // 100px => factor 2
dev@196 294 .range([0, 1]);
dev@196 295
dev@196 296 const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
dev@196 297 return Math.pow(
dev@196 298 Math.pow(p2.x - p1.x, 2) +
dev@196 299 Math.pow(p2.y - p1.y, 2), 0.5);
dev@196 300 };
dev@196 301
dev@205 302 const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
dev@205 303 return {
dev@205 304 x: 0.5 * (p1.x + p2.x),
dev@205 305 y: 0.5 * (p1.y + p2.y)
dev@205 306 };
dev@205 307 };
dev@205 308
dev@205 309 const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, {
dev@205 310 recognizers: [
dev@205 311 [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
dev@205 312 ]
dev@205 313 });
dev@204 314
dev@204 315 // HammerJS seems to bind its events to the window, causing them to
dev@204 316 // propagate to other components; capture this component's timeline locally
dev@204 317 const componentTimeline = this.timeline;
dev@204 318 let initialZoom;
dev@204 319 let initialDistance;
dev@204 320 let offsetAtPanStart;
dev@205 321 let startX;
dev@205 322 let isZooming;
dev@204 323
dev@196 324 const scroll = (ev) => {
dev@236 325 if (ev.center.x - startX === 0) {
dev@236 326 return;
dev@236 327 }
dev@236 328
dev@196 329 if (zoomGestureJustEnded) {
dev@196 330 zoomGestureJustEnded = false;
dev@236 331 console.log('Skip this event: likely a single touch dangling from pinch');
dev@196 332 return;
dev@196 333 }
dev@204 334 componentTimeline.timeContext.offset = offsetAtPanStart +
dev@204 335 componentTimeline.timeContext.timeToPixel.invert(ev.deltaX);
dev@204 336 componentTimeline.tracks.update();
dev@196 337 };
dev@196 338
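// Pinch-zoom sketch: the change in distance between the two touch points is
// mapped linearly to an exponent (100px of spread ~ one factor of 2, via
// pixelToExponent), giving targetZoom = initialZoom * 2^exponent; the offset
// is then adjusted so the time under the pinch midpoint stays fixed.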
dev@196 339 const zoom = (ev) => {
dev@236 340 if (ev.touches.length < 2) {
dev@236 341 return;
dev@236 342 }
dev@236 343
dev@214 344 ev.preventDefault();
dev@204 345 const minZoom = componentTimeline.state.minZoom;
dev@204 346 const maxZoom = componentTimeline.state.maxZoom;
dev@205 347 const p1: Point = {
dev@218 348 x: ev.touches[0].clientX,
dev@218 349 y: ev.touches[0].clientY
dev@205 350 };
dev@205 351 const p2: Point = {
dev@218 352 x: ev.touches[1].clientX,
dev@218 353 y: ev.touches[1].clientY
dev@205 354 };
dev@205 355 const distance = calculateDistance(p1, p2);
dev@205 356 const midPoint = calculateMidPoint(p1, p2);
dev@196 357
dev@196 358 const lastCenterTime =
dev@205 359 componentTimeline.timeContext.timeToPixel.invert(midPoint.x);
dev@196 360
dev@204 361 const exponent = pixelToExponent(distance - initialDistance);
dev@204 362 const targetZoom = initialZoom * Math.pow(2, exponent);
dev@196 363
dev@204 364 componentTimeline.timeContext.zoom =
dev@196 365 Math.min(Math.max(targetZoom, minZoom), maxZoom);
dev@196 366
dev@196 367 const newCenterTime =
dev@205 368 componentTimeline.timeContext.timeToPixel.invert(midPoint.x);
dev@196 369
dev@204 370 componentTimeline.timeContext.offset += newCenterTime - lastCenterTime;
dev@204 371 componentTimeline.tracks.update();
dev@196 372 };
dev@205 373 hammertime.on('panstart', (ev) => {
dev@204 374 offsetAtPanStart = componentTimeline.timeContext.offset;
dev@205 375 startX = ev.center.x;
dev@196 376 });
dev@196 377 hammertime.on('panleft', scroll);
dev@196 378 hammertime.on('panright', scroll);
dev@205 379
dev@205 380
dev@205 381 const element: HTMLElement = this.trackDiv.nativeElement;
dev@205 382 element.addEventListener('touchstart', (e) => {
dev@236 383 if (e.touches.length < 2) {
dev@236 384 return;
dev@236 385 }
dev@236 386
dev@205 387 isZooming = true;
dev@204 388 initialZoom = componentTimeline.timeContext.zoom;
dev@196 389
dev@204 390 initialDistance = calculateDistance({
dev@218 391 x: e.touches[0].clientX,
dev@218 392 y: e.touches[0].clientY
dev@196 393 }, {
dev@218 394 x: e.touches[1].clientX,
dev@218 395 y: e.touches[1].clientY
dev@196 396 });
dev@196 397 });
dev@205 398 element.addEventListener('touchend', () => {
dev@205 399 if (isZooming) {
dev@205 400 isZooming = false;
dev@205 401 zoomGestureJustEnded = true;
dev@205 402 }
dev@301 403 });
dev@205 404 element.addEventListener('touchmove', zoom);
dev@196 405 }
dev@189 406 // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`);
dev@189 407 // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`);
dev@54 408 }
dev@18 409
cannam@108 410 estimatePercentile(matrix, percentile) {
cannam@108 411 // our sample is not evenly distributed across the whole data set:
cannam@108 412 // it is guaranteed to include at least one sample from every
cannam@108 413 // column, and could sample some values more than once. But it
cannam@108 414 // should be good enough in most cases (todo: show this)
cannam@109 415 if (matrix.length === 0) {
cannam@109 416 return 0.0;
cannam@109 417 }
cannam@108 418 const w = matrix.length;
cannam@108 419 const h = matrix[0].length;
cannam@108 420 const n = w * h;
cannam@109 421 const m = (n > 50000 ? 50000 : n); // should base that on the %ile
cannam@108 422 let m_per = Math.floor(m / w);
dev@236 423 if (m_per < 1) {
dev@236 424 m_per = 1;
dev@236 425 }
dev@236 426
dev@236 427 const sample = [];
cannam@108 428 for (let x = 0; x < w; ++x) {
cannam@108 429 for (let i = 0; i < m_per; ++i) {
cannam@108 430 const y = Math.floor(Math.random() * h);
cannam@109 431 const value = matrix[x][y];
cannam@109 432 if (!isNaN(value) && value !== Infinity) {
cannam@109 433 sample.push(value);
cannam@109 434 }
cannam@108 435 }
cannam@108 436 }
cannam@109 437 if (sample.length === 0) {
dev@236 438 console.log('WARNING: No samples gathered, even though we hoped for ' +
dev@301 439 (m_per * w) + ' of them');
cannam@109 440 return 0.0;
cannam@109 441 }
dev@236 442 sample.sort((a, b) => { return a - b; });
cannam@108 443 const ix = Math.floor((sample.length * percentile) / 100);
dev@236 444 console.log('Estimating ' + percentile + '-%ile of ' +
dev@301 445 n + '-sample dataset (' + w + ' x ' + h + ') as value ' + ix +
dev@301 446 ' of sorted ' + sample.length + '-sample subset');
cannam@108 447 const estimate = sample[ix];
dev@236 448 console.log('Estimate is: ' + estimate + ' (where min sampled value = ' +
dev@301 449 sample[0] + ' and max = ' + sample[sample.length - 1] + ')');
cannam@108 450 return estimate;
cannam@108 451 }
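// Worked example (assuming no NaN/Infinity values are filtered out): for a
// 1000 x 200 matrix, n = 200000 is capped at m = 50000, so m_per = 50 random
// samples are drawn per column; the 95th-percentile estimate is then
// sample[floor(50000 * 95 / 100)] = sample[47500] of the sorted subset.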
cannam@108 452
cannam@108 453 interpolatingMapper(hexColours) {
cannam@108 454 const colours = hexColours.map(n => {
cannam@108 455 const i = parseInt(n, 16);
cannam@118 456 return [ ((i >> 16) & 255) / 255.0,
dev@301 457 ((i >> 8) & 255) / 255.0,
dev@301 458 ((i) & 255) / 255.0 ];
cannam@108 459 });
cannam@108 460 const last = colours.length - 1;
cannam@108 461 return (value => {
cannam@108 462 const m = value * last;
cannam@108 463 if (m >= last) {
cannam@108 464 return colours[last];
cannam@108 465 }
cannam@108 466 if (m <= 0) {
cannam@108 467 return colours[0];
cannam@108 468 }
cannam@108 469 const base = Math.floor(m);
cannam@108 470 const prop0 = base + 1.0 - m;
cannam@108 471 const prop1 = m - base;
cannam@108 472 const c0 = colours[base];
dev@236 473 const c1 = colours[base + 1];
cannam@118 474 return [ c0[0] * prop0 + c1[0] * prop1,
dev@301 475 c0[1] * prop0 + c1[1] * prop1,
dev@301 476 c0[2] * prop0 + c1[2] * prop1 ];
cannam@108 477 });
cannam@108 478 }
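// e.g. this.interpolatingMapper(['000000', 'ffffff'])(0.5) interpolates the
// two stops to [0.5, 0.5, 0.5] (mid grey); inputs outside [0, 1] are clamped
// to the end colours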
dev@110 479
cannam@108 480 iceMapper() {
dev@236 481 const hexColours = [
cannam@108 482 // Based on ColorBrewer ylGnBu
dev@236 483 'ffffff', 'ffff00', 'f7fcf0', 'e0f3db', 'ccebc5', 'a8ddb5',
dev@236 484 '7bccc4', '4eb3d3', '2b8cbe', '0868ac', '084081', '042040'
cannam@108 485 ];
cannam@108 486 hexColours.reverse();
cannam@108 487 return this.interpolatingMapper(hexColours);
cannam@108 488 }
dev@110 489
cannam@118 490 hsv2rgb(h, s, v) { // all values in range [0, 1]
cannam@118 491 const i = Math.floor(h * 6);
cannam@118 492 const f = h * 6 - i;
cannam@118 493 const p = v * (1 - s);
cannam@118 494 const q = v * (1 - f * s);
cannam@118 495 const t = v * (1 - (1 - f) * s);
cannam@118 496 let r = 0, g = 0, b = 0;
cannam@118 497 switch (i % 6) {
dev@301 498 case 0: r = v; g = t; b = p; break;
dev@301 499 case 1: r = q; g = v; b = p; break;
dev@301 500 case 2: r = p; g = v; b = t; break;
dev@301 501 case 3: r = p; g = q; b = v; break;
dev@301 502 case 4: r = t; g = p; b = v; break;
dev@301 503 case 5: r = v; g = p; b = q; break;
cannam@118 504 }
cannam@118 505 return [ r, g, b ];
cannam@118 506 }
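// e.g. hsv2rgb(0, 1, 1) -> [1, 0, 0] (pure red), and hsv2rgb(0.3333, 1, 1)
// is approximately [0, 1, 0] (pure green)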
dev@122 507
cannam@118 508 greenMapper() {
cannam@118 509 const blue = 0.6666;
cannam@118 510 const pieslice = 0.3333;
cannam@118 511 return (value => {
cannam@118 512 const h = blue - value * 2.0 * pieslice;
cannam@118 513 const s = 0.5 + value / 2.0;
cannam@118 514 const v = value;
cannam@118 515 return this.hsv2rgb(h, s, v);
cannam@118 516 });
cannam@118 517 }
cannam@118 518
cannam@118 519 sunsetMapper() {
cannam@118 520 return (value => {
dev@236 521 const r = (value - 0.24) * 2.38;
dev@236 522 const g = (value - 0.64) * 2.777;
cannam@118 523 let b = (3.6 * value);
dev@236 524 if (value > 0.277) {
dev@236 525 b = 2.0 - b;
dev@236 526 }
cannam@118 527 return [ r, g, b ];
cannam@118 528 });
cannam@118 529 }
cannam@118 530
dev@122 531 clearTimeline(): void {
dev@122 532 // loop through the layers and remove them manually; waves-ui provides methods for this, but they do not appear to work reliably
dev@122 533 const timeContextChildren = this.timeline.timeContext._children;
dev@236 534 for (const track of this.timeline.tracks) {
dev@122 535 if (track.layers.length === 0) { continue; }
dev@122 536 const trackLayers = Array.from(track.layers);
dev@122 537 while (trackLayers.length) {
dev@236 538 const layer: Layer = trackLayers.pop();
dev@185 539 if (this.layers.includes(layer)) {
dev@185 540 track.remove(layer);
dev@185 541 this.layers.splice(this.layers.indexOf(layer), 1);
dev@185 542 const index = timeContextChildren.indexOf(layer.timeContext);
dev@185 543 if (index >= 0) {
dev@185 544 timeContextChildren.splice(index, 1);
dev@185 545 }
dev@185 546 layer.destroy();
dev@122 547 }
dev@122 548 }
dev@122 549 }
dev@122 550 }
dev@122 551
dev@54 552 renderWaveform(buffer: AudioBuffer): void {
dev@341 553 const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
dev@54 554 if (this.timeline) {
dev@54 555 // resize
dev@54 556 const width = this.trackDiv.nativeElement.getBoundingClientRect().width;
dev@55 557
dev@122 558 this.clearTimeline();
dev@59 559
dev@54 560 this.timeline.visibleWidth = width;
dev@54 561 this.timeline.pixelsPerSecond = width / buffer.duration;
dev@341 562 this.waveTrack.height = height;
dev@54 563 } else {
dev@236 564 this.renderTimeline(buffer.duration);
dev@54 565 }
dev@83 566 this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;
cannam@106 567
dev@18 568 // time axis
dev@18 569 const timeAxis = new wavesUI.helpers.TimeAxisLayer({
dev@18 570 height: height,
cannam@106 571 color: '#b0b0b0'
dev@18 572 });
dev@341 573 this.addLayer(timeAxis, this.waveTrack, this.timeline.timeContext, true);
dev@18 574
cannam@161 575 const nchannels = buffer.numberOfChannels;
cannam@161 576 const totalWaveHeight = height * 0.9;
cannam@161 577 const waveHeight = totalWaveHeight / nchannels;
dev@189 578
cannam@161 579 for (let ch = 0; ch < nchannels; ++ch) {
dev@236 580 console.log('about to construct a waveform layer for channel ' + ch);
cannam@161 581 const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
dev@236 582 top: (height - totalWaveHeight) / 2 + waveHeight * ch,
dev@236 583 height: waveHeight,
cannam@257 584 color: '#0868ac',
dev@236 585 channel: ch
cannam@161 586 });
dev@341 587 this.addLayer(waveformLayer, this.waveTrack, this.timeline.timeContext);
cannam@161 588 }
cannam@117 589
dev@53 590 this.cursorLayer = new wavesUI.helpers.CursorLayer({
cannam@257 591 height: height,
cannam@257 592 color: '#c33c54'
dev@31 593 });
dev@341 594 this.addLayer(this.cursorLayer, this.waveTrack, this.timeline.timeContext);
dev@51 595 this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
dev@341 596 this.waveTrack.render();
dev@341 597 this.waveTrack.update();
dev@81 598
dev@196 599 this.isLoading = false;
dev@234 600 this.ref.markForCheck();
dev@53 601 this.animate();
dev@53 602 }
dev@53 603
cannam@117 604 renderSpectrogram(buffer: AudioBuffer): void {
cannam@117 605 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
dev@189 606 const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`);
cannam@117 607
dev@129 608 const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
cannam@221 609 top: 0,
cannam@221 610 height: height,
cannam@117 611 stepSize: 512,
dev@129 612 blockSize: 1024,
cannam@118 613 normalise: 'none',
cannam@118 614 mapper: this.sunsetMapper()
cannam@117 615 });
cannam@117 616 this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);
cannam@117 617
cannam@117 618 this.timeline.tracks.update();
cannam@117 619 }
cannam@117 620
cannam@308 621 private addLineLayers(features: VectorFeature[],
cannam@313 622 unit: string,
cannam@308 623 colour: Colour) {
cannam@298 624
cannam@308 625 // Winnow out empty features
cannam@308 626 features = features.filter(feature => (feature.data.length > 0));
dev@316 627
cannam@308 628 // First establish a [min,max] range across all of the features
cannam@308 629 let [min, max] = features.reduce((acc, feature) => {
cannam@308 630 return feature.data.reduce((acc, val) => {
cannam@308 631 const [min, max] = acc;
cannam@308 632 return [Math.min (min, val), Math.max (max, val)];
cannam@308 633 }, acc);
cannam@308 634 }, [Infinity, -Infinity]);
cannam@308 635
dev@316 636 console.log('addLineLayers: ' + features.length + ' non-empty features, overall min = ' + min + ', max = ' + max);
cannam@308 637
cannam@298 638 if (min === Infinity) {
cannam@298 639 min = 0;
cannam@298 640 max = 1;
cannam@298 641 }
cannam@308 642
cannam@298 643 if (min !== min || max !== max) {
dev@316 644 console.log('WARNING: min or max is NaN');
cannam@298 645 min = 0;
cannam@298 646 max = 1;
cannam@298 647 }
cannam@298 648
cannam@298 649 const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
cannam@308 650
cannam@308 651 // Now add a line layer for each vector feature
cannam@308 652 const lineLayers = features.map(feature => {
cannam@308 653
cannam@309 654 let duration = 0;
cannam@309 655
cannam@309 656 // Give the plot items positions relative to the start of the
cannam@309 657 // line, rather than relative to absolute time 0. This is
cannam@309 658 // because we'll be setting the layer timeline start property
cannam@309 659 // later on and these will be positioned relative to that
dev@316 660
cannam@308 661 const plotData = [...feature.data].map((val, i) => {
cannam@309 662 const t = i * feature.stepDuration;
cannam@309 663 duration = t + feature.stepDuration;
cannam@308 664 return {
cannam@309 665 cx: t,
cannam@308 666 cy: val
cannam@308 667 };
cannam@308 668 });
dev@316 669
cannam@308 670 const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
cannam@308 671 color: colour,
cannam@308 672 height: height,
cannam@308 673 yDomain: [ min, max ]
cannam@308 674 });
cannam@308 675 this.addLayer(
cannam@308 676 lineLayer,
dev@341 677 this.waveTrack,
cannam@308 678 this.timeline.timeContext
cannam@308 679 );
cannam@308 680
cannam@309 681 // Set start and duration so that the highlight layer can use
cannam@309 682 // them to determine which line to draw values from
cannam@309 683 lineLayer.start = feature.startTime;
cannam@309 684 lineLayer.duration = duration;
dev@316 685
cannam@308 686 return lineLayer;
cannam@298 687 });
cannam@309 688
cannam@309 689 // And a single scale layer at left
dev@316 690 // !!! todo: unit in scale layer
cannam@298 691 const scaleLayer = new wavesUI.helpers.ScaleLayer({
cannam@298 692 tickColor: colour,
cannam@298 693 textColor: colour,
cannam@298 694 height: height,
cannam@298 695 yDomain: [ min, max ]
cannam@298 696 });
cannam@298 697 this.addLayer(
cannam@298 698 scaleLayer,
dev@341 699 this.waveTrack,
cannam@298 700 this.timeline.timeContext
cannam@298 701 );
cannam@308 702
cannam@309 703 // And a single highlight layer which uses all of the line layers
cannam@309 704 // as its source material
cannam@308 705 this.highlightLayer = new wavesUI.helpers.HighlightLayer(lineLayers, {
cannam@298 706 opacity: 0.7,
cannam@298 707 height: height,
cannam@298 708 color: '#c33c54',
cannam@298 709 labelOffset: 38,
cannam@313 710 yDomain: [ min, max ],
cannam@313 711 unit
cannam@298 712 });
cannam@298 713 this.addLayer(
cannam@298 714 this.highlightLayer,
dev@341 715 this.waveTrack,
cannam@298 716 this.timeline.timeContext
cannam@298 717 );
cannam@298 718 }
dev@303 719
dev@53 720 // TODO refactor - this doesn't belong here
dev@64 721 private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
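// a one-shot extractor renders only the first batch of features it receives,
// then unsubscribes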
dev@196 722 if (this.isOneShotExtractor && !this.hasShot) {
dev@196 723 this.featureExtractionSubscription.unsubscribe();
dev@196 724 this.hasShot = true;
dev@196 725 }
dev@196 726
dev@236 727 if (!extracted.hasOwnProperty('features')
cannam@296 728 || !extracted.hasOwnProperty('outputDescriptor')) {
dev@236 729 return;
dev@236 730 }
dev@236 731 if (!extracted.features.hasOwnProperty('shape')
dev@301 732 || !extracted.features.hasOwnProperty('collected')) {
dev@236 733 return;
dev@236 734 }
dev@64 735 const features: FeatureCollection = (extracted.features as FeatureCollection);
dev@64 736 const outputDescriptor = extracted.outputDescriptor;
dev@196 737 const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
dev@64 738
dev@316 739 let unit = '';
cannam@313 740 if (outputDescriptor.configured.hasOwnProperty('unit')) {
cannam@313 741 unit = outputDescriptor.configured.unit;
cannam@313 742 }
cannam@313 743
dev@64 744 // TODO refactor all of this
dev@63 745 switch (features.shape) {
cannam@298 746
cannam@298 747 case 'vector': {
cannam@299 748 const collected = features.collected as VectorFeature;
cannam@313 749 this.addLineLayers([collected], unit, colour);
cannam@296 750 break;
dev@64 751 }
dev@303 752
cannam@308 753 case 'tracks': {
cannam@308 754 const collected = features.collected as TracksFeature;
cannam@313 755 this.addLineLayers(collected, unit, colour);
cannam@308 756 break;
cannam@308 757 }
dev@316 758
dev@64 759 case 'list': {
dev@301 760 const featureData = features.collected as FeatureList;
dev@236 761 if (featureData.length === 0) {
dev@236 762 return;
dev@236 763 }
dev@319 764
dev@64 765 // TODO refactor, this is incomprehensible
dev@319 766 try {
dev@319 767 const featureShape = deduceHigherLevelFeatureShape(
dev@319 768 featureData,
dev@319 769 outputDescriptor
dev@122 770 );
dev@319 771 switch (featureShape) {
dev@319 772 case 'instants':
dev@319 773 const plotData = featureData.map(feature => ({
dev@319 774 time: toSeconds(feature.timestamp),
dev@319 775 label: feature.label
dev@319 776 }));
dev@319 777 const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
dev@319 778 height: height,
dev@319 779 color: colour,
dev@319 780 labelPosition: 'bottom',
dev@319 781 shadeSegments: true
dev@67 782 });
dev@319 783 this.addLayer(
dev@319 784 featureLayer,
dev@341 785 this.waveTrack,
dev@319 786 this.timeline.timeContext
dev@319 787 );
dev@319 788 break;
dev@319 789 case 'regions':
dev@319 790 this.renderRegions(
dev@319 791 featureData,
dev@319 792 outputDescriptor,
dev@341 793 this.waveTrack,
dev@319 794 height,
dev@319 795 colour
dev@319 796 );
dev@319 797 break;
dev@319 798 case 'notes':
cannam@333 799 const notes = mapFeaturesToNotes(featureData, outputDescriptor);
cannam@333 800 let [min, max] = notes.reduce((acc, note) => {
cannam@333 801 const [min, max] = acc;
cannam@333 802 return [Math.min (min, note.pitch), Math.max (max, note.pitch)];
cannam@333 803 }, [Infinity, -Infinity]);
cannam@333 804 if (min === Infinity || min < 0 || max < 0) {
cannam@333 805 min = 0;
cannam@333 806 max = 127;
cannam@333 807 }
cannam@333 808 // round min and max to octave boundaries (starting at C as in MIDI)
cannam@333 809 min = 12 * Math.floor(min / 12);
cannam@333 810 max = 12 * Math.ceil(max / 12);
dev@319 811 const pianoRollLayer = new wavesUI.helpers.PianoRollLayer(
cannam@333 812 notes,
cannam@333 813 {height: height, color: colour, yDomain: [min, max] }
dev@319 814 );
dev@319 815 this.addLayer(
dev@319 816 pianoRollLayer,
dev@341 817 this.waveTrack,
dev@319 818 this.timeline.timeContext
dev@319 819 );
dev@319 820 break;
dev@319 821 }
dev@319 822 } catch (e) {
dev@319 823 console.warn(e); // TODO display
dev@319 824 break;
dev@64 825 }
dev@64 826 break;
dev@64 827 }
cannam@106 828 case 'matrix': {
dev@303 829 const collected = features.collected as MatrixFeature;
dev@316 830 const startTime = collected.startTime; // !!! + make use of
cannam@296 831 const stepDuration = collected.stepDuration;
cannam@296 832 const matrixData = collected.data;
dev@335 833
dev@236 834 if (matrixData.length === 0) {
dev@236 835 return;
dev@236 836 }
dev@236 837
dev@236 838 console.log('matrix data length = ' + matrixData.length);
dev@236 839 console.log('height of first column = ' + matrixData[0].length);
cannam@109 840 const targetValue = this.estimatePercentile(matrixData, 95);
cannam@108 841 const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
dev@236 842 console.log('setting gain to ' + gain);
cannam@120 843 const matrixEntity =
cannam@120 844 new wavesUI.utils.PrefilledMatrixEntity(matrixData,
dev@301 845 0, // startTime
dev@303 846 stepDuration);
dev@236 847 const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
cannam@108 848 gain,
cannam@221 849 top: 0,
cannam@221 850 height: height,
cannam@109 851 normalise: 'none',
cannam@108 852 mapper: this.iceMapper()
cannam@108 853 });
dev@122 854 this.addLayer(
cannam@108 855 matrixLayer,
dev@341 856 this.waveTrack,
cannam@108 857 this.timeline.timeContext
dev@122 858 );
cannam@108 859 break;
cannam@106 860 }
dev@67 861 default:
dev@236 862 console.log(
dev@236 863 `Cannot render an appropriate layer for feature shape '${features.shape}'`
dev@236 864 );
dev@63 865 }
dev@59 866
dev@196 867 this.isLoading = false;
dev@234 868 this.ref.markForCheck();
dev@56 869 this.timeline.tracks.update();
dev@336 870 this.animate();
dev@53 871 }
dev@53 872
dev@53 873 private animate(): void {
dev@236 874 if (!this.isSeeking) {
dev@236 875 return;
dev@236 876 }
dev@196 877
dev@31 878 this.ngZone.runOutsideAngular(() => {
dev@31 879 // listen for time passing...
dev@31 880 const updateSeekingCursor = () => {
dev@53 881 const currentTime = this.audioService.getCurrentTime();
dev@53 882 this.cursorLayer.currentPosition = currentTime;
dev@53 883 this.cursorLayer.update();
dev@53 884
dev@341 885 if (this.highlightLayer) {
cannam@254 886 this.highlightLayer.currentPosition = currentTime;
cannam@254 887 this.highlightLayer.update();
cannam@254 888 }
cannam@254 889
dev@53 890 const currentOffset = this.timeline.timeContext.offset;
dev@53 891 const offsetTimestamp = currentOffset
dev@53 892 + currentTime;
dev@53 893
dev@53 894 const visibleDuration = this.timeline.timeContext.visibleDuration;
dev@53 895 // TODO reduce duplication between directions and make more declarative
dev@53 896 // this kinda logic should also be tested
dev@53 897 const mustPageForward = offsetTimestamp > visibleDuration;
dev@53 898 const mustPageBackward = currentTime < -currentOffset;
dev@53 899
dev@53 900 if (mustPageForward) {
dev@53 901 const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration;
dev@53 902
dev@301 903 this.timeline.timeContext.offset = hasSkippedMultiplePages ?
dev@301 904 -currentTime + 0.5 * visibleDuration :
dev@301 905 currentOffset - visibleDuration;
dev@51 906 this.timeline.tracks.update();
dev@34 907 }
dev@53 908
dev@53 909 if (mustPageBackward) {
dev@53 910 const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset;
dev@301 911 this.timeline.timeContext.offset = hasSkippedMultiplePages ?
dev@301 912 -currentTime + 0.5 * visibleDuration :
dev@301 913 currentOffset + visibleDuration;
dev@51 914 this.timeline.tracks.update();
dev@34 915 }
dev@53 916
dev@337 917 if (this.audioService.isPlaying()) {
dev@53 918 requestAnimationFrame(updateSeekingCursor);
dev@236 919 }
dev@31 920 };
dev@31 921 updateSeekingCursor();
dev@31 922 });
dev@6 923 }
dev@16 924
dev@319 925 // TODO not sure how much of the logic in here is actually sensible w.r.t
dev@319 926 // what it functionally produces
dev@319 927 private renderRegions(featureData: FeatureList,
dev@319 928 outputDescriptor: OutputDescriptor,
dev@319 929 waveTrack: any,
dev@319 930 height: number,
dev@319 931 colour: Colour) {
dev@319 932 console.log('Output is of region type');
dev@319 933 const binCount = outputDescriptor.configured.binCount || 0;
dev@319 934 const isBarRegion = binCount >= 1 || (featureData[0].featureValues || []).length >= 1;
dev@319 935 const getSegmentArgs = () => {
dev@319 936 if (isBarRegion) {
dev@319 937
dev@319 938 // TODO refactor - this is messy
dev@319 939 interface FoldsToNumber<T> {
dev@319 940 reduce(fn: (previousValue: number,
dev@319 941 currentValue: T,
dev@319 942 currentIndex: number,
dev@319 943 array: ArrayLike<T>) => number,
dev@319 944 initialValue?: number): number;
dev@319 945 }
dev@319 946
dev@319 947 // TODO potentially change impl., i.e avoid reduce
dev@319 948 const findMin = <T>(arr: FoldsToNumber<T>,
dev@319 949 getElement: (x: T) => number): number => {
dev@319 950 return arr.reduce(
dev@319 951 (min, val) => Math.min(min, getElement(val)),
dev@319 952 Infinity
dev@319 953 );
dev@319 954 };
dev@319 955
dev@319 956 const findMax = <T>(arr: FoldsToNumber<T>,
dev@319 957 getElement: (x: T) => number): number => {
dev@319 958 return arr.reduce(
dev@319 959 (min, val) => Math.max(min, getElement(val)),
dev@319 960 -Infinity
dev@319 961 );
dev@319 962 };
dev@319 963
dev@319 964 const min = findMin<Feature>(featureData, (x: Feature) => {
dev@319 965 return findMin<number>(x.featureValues, y => y);
dev@319 966 });
dev@319 967
dev@319 968 const max = findMax<Feature>(featureData, (x: Feature) => {
dev@319 969 return findMax<number>(x.featureValues, y => y);
dev@319 970 });
dev@319 971
dev@319 972 const barHeight = 1.0 / height;
dev@319 973 return [
dev@319 974 featureData.reduce((bars, feature) => {
dev@319 975 const staticProperties = {
dev@319 976 x: toSeconds(feature.timestamp),
dev@319 977 width: toSeconds(feature.duration),
dev@319 978 height: min + barHeight,
dev@319 979 color: colour,
dev@319 980 opacity: 0.8
dev@319 981 };
dev@319 982 // TODO avoid copying Float32Array to an array - map is problematic here
dev@319 983 return bars.concat([...feature.featureValues]
dev@319 984 .map(val => Object.assign({}, staticProperties, {y: val})));
dev@319 985 }, []),
dev@319 986 {yDomain: [min, max + barHeight], height: height} as any
dev@319 987 ];
dev@319 988 } else {
dev@319 989 return [featureData.map(feature => ({
dev@319 990 x: toSeconds(feature.timestamp),
dev@319 991 width: toSeconds(feature.duration),
dev@319 992 color: colour,
dev@319 993 opacity: 0.8
dev@319 994 })), {height: height}];
dev@319 995 }
dev@319 996 };
dev@319 997
dev@319 998 const segmentLayer = new wavesUI.helpers.SegmentLayer(
dev@319 999 ...getSegmentArgs()
dev@319 1000 );
dev@319 1001 this.addLayer(
dev@319 1002 segmentLayer,
dev@319 1003 waveTrack,
dev@319 1004 this.timeline.timeContext
dev@319 1005 );
dev@319 1006 }
dev@319 1007
dev@122 1008 private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): void {
dev@54 1009 timeContext.zoom = 1.0;
dev@54 1010 if (!layer.timeContext) {
dev@54 1011 layer.setTimeContext(isAxis ?
dev@54 1012 timeContext : new wavesUI.core.LayerTimeContext(timeContext));
dev@54 1013 }
dev@54 1014 track.add(layer);
dev@185 1015 this.layers.push(layer);
dev@54 1016 layer.render();
dev@54 1017 layer.update();
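// re-append the cursor layer's element so it stays rendered on top of the
// newly added layer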
dev@122 1018 if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
dev@112 1019 track.$layout.appendChild(this.cursorLayer.$el);
dev@112 1020 }
dev@59 1021 }
dev@59 1022
dev@51 1023 ngOnDestroy(): void {
dev@236 1024 if (this.featureExtractionSubscription) {
dev@196 1025 this.featureExtractionSubscription.unsubscribe();
dev@236 1026 }
dev@236 1027 if (this.playingStateSubscription) {
dev@196 1028 this.playingStateSubscription.unsubscribe();
dev@236 1029 }
dev@236 1030 if (this.seekedSubscription) {
dev@196 1031 this.seekedSubscription.unsubscribe();
dev@236 1032 }
dev@236 1033 if (this.onAudioDataSubscription) {
dev@196 1034 this.onAudioDataSubscription.unsubscribe();
dev@236 1035 }
dev@51 1036 }
dev@154 1037
dev@155 1038 seekStart(): void {
dev@155 1039 this.zoomOnMouseDown = this.timeline.timeContext.zoom;
dev@157 1040 this.offsetOnMouseDown = this.timeline.timeContext.offset;
dev@155 1041 }
dev@155 1042
dev@155 1043 seekEnd(x: number): void {
dev@157 1044 const hasSameZoom: boolean = this.zoomOnMouseDown ===
dev@157 1045 this.timeline.timeContext.zoom;
dev@157 1046 const hasSameOffset: boolean = this.offsetOnMouseDown ===
dev@157 1047 this.timeline.timeContext.offset;
dev@157 1048 if (hasSameZoom && hasSameOffset) {
dev@155 1049 this.seek(x);
dev@155 1050 }
dev@155 1051 }
dev@155 1052
dev@154 1053 seek(x: number): void {
dev@154 1054 if (this.timeline) {
dev@154 1055 const timeContext: any = this.timeline.timeContext;
dev@341 1056 const timeX = timeContext.timeToPixel.invert(x) - timeContext.offset;
dev@196 1057 if (this.isSeeking) {
dev@341 1058 this.audioService.seekTo(timeX);
dev@341 1059 } else {
dev@341 1060 if (this.highlightLayer) {
dev@341 1061 this.highlightLayer.currentPosition = timeX;
dev@341 1062 this.highlightLayer.update();
dev@341 1063 }
dev@196 1064 }
dev@154 1065 }
dev@154 1066 }
dev@6 1067 }
dev@319 1068
dev@319 1069 function deduceHigherLevelFeatureShape(featureData: FeatureList,
dev@319 1070 descriptor: OutputDescriptor)
dev@319 1071 : HigherLevelFeatureShape {
dev@319 1072 // TODO look at output descriptor instead of directly inspecting features
dev@319 1073 const hasDuration = descriptor.configured.hasDuration;
dev@319 1074 const binCount = descriptor.configured.binCount;
dev@319 1075 const isMarker = !hasDuration
dev@319 1076 && binCount === 0
dev@319 1077 && featureData[0].featureValues == null;
dev@319 1078
dev@319 1079 const isMaybeNote = getCanonicalNoteLikeUnit(descriptor.configured.unit)
dev@319 1080 && [1, 2].find(nBins => nBins === binCount);
dev@319 1081
dev@319 1082 const isRegionLike = hasDuration && featureData[0].timestamp != null;
dev@319 1083
dev@319 1084 const isNote = isMaybeNote && isRegionLike;
dev@319 1085 const isRegion = !isMaybeNote && isRegionLike;
dev@319 1086 if (isMarker) {
dev@319 1087 return 'instants';
dev@319 1088 }
dev@319 1089 if (isNote) {
dev@319 1090 return 'notes';
dev@319 1091 }
dev@319 1092 if (isRegion) {
dev@319 1093 return 'regions';
dev@319 1094 }
dev@335 1095 throw new Error('No shape could be deduced');
dev@319 1096 }
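// Summary of the deduction above:
//   no duration, binCount 0, no feature values            -> 'instants'
//   duration + timestamp, midi/hz unit with 1 or 2 bins   -> 'notes'
//   duration + timestamp, no note-like unit               -> 'regions'
//   anything else throws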
dev@319 1097
dev@319 1098 function getCanonicalNoteLikeUnit(unit: string): NoteLikeUnit | null {
dev@319 1099 const canonicalUnits: NoteLikeUnit[] = ['midi', 'hz'];
dev@319 1100 return unit ? canonicalUnits.find(canonicalUnit => {
dev@335 1101 return unit.toLowerCase().indexOf(canonicalUnit) >= 0;
dev@319 1102 }) || null : null;
dev@319 1103 }
dev@319 1104
dev@319 1105 function mapFeaturesToNotes(featureData: FeatureList,
dev@319 1106 descriptor: OutputDescriptor): Note[] {
dev@319 1107 const canonicalUnit = getCanonicalNoteLikeUnit(descriptor.configured.unit);
dev@319 1108 const isHz = canonicalUnit === 'hz';
dev@319 1109 return featureData.map(feature => ({
dev@319 1110 time: toSeconds(feature.timestamp),
dev@319 1111 duration: toSeconds(feature.duration),
dev@319 1112 pitch: isHz ?
dev@319 1113 frequencyToMidiNote(feature.featureValues[0]) : feature.featureValues[0]
dev@319 1114 }));
dev@319 1115 }
dev@319 1116
dev@319 1117 function frequencyToMidiNote(frequency: number,
dev@319 1118 concertA: number = 440.0): number {
dev@319 1119 return 69 + 12 * Math.log2(frequency / concertA);
dev@319 1120 }
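// e.g. frequencyToMidiNote(440) === 69 (concert A4), and
// frequencyToMidiNote(261.63) is approximately 60 (middle C)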