annotate src/app/waveform/waveform.component.ts @ 336:9f6131d22bec

Call animate when rendering a feature so that the play head animates.
author Lucas Thompson <dev@lucas.im>
date Fri, 19 May 2017 14:14:33 +0100
parents e213fdc33f46
children 097d93b11445
rev   line source
dev@10 1 import {
dev@236 2 Component,
dev@236 3 OnInit,
dev@236 4 ViewChild,
dev@236 5 ElementRef,
dev@236 6 Input,
dev@236 7 AfterViewInit,
dev@236 8 NgZone,
dev@236 9 OnDestroy,
dev@236 10 ChangeDetectorRef
dev@10 11 } from '@angular/core';
dev@196 12 import {
dev@196 13 AudioPlayerService, AudioResource,
dev@196 14 AudioResourceError
dev@236 15 } from '../services/audio-player/audio-player.service';
dev@289 16 import wavesUI from 'waves-ui-piper';
dev@63 17 import {
dev@64 18 FeatureExtractionService
dev@236 19 } from '../services/feature-extraction/feature-extraction.service';
dev@236 20 import {Subscription} from 'rxjs/Subscription';
dev@63 21 import {
dev@63 22 FeatureCollection,
cannam@296 23 SimpleResponse,
cannam@299 24 VectorFeature,
cannam@299 25 MatrixFeature,
cannam@299 26 TracksFeature
dev@236 27 } from 'piper/HigherLevelUtilities';
dev@319 28 import {toSeconds, OutputDescriptor} from 'piper';
dev@236 29 import {FeatureList, Feature} from 'piper/Feature';
dev@81 30 import * as Hammer from 'hammerjs';
dev@236 31 import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';
dev@8 32
dev@54 33 type Layer = any;
dev@54 34 type Track = any;
type Timeline = any; // `Timeline` is referenced below but was never declared
dev@59 35 type Colour = string;
dev@6 36
dev@268 37
dev@268 38
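// Cycles endlessly through a fixed palette so each successive feature
// layer gets its own colour. Note the first value yielded is colours[1]:
// index 0 (the waveform's own blue) comes around at the end of each cycle.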
dev@268 39 function* createColourGenerator(colours: string[]) {
cannam@257 40 let index = 0;
dev@268 41 const nColours = colours.length;
cannam@257 42 while (true) {
dev@268 43 yield colours[index = (index + 1) % nColours];
cannam@257 44 }
dev@268 45 }
dev@268 46
dev@268 47 const defaultColourGenerator = createColourGenerator([
dev@268 48 '#0868ac', // "sapphire blue", our waveform / header colour
dev@268 49 '#c33c54', // "brick red"
dev@268 50 '#17bebb', // "tiffany blue"
dev@268 51 '#001021', // "rich black"
dev@268 52 '#fa8334', // "mango tango"
dev@268 53 '#034748' // "deep jungle green"
dev@268 54 ]);
cannam@257 55
dev@319 56 type HigherLevelFeatureShape = 'regions' | 'instants' | 'notes';
dev@319 57 type NoteLikeUnit = 'midi' | 'hz';
dev@319 58 interface Note {
dev@319 59 time: number;
dev@319 60 duration: number;
dev@319 61 pitch: number;
dev@319 62 velocity?: number;
dev@319 63 }
dev@319 64
dev@6 65 @Component({
dev@236 66 selector: 'ugly-waveform',
dev@6 67 templateUrl: './waveform.component.html',
dev@6 68 styleUrls: ['./waveform.component.css']
dev@6 69 })
cannam@257 70
dev@51 71 export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {
dev@20 72
dev@8 73 @ViewChild('track') trackDiv: ElementRef;
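// The resize is deferred with requestAnimationFrame so the visible-width
// change and the track update land together on the next paint.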
dev@285 74 @Input() set width(width: number) {
dev@285 75 if (this.timeline) {
dev@285 76 requestAnimationFrame(() => {
dev@285 77 this.timeline.timeContext.visibleWidth = width;
dev@285 78 this.timeline.tracks.update();
dev@285 79 });
dev@285 80 }
dev@285 81 }
dev@189 82 @Input() timeline: Timeline;
dev@189 83 @Input() trackIdPrefix: string;
dev@196 84 @Input() set isSubscribedToExtractionService(isSubscribed: boolean) {
dev@196 85 if (isSubscribed) {
dev@196 86 if (this.featureExtractionSubscription) {
dev@196 87 return;
dev@196 88 }
dev@268 89
dev@196 90 this.featureExtractionSubscription =
dev@196 91 this.piperService.featuresExtracted$.subscribe(
dev@196 92 features => {
dev@268 93 this.renderFeatures(features, defaultColourGenerator.next().value);
dev@196 94 });
dev@196 95 } else {
dev@196 96 if (this.featureExtractionSubscription) {
dev@196 97 this.featureExtractionSubscription.unsubscribe();
dev@196 98 }
dev@196 99 }
dev@196 100 }
dev@196 101 @Input() set isSubscribedToAudioService(isSubscribed: boolean) {
dev@196 102 this._isSubscribedToAudioService = isSubscribed;
dev@196 103 if (isSubscribed) {
dev@196 104 if (this.onAudioDataSubscription) {
dev@196 105 return;
dev@196 106 }
dev@196 107
dev@196 108 this.onAudioDataSubscription =
dev@196 109 this.audioService.audioLoaded$.subscribe(res => {
dev@196 110 const wasError = (res as AudioResourceError).message != null;
dev@196 111
dev@196 112 if (wasError) {
dev@196 113 console.warn('No audio, display error?');
dev@196 114 } else {
dev@196 115 this.audioBuffer = (res as AudioResource).samples;
dev@196 116 }
dev@196 117 });
dev@196 118 } else {
dev@196 119 if (this.onAudioDataSubscription) {
dev@196 120 this.onAudioDataSubscription.unsubscribe();
dev@196 121 }
dev@196 122 }
dev@196 123 }
dev@196 124
dev@196 125 get isSubscribedToAudioService(): boolean {
dev@196 126 return this._isSubscribedToAudioService;
dev@196 127 }
dev@196 128
dev@196 129 @Input() set isOneShotExtractor(isOneShot: boolean) {
dev@196 130 this._isOneShotExtractor = isOneShot;
dev@196 131 }
dev@196 132
dev@196 133 get isOneShotExtractor(): boolean {
dev@196 134 return this._isOneShotExtractor;
dev@196 135 }
dev@196 136
dev@196 137 @Input() set isSeeking(isSeeking: boolean) {
dev@196 138 this._isSeeking = isSeeking;
dev@196 139 if (isSeeking) {
dev@196 140 if (this.seekedSubscription) {
dev@196 141 return;
dev@196 142 }
dev@236 143 if (this.playingStateSubscription) {
dev@196 144 return;
dev@196 145 }
dev@196 146
dev@196 147 this.seekedSubscription = this.audioService.seeked$.subscribe(() => {
dev@236 148 if (!this.isPlaying) {
dev@196 149 this.animate();
dev@236 150 }
dev@196 151 });
dev@196 152 this.playingStateSubscription =
dev@196 153 this.audioService.playingStateChange$.subscribe(
dev@196 154 isPlaying => {
dev@196 155 this.isPlaying = isPlaying;
dev@236 156 if (this.isPlaying) {
dev@196 157 this.animate();
dev@236 158 }
dev@196 159 });
dev@196 160 } else {
dev@196 161 if (this.isPlaying) {
dev@196 162 this.isPlaying = false;
dev@196 163 }
dev@196 164 if (this.playingStateSubscription) {
dev@196 165 this.playingStateSubscription.unsubscribe();
dev@196 166 }
dev@196 167 if (this.seekedSubscription) {
dev@196 168 this.seekedSubscription.unsubscribe();
dev@196 169 }
dev@196 170 }
dev@196 171 }
dev@196 172
dev@196 173 get isSeeking(): boolean {
dev@196 174 return this._isSeeking;
dev@196 175 }
dev@196 176
dev@16 177 set audioBuffer(buffer: AudioBuffer) {
dev@16 178 this._audioBuffer = buffer || undefined;
cannam@117 179 if (this.audioBuffer) {
dev@20 180 this.renderWaveform(this.audioBuffer);
dev@180 181 // this.renderSpectrogram(this.audioBuffer);
cannam@117 182 }
dev@16 183 }
dev@16 184
dev@16 185 get audioBuffer(): AudioBuffer {
dev@16 186 return this._audioBuffer;
dev@16 187 }
dev@16 188
dev@196 189 private _audioBuffer: AudioBuffer;
dev@196 190 private _isSubscribedToAudioService: boolean;
dev@196 191 private _isOneShotExtractor: boolean;
dev@196 192 private _isSeeking: boolean;
dev@196 193 private cursorLayer: any;
cannam@254 194 private highlightLayer: any;
dev@196 195 private layers: Layer[];
dev@51 196 private featureExtractionSubscription: Subscription;
dev@53 197 private playingStateSubscription: Subscription;
dev@53 198 private seekedSubscription: Subscription;
dev@196 199 private onAudioDataSubscription: Subscription;
dev@53 200 private isPlaying: boolean;
dev@155 201 private zoomOnMouseDown: number;
dev@157 202 private offsetOnMouseDown: number;
dev@196 203 private hasShot: boolean;
dev@196 204 private isLoading: boolean;
dev@51 205
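// waves-ui does not appear to expose an API for recolouring an existing
// layer, so this reaches into its private shape maps to repaint in place.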
dev@236 206 private static changeColour(layer: Layer, colour: string): void {
dev@236 207 const butcherShapes = (shape) => {
dev@236 208 shape.install({color: () => colour});
dev@236 209 shape.params.color = colour;
dev@236 210 shape.update(layer._renderingContext, layer.data);
dev@236 211 };
dev@236 212
dev@236 213 layer._$itemCommonShapeMap.forEach(butcherShapes);
dev@236 214 layer._$itemShapeMap.forEach(butcherShapes);
dev@236 215 layer.render();
dev@236 216 layer.update();
dev@236 217 }
dev@236 218
dev@31 219 constructor(private audioService: AudioPlayerService,
dev@51 220 private piperService: FeatureExtractionService,
dev@234 221 private ngZone: NgZone,
dev@234 222 private ref: ChangeDetectorRef) {
dev@196 223 this.isSubscribedToAudioService = true;
dev@196 224 this.isSeeking = true;
dev@185 225 this.layers = [];
dev@196 226 this.audioBuffer = undefined;
dev@54 227 this.timeline = undefined;
dev@54 228 this.cursorLayer = undefined;
cannam@254 229 this.highlightLayer = undefined;
dev@336 230 this.isPlaying = this.audioService.isPlaying();
dev@196 231 this.isLoading = true;
dev@51 232 }
dev@51 233
dev@53 234 ngOnInit() {
dev@53 235 }
dev@10 236
dev@10 237 ngAfterViewInit(): void {
dev@236 238 this.trackIdPrefix = this.trackIdPrefix || 'default';
dev@196 239 if (this.timeline) {
dev@196 240 this.renderTimeline(null, true, true);
dev@196 241 } else {
dev@196 242 this.renderTimeline();
dev@196 243 }
dev@20 244 }
dev@20 245
dev@196 246 renderTimeline(duration: number = 1.0,
dev@196 247 useExistingDuration: boolean = false,
dev@196 248 isInitialRender: boolean = false): Timeline {
dev@18 249 const track: HTMLElement = this.trackDiv.nativeElement;
dev@236 250 track.innerHTML = '';
dev@18 251 const height: number = track.getBoundingClientRect().height;
dev@18 252 const width: number = track.getBoundingClientRect().width;
dev@18 253 const pixelsPerSecond = width / duration;
dev@196 254 const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline;
dev@196 255
dev@196 256 if (hasExistingTimeline) {
dev@196 257 if (!useExistingDuration) {
dev@196 258 this.timeline.pixelsPerSecond = pixelsPerSecond;
dev@196 259 this.timeline.visibleWidth = width;
dev@196 260 }
dev@180 261 } else {
dev@180 262 this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
dev@180 263 }
dev@196 264 const waveTrack = this.timeline.createTrack(
dev@196 265 track,
dev@196 266 height,
dev@196 267 `wave-${this.trackIdPrefix}`
dev@196 268 );
dev@196 269 if (isInitialRender && hasExistingTimeline) {
dev@196 270 // time axis
dev@196 271 const timeAxis = new wavesUI.helpers.TimeAxisLayer({
dev@196 272 height: height,
dev@196 273 color: '#b0b0b0'
dev@196 274 });
dev@196 275 this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);
dev@196 276 this.cursorLayer = new wavesUI.helpers.CursorLayer({
cannam@257 277 height: height,
cannam@257 278 color: '#c33c54'
dev@196 279 });
dev@196 280 this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
dev@196 281 }
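// Touch devices get bespoke gestures: HammerJS drives horizontal panning,
// while raw touchstart/touchmove/touchend listeners implement pinch-to-zoom.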
dev@196 282 if ('ontouchstart' in window) {
dev@196 283 interface Point {
dev@196 284 x: number;
dev@196 285 y: number;
dev@196 286 }
dev@196 287
dev@236 288 let zoomGestureJustEnded = false;
dev@196 289
dev@196 290 const pixelToExponent: Function = wavesUI.utils.scales.linear()
dev@196 291 .domain([0, 100]) // 100px => factor 2
dev@196 292 .range([0, 1]);
dev@196 293
dev@196 294 const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
dev@196 295 return Math.sqrt(
dev@196 296 Math.pow(p2.x - p1.x, 2) +
dev@196 297 Math.pow(p2.y - p1.y, 2));
dev@196 298 };
dev@196 299
dev@205 300 const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
dev@205 301 return {
dev@205 302 x: 0.5 * (p1.x + p2.x),
dev@205 303 y: 0.5 * (p1.y + p2.y)
dev@205 304 };
dev@205 305 };
dev@205 306
dev@205 307 const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, {
dev@205 308 recognizers: [
dev@205 309 [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
dev@205 310 ]
dev@205 311 });
dev@204 312
dev@204 313 // HammerJS appears to bind its handlers at the window level, so gesture
dev@204 314 // events can propagate to other components; capture this timeline locally
dev@204 315 const componentTimeline = this.timeline;
dev@204 316 let initialZoom;
dev@204 317 let initialDistance;
dev@204 318 let offsetAtPanStart;
dev@205 319 let startX;
dev@205 320 let isZooming;
dev@204 321
dev@196 322 const scroll = (ev) => {
dev@236 323 if (ev.center.x - startX === 0) {
dev@236 324 return;
dev@236 325 }
dev@236 326
dev@196 327 if (zoomGestureJustEnded) {
dev@196 328 zoomGestureJustEnded = false;
dev@236 329 console.log('Skip this event: likely a single touch dangling from pinch');
dev@196 330 return;
dev@196 331 }
dev@204 332 componentTimeline.timeContext.offset = offsetAtPanStart +
dev@204 333 componentTimeline.timeContext.timeToPixel.invert(ev.deltaX);
dev@204 334 componentTimeline.tracks.update();
dev@196 335 };
dev@196 336
dev@196 337 const zoom = (ev) => {
dev@236 338 if (ev.touches.length < 2) {
dev@236 339 return;
dev@236 340 }
dev@236 341
dev@214 342 ev.preventDefault();
dev@204 343 const minZoom = componentTimeline.state.minZoom;
dev@204 344 const maxZoom = componentTimeline.state.maxZoom;
dev@205 345 const p1: Point = {
dev@218 346 x: ev.touches[0].clientX,
dev@218 347 y: ev.touches[0].clientY
dev@205 348 };
dev@205 349 const p2: Point = {
dev@218 350 x: ev.touches[1].clientX,
dev@218 351 y: ev.touches[1].clientY
dev@205 352 };
dev@205 353 const distance = calculateDistance(p1, p2);
dev@205 354 const midPoint = calculateMidPoint(p1, p2);
dev@196 355
dev@196 356 const lastCenterTime =
dev@205 357 componentTimeline.timeContext.timeToPixel.invert(midPoint.x);
dev@196 358
dev@204 359 const exponent = pixelToExponent(distance - initialDistance);
dev@204 360 const targetZoom = initialZoom * Math.pow(2, exponent);
dev@196 361
dev@204 362 componentTimeline.timeContext.zoom =
dev@196 363 Math.min(Math.max(targetZoom, minZoom), maxZoom);
dev@196 364
dev@196 365 const newCenterTime =
dev@205 366 componentTimeline.timeContext.timeToPixel.invert(midPoint.x);
dev@196 367
dev@204 368 componentTimeline.timeContext.offset += newCenterTime - lastCenterTime;
dev@204 369 componentTimeline.tracks.update();
dev@196 370 };
dev@205 371 hammertime.on('panstart', (ev) => {
dev@204 372 offsetAtPanStart = componentTimeline.timeContext.offset;
dev@205 373 startX = ev.center.x;
dev@196 374 });
dev@196 375 hammertime.on('panleft', scroll);
dev@196 376 hammertime.on('panright', scroll);
dev@205 377
dev@205 378
dev@205 379 const element: HTMLElement = this.trackDiv.nativeElement;
dev@205 380 element.addEventListener('touchstart', (e) => {
dev@236 381 if (e.touches.length < 2) {
dev@236 382 return;
dev@236 383 }
dev@236 384
dev@205 385 isZooming = true;
dev@204 386 initialZoom = componentTimeline.timeContext.zoom;
dev@196 387
dev@204 388 initialDistance = calculateDistance({
dev@218 389 x: e.touches[0].clientX,
dev@218 390 y: e.touches[0].clientY
dev@196 391 }, {
dev@218 392 x: e.touches[1].clientX,
dev@218 393 y: e.touches[1].clientY
dev@196 394 });
dev@196 395 });
dev@205 396 element.addEventListener('touchend', () => {
dev@205 397 if (isZooming) {
dev@205 398 isZooming = false;
dev@205 399 zoomGestureJustEnded = true;
dev@205 400 }
dev@301 401 });
dev@205 402 element.addEventListener('touchmove', zoom);
dev@196 403 }
dev@189 404 // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`);
dev@189 405 // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`);
return this.timeline;
dev@54 406 }
dev@18 407
cannam@108 408 estimatePercentile(matrix, percentile) {
cannam@108 409 // our sample is not evenly distributed across the whole data set:
cannam@108 410 // it is guaranteed to include at least one sample from every
cannam@108 411 // column, and could sample some values more than once. But it
cannam@108 412 // should be good enough in most cases (todo: show this)
cannam@109 413 if (matrix.length === 0) {
cannam@109 414 return 0.0;
cannam@109 415 }
cannam@108 416 const w = matrix.length;
cannam@108 417 const h = matrix[0].length;
cannam@108 418 const n = w * h;
cannam@109 419 const m = (n > 50000 ? 50000 : n); // should base that on the %ile
cannam@108 420 let m_per = Math.floor(m / w);
dev@236 421 if (m_per < 1) {
dev@236 422 m_per = 1;
dev@236 423 }
dev@236 424
dev@236 425 const sample = [];
cannam@108 426 for (let x = 0; x < w; ++x) {
cannam@108 427 for (let i = 0; i < m_per; ++i) {
cannam@108 428 const y = Math.floor(Math.random() * h);
cannam@109 429 const value = matrix[x][y];
cannam@109 430 if (!isNaN(value) && value !== Infinity) {
cannam@109 431 sample.push(value);
cannam@109 432 }
cannam@108 433 }
cannam@108 434 }
cannam@109 435 if (sample.length === 0) {
dev@236 436 console.log('WARNING: No samples gathered, even though we hoped for ' +
dev@301 437 (m_per * w) + ' of them');
cannam@109 438 return 0.0;
cannam@109 439 }
dev@236 440 sample.sort((a, b) => { return a - b; });
cannam@108 441 const ix = Math.floor((sample.length * percentile) / 100);
dev@236 442 console.log('Estimating ' + percentile + '-%ile of ' +
dev@301 443 n + '-sample dataset (' + w + ' x ' + h + ') as value ' + ix +
dev@301 444 ' of sorted ' + sample.length + '-sample subset');
cannam@108 445 const estimate = sample[ix];
dev@236 446 console.log('Estimate is: ' + estimate + ' (where min sampled value = ' +
dev@301 447 sample[0] + ' and max = ' + sample[sample.length - 1] + ')');
cannam@108 448 return estimate;
cannam@108 449 }
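// e.g. the matrix case of renderFeatures below uses
// estimatePercentile(matrixData, 95) to choose a gain that maps the
// 95th-percentile magnitude to 1.0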
cannam@108 450
cannam@108 451 interpolatingMapper(hexColours) {
cannam@108 452 const colours = hexColours.map(n => {
cannam@108 453 const i = parseInt(n, 16);
cannam@118 454 return [ ((i >> 16) & 255) / 255.0,
dev@301 455 ((i >> 8) & 255) / 255.0,
dev@301 456 ((i) & 255) / 255.0 ];
cannam@108 457 });
cannam@108 458 const last = colours.length - 1;
cannam@108 459 return (value => {
cannam@108 460 const m = value * last;
cannam@108 461 if (m >= last) {
cannam@108 462 return colours[last];
cannam@108 463 }
cannam@108 464 if (m <= 0) {
cannam@108 465 return colours[0];
cannam@108 466 }
cannam@108 467 const base = Math.floor(m);
cannam@108 468 const prop0 = base + 1.0 - m;
cannam@108 469 const prop1 = m - base;
cannam@108 470 const c0 = colours[base];
dev@236 471 const c1 = colours[base + 1];
cannam@118 472 return [ c0[0] * prop0 + c1[0] * prop1,
dev@301 473 c0[1] * prop0 + c1[1] * prop1,
dev@301 474 c0[2] * prop0 + c1[2] * prop1 ];
cannam@108 475 });
cannam@108 476 }
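// e.g. interpolatingMapper(['000000', 'ffffff'])(0.5) yields [0.5, 0.5, 0.5]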
dev@110 477
cannam@108 478 iceMapper() {
dev@236 479 const hexColours = [
cannam@108 480 // Based on ColorBrewer ylGnBu
dev@236 481 'ffffff', 'ffff00', 'f7fcf0', 'e0f3db', 'ccebc5', 'a8ddb5',
dev@236 482 '7bccc4', '4eb3d3', '2b8cbe', '0868ac', '084081', '042040'
cannam@108 483 ];
cannam@108 484 hexColours.reverse();
cannam@108 485 return this.interpolatingMapper(hexColours);
cannam@108 486 }
dev@110 487
cannam@118 488 hsv2rgb(h, s, v) { // all values in range [0, 1]
cannam@118 489 const i = Math.floor(h * 6);
cannam@118 490 const f = h * 6 - i;
cannam@118 491 const p = v * (1 - s);
cannam@118 492 const q = v * (1 - f * s);
cannam@118 493 const t = v * (1 - (1 - f) * s);
cannam@118 494 let r = 0, g = 0, b = 0;
cannam@118 495 switch (i % 6) {
dev@301 496 case 0: r = v; g = t; b = p; break;
dev@301 497 case 1: r = q; g = v; b = p; break;
dev@301 498 case 2: r = p; g = v; b = t; break;
dev@301 499 case 3: r = p; g = q; b = v; break;
dev@301 500 case 4: r = t; g = p; b = v; break;
dev@301 501 case 5: r = v; g = p; b = q; break;
cannam@118 502 }
cannam@118 503 return [ r, g, b ];
cannam@118 504 }
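// e.g. hsv2rgb(0, 1, 1) yields [1, 0, 0], pure red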
dev@122 505
cannam@118 506 greenMapper() {
cannam@118 507 const blue = 0.6666;
cannam@118 508 const pieslice = 0.3333;
cannam@118 509 return (value => {
cannam@118 510 const h = blue - value * 2.0 * pieslice;
cannam@118 511 const s = 0.5 + value / 2.0;
cannam@118 512 const v = value;
cannam@118 513 return this.hsv2rgb(h, s, v);
cannam@118 514 });
cannam@118 515 }
cannam@118 516
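// NB: channel values can fall outside [0, 1] at the extremes of the input
// range; the renderer is assumed to clamp them.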
cannam@118 517 sunsetMapper() {
cannam@118 518 return (value => {
dev@236 519 const r = (value - 0.24) * 2.38;
dev@236 520 const g = (value - 0.64) * 2.777;
cannam@118 521 let b = (3.6 * value);
dev@236 522 if (value > 0.277) {
dev@236 523 b = 2.0 - b;
dev@236 524 }
cannam@118 525 return [ r, g, b ];
cannam@118 526 });
cannam@118 527 }
cannam@118 528
dev@122 529 clearTimeline(): void {
dev@122 530 // loop through the layers and remove them ourselves; waves-ui's own removal methods don't seem to work reliably
dev@122 531 const timeContextChildren = this.timeline.timeContext._children;
dev@236 532 for (const track of this.timeline.tracks) {
dev@122 533 if (track.layers.length === 0) { continue; }
dev@122 534 const trackLayers = Array.from(track.layers);
dev@122 535 while (trackLayers.length) {
dev@236 536 const layer: Layer = trackLayers.pop();
dev@185 537 if (this.layers.includes(layer)) {
dev@185 538 track.remove(layer);
dev@185 539 this.layers.splice(this.layers.indexOf(layer), 1);
dev@185 540 const index = timeContextChildren.indexOf(layer.timeContext);
dev@185 541 if (index >= 0) {
dev@185 542 timeContextChildren.splice(index, 1);
dev@185 543 }
dev@185 544 layer.destroy();
dev@122 545 }
dev@122 546 }
dev@122 547 }
dev@122 548 }
dev@122 549
dev@54 550 renderWaveform(buffer: AudioBuffer): void {
dev@180 551 // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
dev@180 552 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
dev@54 554 if (this.timeline) {
dev@54 555 // resize
dev@54 556 const width = this.trackDiv.nativeElement.getBoundingClientRect().width;
dev@55 557
dev@122 558 this.clearTimeline();
dev@59 559
dev@54 560 this.timeline.visibleWidth = width;
dev@54 561 this.timeline.pixelsPerSecond = width / buffer.duration;
dev@54 563 } else {
dev@236 564 this.renderTimeline(buffer.duration);
dev@54 565 }
// moved after the guard: getTrackById would throw if this.timeline were
// still undefined, and the `wave-` track only exists once renderTimeline has run
const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);
waveTrack.height = height;
dev@83 566 this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;
cannam@106 567
dev@18 568 // time axis
dev@18 569 const timeAxis = new wavesUI.helpers.TimeAxisLayer({
dev@18 570 height: height,
cannam@106 571 color: '#b0b0b0'
dev@18 572 });
cannam@117 573 this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);
dev@18 574
cannam@161 575 const nchannels = buffer.numberOfChannels;
cannam@161 576 const totalWaveHeight = height * 0.9;
cannam@161 577 const waveHeight = totalWaveHeight / nchannels;
dev@189 578
cannam@161 579 for (let ch = 0; ch < nchannels; ++ch) {
dev@236 580 console.log('about to construct a waveform layer for channel ' + ch);
cannam@161 581 const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
dev@236 582 top: (height - totalWaveHeight) / 2 + waveHeight * ch,
dev@236 583 height: waveHeight,
cannam@257 584 color: '#0868ac',
dev@236 585 channel: ch
cannam@161 586 });
cannam@161 587 this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);
cannam@161 588 }
cannam@117 589
dev@53 590 this.cursorLayer = new wavesUI.helpers.CursorLayer({
cannam@257 591 height: height,
cannam@257 592 color: '#c33c54'
dev@31 593 });
cannam@117 594 this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
dev@51 595 this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
cannam@117 596 waveTrack.render();
cannam@117 597 waveTrack.update();
dev@81 598
dev@196 599 this.isLoading = false;
dev@234 600 this.ref.markForCheck();
dev@53 601 this.animate();
dev@53 602 }
dev@53 603
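// NB: only reachable from code that is currently commented out, and it
// expects a `grid-` track whose creation is likewise commented out above.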
cannam@117 604 renderSpectrogram(buffer: AudioBuffer): void {
cannam@117 605 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
dev@189 606 const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`);
cannam@117 607
dev@129 608 const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
cannam@221 609 top: 0,
cannam@221 610 height: height,
cannam@117 611 stepSize: 512,
dev@129 612 blockSize: 1024,
cannam@118 613 normalise: 'none',
cannam@118 614 mapper: this.sunsetMapper()
cannam@117 615 });
cannam@117 616 this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);
cannam@117 617
cannam@117 618 this.timeline.tracks.update();
cannam@117 619 }
cannam@117 620
cannam@308 621 private addLineLayers(features: VectorFeature[],
cannam@313 622 unit: string,
cannam@308 623 colour: Colour) {
cannam@298 624
cannam@308 625 // Winnow out empty features
cannam@308 626 features = features.filter(feature => (feature.data.length > 0));
dev@316 627
cannam@308 628 // First establish a [min,max] range across all of the features
cannam@308 629 let [min, max] = features.reduce((outerAcc, feature) => {
cannam@308 630 return feature.data.reduce((acc, val) => {
cannam@308 631 const [lo, hi] = acc;
cannam@308 632 return [Math.min(lo, val), Math.max(hi, val)];
cannam@308 633 }, outerAcc);
cannam@308 634 }, [Infinity, -Infinity]);
cannam@308 635
dev@316 636 console.log('addLineLayers: ' + features.length + ' non-empty features, overall min = ' + min + ', max = ' + max);
cannam@308 637
cannam@298 638 if (min === Infinity) {
cannam@298 639 min = 0;
cannam@298 640 max = 1;
cannam@298 641 }
cannam@308 642
cannam@298 643 if (isNaN(min) || isNaN(max)) {
dev@316 644 console.log('WARNING: min or max is NaN');
cannam@298 645 min = 0;
cannam@298 646 max = 1;
cannam@298 647 }
cannam@298 648
cannam@298 649 const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
cannam@298 650 const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);
cannam@308 651
cannam@308 652 // Now add a line layer for each vector feature
cannam@308 653 const lineLayers = features.map(feature => {
cannam@308 654
cannam@309 655 let duration = 0;
cannam@309 656
cannam@309 657 // Give the plot items positions relative to the start of the
cannam@309 658 // line, rather than relative to absolute time 0. This is
cannam@309 659 // because we'll be setting the layer timeline start property
cannam@309 660 // later on and these will be positioned relative to that
dev@316 661
cannam@308 662 const plotData = [...feature.data].map((val, i) => {
cannam@309 663 const t = i * feature.stepDuration;
cannam@309 664 duration = t + feature.stepDuration;
cannam@308 665 return {
cannam@309 666 cx: t,
cannam@308 667 cy: val
cannam@308 668 };
cannam@308 669 });
dev@316 670
cannam@308 671 const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
cannam@308 672 color: colour,
cannam@308 673 height: height,
cannam@308 674 yDomain: [ min, max ]
cannam@308 675 });
cannam@308 676 this.addLayer(
cannam@308 677 lineLayer,
cannam@308 678 waveTrack,
cannam@308 679 this.timeline.timeContext
cannam@308 680 );
cannam@308 681
cannam@309 682 // Set start and duration so that the highlight layer can use
cannam@309 683 // them to determine which line to draw values from
cannam@309 684 lineLayer.start = feature.startTime;
cannam@309 685 lineLayer.duration = duration;
dev@316 686
cannam@308 687 return lineLayer;
cannam@298 688 });
cannam@309 689
cannam@309 690 // And a single scale layer at left
dev@316 691 // !!! todo: unit in scale layer
cannam@298 692 const scaleLayer = new wavesUI.helpers.ScaleLayer({
cannam@298 693 tickColor: colour,
cannam@298 694 textColor: colour,
cannam@298 695 height: height,
cannam@298 696 yDomain: [ min, max ]
cannam@298 697 });
cannam@298 698 this.addLayer(
cannam@298 699 scaleLayer,
cannam@298 700 waveTrack,
cannam@298 701 this.timeline.timeContext
cannam@298 702 );
cannam@308 703
cannam@309 704 // And a single highlight layer which uses all of the line layers
cannam@309 705 // as its source material
cannam@308 706 this.highlightLayer = new wavesUI.helpers.HighlightLayer(lineLayers, {
cannam@298 707 opacity: 0.7,
cannam@298 708 height: height,
cannam@298 709 color: '#c33c54',
cannam@298 710 labelOffset: 38,
cannam@313 711 yDomain: [ min, max ],
cannam@313 712 unit
cannam@298 713 });
cannam@298 714 this.addLayer(
cannam@298 715 this.highlightLayer,
cannam@298 716 waveTrack,
cannam@298 717 this.timeline.timeContext
cannam@298 718 );
cannam@298 719 }
dev@303 720
dev@53 721 // TODO refactor - this doesn't belong here
dev@64 722 private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
dev@196 723 if (this.isOneShotExtractor && !this.hasShot) {
dev@196 724 this.featureExtractionSubscription.unsubscribe();
dev@196 725 this.hasShot = true;
dev@196 726 }
dev@196 727
dev@236 728 if (!extracted.hasOwnProperty('features')
cannam@296 729 || !extracted.hasOwnProperty('outputDescriptor')) {
dev@236 730 return;
dev@236 731 }
dev@236 732 if (!extracted.features.hasOwnProperty('shape')
dev@301 733 || !extracted.features.hasOwnProperty('collected')) {
dev@236 734 return;
dev@236 735 }
dev@64 736 const features: FeatureCollection = (extracted.features as FeatureCollection);
dev@64 737 const outputDescriptor = extracted.outputDescriptor;
dev@196 738 const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
dev@189 739 const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);
dev@64 740
dev@316 741 let unit = '';
cannam@313 742 if (outputDescriptor.configured.hasOwnProperty('unit')) {
cannam@313 743 unit = outputDescriptor.configured.unit;
cannam@313 744 }
cannam@313 745
dev@64 746 // TODO refactor all of this
dev@63 747 switch (features.shape) {
cannam@298 748
cannam@298 749 case 'vector': {
cannam@299 750 const collected = features.collected as VectorFeature;
cannam@313 751 this.addLineLayers([collected], unit, colour);
cannam@296 752 break;
dev@64 753 }
dev@303 754
cannam@308 755 case 'tracks': {
cannam@308 756 const collected = features.collected as TracksFeature;
cannam@313 757 this.addLineLayers(collected, unit, colour);
cannam@308 758 break;
cannam@308 759 }
dev@316 760
dev@64 761 case 'list': {
dev@301 762 const featureData = features.collected as FeatureList;
dev@236 763 if (featureData.length === 0) {
dev@236 764 return;
dev@236 765 }
dev@319 766
dev@64 767 // TODO refactor, this is incomprehensible
dev@319 768 try {
dev@319 769 const featureShape = deduceHigherLevelFeatureShape(
dev@319 770 featureData,
dev@319 771 outputDescriptor
dev@122 772 );
dev@319 773 switch (featureShape) {
dev@319 774 case 'instants':
dev@319 775 const plotData = featureData.map(feature => ({
dev@319 776 time: toSeconds(feature.timestamp),
dev@319 777 label: feature.label
dev@319 778 }));
dev@319 779 const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
dev@319 780 height: height,
dev@319 781 color: colour,
dev@319 782 labelPosition: 'bottom',
dev@319 783 shadeSegments: true
dev@67 784 });
dev@319 785 this.addLayer(
dev@319 786 featureLayer,
dev@319 787 waveTrack,
dev@319 788 this.timeline.timeContext
dev@319 789 );
dev@319 790 break;
dev@319 791 case 'regions':
dev@319 792 this.renderRegions(
dev@319 793 featureData,
dev@319 794 outputDescriptor,
dev@319 795 waveTrack,
dev@319 796 height,
dev@319 797 colour
dev@319 798 );
dev@319 799 break;
dev@319 800 case 'notes':
cannam@333 801 const notes = mapFeaturesToNotes(featureData, outputDescriptor);
cannam@333 802 let [min, max] = notes.reduce((acc, note) => {
cannam@333 803 const [lo, hi] = acc;
cannam@333 804 return [Math.min(lo, note.pitch), Math.max(hi, note.pitch)];
cannam@333 805 }, [Infinity, -Infinity]);
cannam@333 806 if (min === Infinity || min < 0 || max < 0) {
cannam@333 807 min = 0;
cannam@333 808 max = 127;
cannam@333 809 }
cannam@333 810 // round min and max to octave boundaries (starting at C as in MIDI)
cannam@333 811 min = 12 * Math.floor(min / 12);
cannam@333 812 max = 12 * Math.ceil(max / 12);
dev@319 813 const pianoRollLayer = new wavesUI.helpers.PianoRollLayer(
cannam@333 814 notes,
cannam@333 815 {height: height, color: colour, yDomain: [min, max] }
dev@319 816 );
dev@319 817 this.addLayer(
dev@319 818 pianoRollLayer,
dev@319 819 waveTrack,
dev@319 820 this.timeline.timeContext
dev@319 821 );
dev@319 822 break;
dev@319 823 }
dev@319 824 } catch (e) {
dev@319 825 console.warn(e); // TODO display
dev@319 826 break;
dev@64 827 }
dev@64 828 break;
dev@64 829 }
cannam@106 830 case 'matrix': {
dev@303 831 const collected = features.collected as MatrixFeature;
dev@316 832 const startTime = collected.startTime; // !!! todo: use this instead of the literal 0 passed as startTime below
cannam@296 833 const stepDuration = collected.stepDuration;
cannam@296 834 const matrixData = collected.data;
dev@335 835
dev@236 836 if (matrixData.length === 0) {
dev@236 837 return;
dev@236 838 }
dev@236 839
dev@236 840 console.log('matrix data length = ' + matrixData.length);
dev@236 841 console.log('height of first column = ' + matrixData[0].length);
cannam@109 842 const targetValue = this.estimatePercentile(matrixData, 95);
cannam@108 843 const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
dev@236 844 console.log('setting gain to ' + gain);
cannam@120 845 const matrixEntity =
cannam@120 846 new wavesUI.utils.PrefilledMatrixEntity(matrixData,
dev@301 847 0, // startTime
dev@303 848 stepDuration);
dev@236 849 const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
cannam@108 850 gain,
cannam@221 851 top: 0,
cannam@221 852 height: height,
cannam@109 853 normalise: 'none',
cannam@108 854 mapper: this.iceMapper()
cannam@108 855 });
dev@122 856 this.addLayer(
cannam@108 857 matrixLayer,
cannam@117 858 waveTrack,
cannam@108 859 this.timeline.timeContext
dev@122 860 );
cannam@108 861 break;
cannam@106 862 }
dev@67 863 default:
dev@236 864 console.log(
dev@236 865 `Cannot render an appropriate layer for feature shape '${features.shape}'`
dev@236 866 );
dev@63 867 }
dev@59 868
dev@196 869 this.isLoading = false;
dev@234 870 this.ref.markForCheck();
dev@56 871 this.timeline.tracks.update();
dev@336 872 this.animate();
dev@53 873 }
dev@53 874
dev@53 875 private animate(): void {
dev@236 876 if (!this.isSeeking) {
dev@236 877 return;
dev@236 878 }
dev@196 879
dev@31 880 this.ngZone.runOutsideAngular(() => {
dev@31 881 // listen for time passing; running outside Angular's zone keeps these per-frame updates from triggering change detection
dev@31 882 const updateSeekingCursor = () => {
dev@53 883 const currentTime = this.audioService.getCurrentTime();
dev@53 884 this.cursorLayer.currentPosition = currentTime;
dev@53 885 this.cursorLayer.update();
dev@53 886
cannam@254 887 if (typeof(this.highlightLayer) !== 'undefined') {
cannam@254 888 this.highlightLayer.currentPosition = currentTime;
cannam@254 889 this.highlightLayer.update();
cannam@254 890 }
cannam@254 891
dev@53 892 const currentOffset = this.timeline.timeContext.offset;
dev@53 893 const offsetTimestamp = currentOffset + currentTime;
dev@53 895
dev@53 896 const visibleDuration = this.timeline.timeContext.visibleDuration;
dev@53 897 // TODO reduce duplication between directions and make more declarative
dev@53 898 // this kinda logic should also be tested
dev@53 899 const mustPageForward = offsetTimestamp > visibleDuration;
dev@53 900 const mustPageBackward = currentTime < -currentOffset;
dev@53 901
dev@53 902 if (mustPageForward) {
dev@53 903 const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration;
dev@53 904
dev@301 905 this.timeline.timeContext.offset = hasSkippedMultiplePages ?
dev@301 906 -currentTime + 0.5 * visibleDuration :
dev@301 907 currentOffset - visibleDuration;
dev@51 908 this.timeline.tracks.update();
dev@34 909 }
dev@53 910
dev@53 911 if (mustPageBackward) {
dev@53 912 const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset;
dev@301 913 this.timeline.timeContext.offset = hasSkippedMultiplePages ?
dev@301 914 -currentTime + 0.5 * visibleDuration :
dev@301 915 currentOffset + visibleDuration;
dev@51 916 this.timeline.tracks.update();
dev@34 917 }
dev@53 918
dev@236 919 if (this.isPlaying) {
dev@53 920 requestAnimationFrame(updateSeekingCursor);
dev@236 921 }
dev@31 922 };
dev@31 923 updateSeekingCursor();
dev@31 924 });
dev@6 925 }
dev@16 926
dev@319 927 // TODO not sure how much of the logic in here is actually sensible w.r.t
dev@319 928 // what it functionally produces
dev@319 929 private renderRegions(featureData: FeatureList,
dev@319 930 outputDescriptor: OutputDescriptor,
dev@319 931 waveTrack: any,
dev@319 932 height: number,
dev@319 933 colour: Colour) {
dev@319 934 console.log('Output is of region type');
dev@319 935 const binCount = outputDescriptor.configured.binCount || 0;
dev@319 936 const isBarRegion = binCount >= 1 || (featureData[0].featureValues || []).length >= 1;
dev@319 937 const getSegmentArgs = () => {
dev@319 938 if (isBarRegion) {
dev@319 939
dev@319 940 // TODO refactor - this is messy
dev@319 941 interface FoldsToNumber<T> {
dev@319 942 reduce(fn: (previousValue: number,
dev@319 943 currentValue: T,
dev@319 944 currentIndex: number,
dev@319 945 array: ArrayLike<T>) => number,
dev@319 946 initialValue?: number): number;
dev@319 947 }
dev@319 948
dev@319 949 // TODO potentially change impl., i.e avoid reduce
dev@319 950 const findMin = <T>(arr: FoldsToNumber<T>,
dev@319 951 getElement: (x: T) => number): number => {
dev@319 952 return arr.reduce(
dev@319 953 (min, val) => Math.min(min, getElement(val)),
dev@319 954 Infinity
dev@319 955 );
dev@319 956 };
dev@319 957
dev@319 958 const findMax = <T>(arr: FoldsToNumber<T>,
dev@319 959 getElement: (x: T) => number): number => {
dev@319 960 return arr.reduce(
dev@319 961 (max, val) => Math.max(max, getElement(val)),
dev@319 962 -Infinity
dev@319 963 );
dev@319 964 };
dev@319 965
dev@319 966 const min = findMin<Feature>(featureData, (x: Feature) => {
dev@319 967 return findMin<number>(x.featureValues, y => y);
dev@319 968 });
dev@319 969
dev@319 970 const max = findMax<Feature>(featureData, (x: Feature) => {
dev@319 971 return findMax<number>(x.featureValues, y => y);
dev@319 972 });
dev@319 973
dev@319 974 const barHeight = 1.0 / height;
dev@319 975 return [
dev@319 976 featureData.reduce((bars, feature) => {
dev@319 977 const staticProperties = {
dev@319 978 x: toSeconds(feature.timestamp),
dev@319 979 width: toSeconds(feature.duration),
dev@319 980 height: min + barHeight,
dev@319 981 color: colour,
dev@319 982 opacity: 0.8
dev@319 983 };
dev@319 984 // TODO avoid copying Float32Array to an array - map is problematic here
dev@319 985 return bars.concat([...feature.featureValues]
dev@319 986 .map(val => Object.assign({}, staticProperties, {y: val})));
dev@319 987 }, []),
dev@319 988 {yDomain: [min, max + barHeight], height: height} as any
dev@319 989 ];
dev@319 990 } else {
dev@319 991 return [featureData.map(feature => ({
dev@319 992 x: toSeconds(feature.timestamp),
dev@319 993 width: toSeconds(feature.duration),
dev@319 994 color: colour,
dev@319 995 opacity: 0.8
dev@319 996 })), {height: height}];
dev@319 997 }
dev@319 998 };
dev@319 999
dev@319 1000 const segmentLayer = new wavesUI.helpers.SegmentLayer(
dev@319 1001 ...getSegmentArgs()
dev@319 1002 );
dev@319 1003 this.addLayer(
dev@319 1004 segmentLayer,
dev@319 1005 waveTrack,
dev@319 1006 this.timeline.timeContext
dev@319 1007 );
dev@319 1008 }
dev@319 1009
dev@122 1010 private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): void {
dev@54 1011 timeContext.zoom = 1.0;
dev@54 1012 if (!layer.timeContext) {
dev@54 1013 layer.setTimeContext(isAxis ?
dev@54 1014 timeContext : new wavesUI.core.LayerTimeContext(timeContext));
dev@54 1015 }
dev@54 1016 track.add(layer);
dev@185 1017 this.layers.push(layer);
dev@54 1018 layer.render();
dev@54 1019 layer.update();
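// appendChild on an element already in the layout moves it to the end,
// keeping the seeking cursor drawn on top of the layer just added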
dev@122 1020 if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
dev@112 1021 track.$layout.appendChild(this.cursorLayer.$el);
dev@112 1022 }
dev@59 1023 }
dev@59 1024
dev@51 1025 ngOnDestroy(): void {
dev@236 1026 if (this.featureExtractionSubscription) {
dev@196 1027 this.featureExtractionSubscription.unsubscribe();
dev@236 1028 }
dev@236 1029 if (this.playingStateSubscription) {
dev@196 1030 this.playingStateSubscription.unsubscribe();
dev@236 1031 }
dev@236 1032 if (this.seekedSubscription) {
dev@196 1033 this.seekedSubscription.unsubscribe();
dev@236 1034 }
dev@236 1035 if (this.onAudioDataSubscription) {
dev@196 1036 this.onAudioDataSubscription.unsubscribe();
dev@236 1037 }
dev@51 1038 }
dev@154 1039
dev@155 1040 seekStart(): void {
dev@155 1041 this.zoomOnMouseDown = this.timeline.timeContext.zoom;
dev@157 1042 this.offsetOnMouseDown = this.timeline.timeContext.offset;
dev@155 1043 }
dev@155 1044
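// Distinguish a click-to-seek from a pan/zoom drag: only seek if neither
// zoom nor offset changed between mousedown and mouseup.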
dev@155 1045 seekEnd(x: number): void {
dev@157 1046 const hasSameZoom: boolean = this.zoomOnMouseDown ===
dev@157 1047 this.timeline.timeContext.zoom;
dev@157 1048 const hasSameOffset: boolean = this.offsetOnMouseDown ===
dev@157 1049 this.timeline.timeContext.offset;
dev@157 1050 if (hasSameZoom && hasSameOffset) {
dev@155 1051 this.seek(x);
dev@155 1052 }
dev@155 1053 }
dev@155 1054
dev@154 1055 seek(x: number): void {
dev@154 1056 if (this.timeline) {
dev@154 1057 const timeContext: any = this.timeline.timeContext;
dev@196 1058 if (this.isSeeking) {
dev@196 1059 this.audioService.seekTo(
dev@236 1060 timeContext.timeToPixel.invert(x) - timeContext.offset
dev@196 1061 );
dev@196 1062 }
dev@154 1063 }
dev@154 1064 }
dev@6 1065 }
dev@319 1066
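// Guesses how a feature list should be drawn (instants, notes or regions)
// from the output descriptor plus a peek at the first feature.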
dev@319 1067 function deduceHigherLevelFeatureShape(featureData: FeatureList,
dev@319 1068 descriptor: OutputDescriptor)
dev@319 1069 : HigherLevelFeatureShape {
dev@319 1070 // TODO look at output descriptor instead of directly inspecting features
dev@319 1071 const hasDuration = descriptor.configured.hasDuration;
dev@319 1072 const binCount = descriptor.configured.binCount;
dev@319 1073 const isMarker = !hasDuration
dev@319 1074 && binCount === 0
dev@319 1075 && featureData[0].featureValues == null;
dev@319 1076
dev@319 1077 const isMaybeNote = getCanonicalNoteLikeUnit(descriptor.configured.unit) != null
dev@319 1078 && [1, 2].some(nBins => nBins === binCount);
dev@319 1079
dev@319 1080 const isRegionLike = hasDuration && featureData[0].timestamp != null;
dev@319 1081
dev@319 1082 const isNote = isMaybeNote && isRegionLike;
dev@319 1083 const isRegion = !isMaybeNote && isRegionLike;
dev@319 1084 if (isMarker) {
dev@319 1085 return 'instants';
dev@319 1086 }
dev@319 1087 if (isNote) {
dev@319 1088 return 'notes';
dev@319 1089 }
dev@319 1090 if (isRegion) {
dev@319 1091 return 'regions';
dev@319 1092 }
dev@335 1093 throw new Error('No shape could be deduced');
dev@319 1094 }
dev@319 1095
dev@319 1096 function getCanonicalNoteLikeUnit(unit: string): NoteLikeUnit | null {
if (!unit) {
return null; // outputs with no configured unit cannot be note-like
}
dev@319 1097 const canonicalUnits: NoteLikeUnit[] = ['midi', 'hz'];
dev@319 1098 return canonicalUnits.find(canonicalUnit => {
dev@335 1099 return unit.toLowerCase().indexOf(canonicalUnit) >= 0;
dev@319 1100 }) || null;
dev@319 1101 }
dev@319 1102
dev@319 1103 function mapFeaturesToNotes(featureData: FeatureList,
dev@319 1104 descriptor: OutputDescriptor): Note[] {
dev@319 1105 const canonicalUnit = getCanonicalNoteLikeUnit(descriptor.configured.unit);
dev@319 1106 const isHz = canonicalUnit === 'hz';
dev@319 1107 return featureData.map(feature => ({
dev@319 1108 time: toSeconds(feature.timestamp),
dev@319 1109 duration: toSeconds(feature.duration),
dev@319 1110 pitch: isHz ?
dev@319 1111 frequencyToMidiNote(feature.featureValues[0]) : feature.featureValues[0]
dev@319 1112 }));
dev@319 1113 }
dev@319 1114
dev@319 1115 function frequencyToMidiNote(frequency: number,
dev@319 1116 concertA: number = 440.0): number {
dev@319 1117 return 69 + 12 * Math.log2(frequency / concertA);
dev@319 1118 }
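// e.g. frequencyToMidiNote(440) === 69 (concert A), and
// frequencyToMidiNote(261.63) is ~60 (middle C)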