view tim_grid_mapper/grid_mapper.py @ 3:7bd1f07044ab

Added grid_mapper to map from Kinect data to Joe's synth and Ableton. Added OSC.py, which is a library file. You need it to run grid_mapper.py, but please don't change it.
author Tim MB <tim.murraybrowne@eecs.qmul.ac.uk>
date Tue, 15 Feb 2011 17:31:11 +0000
parents
children 5f9ad838d417
line wrap: on
line source
'''
grid_mapper.py - maintained by Tim.

This module implements a mapping from person positions (id, x, y, z) to
pitch, velocity and channel values for Joe's synthesiser, and to controller
data for Ableton.

'''

from OSC import ThreadingOSCServer, OSCClient, OSCMessage, OSCClientError

#### PUBLIC OPTIONS ####
num_instruments = 3 # number of output channels

#### OSC OPTIONS - THESE NEED TO BE SET MANUALLY ####
my_port = 2001 # to receive OSC messages
# NOTE(review): the port fields below are placeholder strings and must be
# replaced with integer port numbers before client.sendto() will work.
joe     = ('localhost', " PUT JOE's PORT NUMBER IN HERE")
ableton = ('localhost', " PUT ABLETON'S PORT NUMBER IN HERE ")

### Constants for grid mapping:
# The range of values that the input coordinates and output values may take:
# (ranges are inclusive)
MIN = {
	'x'		: 0.,
	'y'		: 0.,
	'z'		: 0.,
	'pitch'	: 0,
	'cc1'   : 0,
	'cc2'   : 0,
}
MAX = {
	'x'     : 1.,
	'y'     : 1.,
	'z'     : 1.,
	'pitch' : 15,
	'cc1'   : 127,
	'cc2'   : 127,
}



#### PRIVATE VARIABLES ####
person_positions  = {} # mapping from personId to x,y,z
last_update_times = {} # mapping from personId to time of last update

# OSC OBJECTS
# BUG FIX: OSCServer expects a single (host, port) address tuple as its
# first argument.  The original ('localhost'), my_port is just the string
# 'localhost' followed by my_port as the second positional argument (the
# client), so the server never listened on my_port.
server = ThreadingOSCServer(('localhost', my_port))
client = OSCClient()


def send_to_joe(data, address='/test'):
	'''Sends `data` to Joe directly as an OSC message.
	
	`data` is an iterable of message arguments appended to an OSC message
	addressed to `address`, which is then sent to the module-level `joe`
	(host, port) endpoint via the shared `client`.
	'''
	# BUG FIX: was misspelt "OSCMesssage", raising NameError on every call.
	message = OSCMessage(address)
	message.extend(data)
	client.sendto(message, joe)



def person_handler(address, tags, data, client_address):
	''' Handles OSC input matching the 'person' tag.
	
	`data` is [person_id, x, y, z]
	
	Maps the position through grid_map() and formats the result for Joe.
	NOTE(review): this handler is still incomplete - the formatted data is
	never actually sent anywhere (see TODO at the bottom).
	'''
	# BUG FIX: grid_map takes (person_id, x, y, z) as separate arguments,
	# so the incoming data list must be unpacked.
	pitch, velocity, channel, cc1, cc2 = grid_map(*data)
	
	## Format data for Joe - done using Specification.txt on 2011-02-15
	
	# constrain and round off pitch and velocity
	# (int() because round() returns a float, and pitch is used as a list
	# index below)
	pitch = int(max(min(round(pitch), MAX['pitch']), MIN['pitch']))
	# BUG FIX: was clamping round(pitch) again, and via MIN/MAX['velocity']
	# keys that do not exist in the dicts (KeyError).  Clamp velocity to
	# the MIDI range instead.
	velocity = int(max(min(round(velocity), 127), 0))
	
	# turn integer pitch into a single 1 in a boolean array of 0s
	boolean_note_array = [0] * (MAX['pitch'] - MIN['pitch'] + 1)
	# BUG FIX: index relative to MIN['pitch'] so a pitch at the bottom of
	# the range maps to index 0.
	boolean_note_array[pitch - MIN['pitch']] = 1
	
	# separate boolean for note-on and note-off
	note_on = velocity > 0
	# Never send velocity == 0
	if velocity == 0:
		velocity = 127
	# TODO: THIS NEEDS FINISHING - nothing is sent to Joe or Ableton yet.
	# (The original bare "THIS NEEDS FINISHING." line was a SyntaxError
	# that prevented the whole module from importing.)
	



def grid_map(person_id, x, y, z):
	'''This function maps from a person's location to MIDI data
	returning a tuple (pitch, velocity, channel, cc1, cc2).
	
	The current mapping creates higher pitch values as the person moves
	closer to the Kinect (i.e. z decreases). x and y values are mapped to cc1 
	and cc2 (to be sent straight to Ableton and determined by a particular
	synth)
	
	NB. channel == person_id and velocity==0 when note is off.
	Midi-Velocity is currently unimplemented but will use Person-velocity data
	when that becomes available.
	This function does not guarantee that the output will be in range if the 
	input goes out of range.
	'''
	# NOTE(review): the docstring says pitch rises as z *decreases*, but
	# this maps MIN['z']->MIN['pitch'] and MAX['z']->MAX['pitch'], i.e.
	# pitch rises with z.  Left as-is; confirm the intended direction.
	pitch = round(interpolate(z, 
	                          MIN['z'],     MAX['z'],
	                          MIN['pitch'], MAX['pitch'])
	        )
	cc1 = round(interpolate(x, 
	                          MIN['x'],     MAX['x'],
	                          MIN['cc1'], MAX['cc1'])
	        )
	# BUG FIX: cc2 was interpolating x (copy-paste error) over the y range;
	# the docstring says y maps to cc2.
	cc2 = round(interpolate(y, 
	                          MIN['y'],     MAX['y'],
	                          MIN['cc2'], MAX['cc2'])
	        )
	# placeholder until Person-velocity data becomes available
	velocity = 127
	return (pitch, velocity, person_id, cc1, cc2)
	



def interpolate(x, a, b, A, B):
	''' Interpolates x from the range [a, b] to the range [A, B].
	
	Linear mapping: a maps to A, b maps to B, and values of x outside
	[a, b] extrapolate along the same line, i.e. (B-A)*(x-a)/(b-a) + A.
	'''
	# Scale the offset of x within [a, b] onto the target span, keeping
	# the original operation order (multiply before divide).
	scaled_offset = (B - A) * (x - a)
	return scaled_offset / (b - a) + A