view src/uk/ac/qmul/eecs/depic/daw/haptics/HapticTrigger.java @ 2:c0412c81d274
Added documentation
author | Fiore Martin <f.martin@qmul.ac.uk>
---|---
date | Thu, 18 Feb 2016 18:35:26 +0000
parents | 3074a84ef81e
children |
package uk.ac.qmul.eecs.depic.daw.haptics;

import uk.ac.qmul.eecs.depic.daw.Chunk;
import uk.ac.qmul.eecs.depic.daw.SoundWave;
import uk.ac.qmul.eecs.depic.daw.SoundWaveEvent;
import uk.ac.qmul.eecs.depic.daw.SoundWaveListener;
import uk.ac.qmul.eecs.depic.jhapticgui.Haptics;
import uk.ac.qmul.eecs.depic.patterns.MathUtils;
import uk.ac.qmul.eecs.depic.patterns.Range;
import uk.ac.qmul.eecs.depic.patterns.Sequence;
import uk.ac.qmul.eecs.depic.patterns.Sequence.Value;
import uk.ac.qmul.eecs.depic.patterns.SequenceEvent;
import uk.ac.qmul.eecs.depic.patterns.SequenceListener;
import uk.ac.qmul.eecs.depic.patterns.SequenceMapping;

/**
 * Listens to elements of the DAW and sends commands to the haptic device in order to
 * render such elements in haptics. It implements all the listener interfaces because it reacts
 * to changes and mirrors them in haptics.
 *
 * It is used to render sound waves and automations (sequences) in haptics.
 */
public class HapticTrigger implements SequenceMapping, SequenceListener, SoundWaveListener {

    public enum DisplayType {
        DISPLAY_SEQUENCE,
        DISPLAY_NONE,
        DISPLAY_PEAKS,
        DISPLAY_RENDER_CURVE_AT,
        DISPLAY_RENDER_VALUE
    }

    public abstract class Command {
        /**
         * Message to display a graph.
         *
         * Arguments: initial y value, max y value, min y value, max x value, x viewport size
         */
        public static final String DISPLAY_SEQUENCE = "display.seq";
        public static final String DISPLAY_RENDER_CURVE_AT = "display.curve_at";
        public static final String DISPLAY_RENDER_VALUE = "display.value";

        /**
         * Display nothing. Takes no arguments.
         */
        public static final String DISPLAY_NONE = "display.none";
        public static final String DISPLAY_PEAKS = "display.peaks";

        /**
         * Arguments: x time position, y in normalized form
         */
        public static final String SEQUENCE_VALUE_ADD = "seq.value.add";
        public static final String SEQUENCE_VALUE_CHANGE = "seq.value.change";
        public static final String SEQUENCE_VALUE_REMOVE = "seq.value.rem";
        public static final String SEQUENCE_VALUE_FIND = "seq.value.find";
        public static final String SEQUENCE_SHIFT = "seq.shift";
        public static final String SEQUENCE_BEGIN = "seq.begin";
        public static final String RENDER_VALUE = "render_value";
        public static final String RENDER_CURVE_AT = "render_curve_at";

        /**
         * Message to rotate the viscosity scrub line in the haptic space.
         *
         * Argument is the degree of rotation.
         */
        public static final String ROTATE_Z = "rotate.z";
        public static final String ROTATE_Y = "rotate.y";
        public static final String ROTATE_X = "rotate.x";
    }

    private DisplayType displayType;
    private Haptics haptics;

    public HapticTrigger(Haptics haptics, DisplayType type) {
        if (haptics == null)
            throw new IllegalArgumentException("haptics cannot be null");
        this.haptics = haptics;
        displayType = type;
        haptics.sendMessage(Command.DISPLAY_NONE, "", 0);
    }

    @Override
    public void renderValue(Value val) {
        /* sends a normalized value of the chunk size */
        haptics.sendMessage(Command.RENDER_VALUE, Float.toString(val.getValue()), val.hashCode());
    }

    @Override
    public void renderCurve(Sequence m, float startTime) {
        throw new UnsupportedOperationException("Only update implemented");
    }

    @Override
    public void renderCurveAt(Sequence sequence, float time, float duration) {
        float val = new MathUtils.Interpolate(sequence).linear(time);
        /* normalize the value to the [0-1] range */
        float normalizedValue = new MathUtils.Scale(sequence.getRange(), new Range<Float>(0.0f, 1.0f)).linear(val);
        haptics.sendMessage(Command.RENDER_CURVE_AT, "" + normalizedValue, sequence.hashCode());
    }

    @Override
    public <T extends SequenceEvent> void sequenceUpdated(T t) {
        SequenceEvent.What evtType = t.getWhat();
        if (displayType == DisplayType.DISPLAY_SEQUENCE) {
            if (SequenceEvent.What.VALUE_ADDED.equals(evtType)) {
                Sequence.Value aVal = t.getValue();
                /* the haptic graph y values are in normalized form, so normalize first */
                float verticalValue = new MathUtils.Scale(t.getSource().getRange(), Range.NORMALIZED_RANGE_F).linear(aVal.getValue());
                haptics.sendMessage(Command.SEQUENCE_VALUE_ADD, aVal.getTimePosition() + " " + verticalValue, aVal.hashCode());
            } else if (SequenceEvent.What.VALUE_CHANGED.equals(evtType)) {
                Sequence.Value aVal = t.getValue();
                float verticalValue = new MathUtils.Scale(t.getSource().getRange(), Range.NORMALIZED_RANGE_F).linear(aVal.getValue());
                haptics.sendMessage(Command.SEQUENCE_VALUE_CHANGE, aVal.getTimePosition() + " " + verticalValue, aVal.hashCode());
            } else if (SequenceEvent.What.VALUE_REMOVED.equals(evtType)) {
                Sequence.Value aVal = t.getValue();
                haptics.sendMessage(Command.SEQUENCE_VALUE_REMOVE, "", aVal.hashCode());
            } else if (SequenceEvent.What.BEGIN_CHANGED.equals(evtType)) {
                Sequence sequence = t.getSource();
                haptics.sendMessage(Command.SEQUENCE_BEGIN,
                        "" + new MathUtils.Scale(sequence.getRange(), Range.NORMALIZED_RANGE_F).linear(sequence.getBegin()), 0);
            }
        }
    }

    /**
     * Gets an update from a {@code SoundWave} this class is a listener of, and sends a message to the
     * {@code HapticDevice} thread accordingly.
     *
     * @param evt the new event
     */
    @Override
    public void update(SoundWaveEvent evt) {
        String evtType = evt.getType();
        if (SoundWaveEvent.POSITION_CHANGED.equals(evtType)) {
            SoundWave wave = evt.getSource();
            int pos = (Integer) evt.getArgs();
            if (pos < wave.getChunkNum()) {
                Chunk chunk = wave.getChunkAt(pos);
                float chunkSize = chunk.getNormEnd() - chunk.getNormStart();
                /* sends a normalized value of the chunk size */
                haptics.sendMessage(Command.RENDER_VALUE, Float.toString(chunkSize), chunk.hashCode());
            } else {
                /* if the scrub goes past the audio wave, just send wave amp = 0 */
                haptics.sendMessage(Command.RENDER_VALUE, "0", 0);
            }
        } else if (SoundWaveEvent.CLOSE.equals(evtType)) {
            /* removes itself from listeners */
            evt.getSource().removeSoundWaveListener(this);
        }
    }
}
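
For illustration, a minimal wiring sketch under stated assumptions: the HapticTrigger(Haptics, DisplayType) constructor and DisplayType.DISPLAY_SEQUENCE come from the class above, while addSequenceListener and addSoundWaveListener are hypothetical registration methods (only removeSoundWaveListener appears in this file).

/* Hypothetical usage sketch, not part of the changeset above. */
import uk.ac.qmul.eecs.depic.daw.SoundWave;
import uk.ac.qmul.eecs.depic.daw.haptics.HapticTrigger;
import uk.ac.qmul.eecs.depic.jhapticgui.Haptics;
import uk.ac.qmul.eecs.depic.patterns.Sequence;

public class HapticTriggerWiring {

    /** Mirrors one automation sequence and one sound wave on the haptic device. */
    public static HapticTrigger attach(Haptics haptics, Sequence automation, SoundWave wave) {
        /* DISPLAY_SEQUENCE makes sequenceUpdated() forward value add/change/remove events */
        HapticTrigger trigger = new HapticTrigger(haptics, HapticTrigger.DisplayType.DISPLAY_SEQUENCE);

        /* assumed registration methods: counterparts of removeSoundWaveListener(this) above */
        automation.addSequenceListener(trigger);
        wave.addSoundWaveListener(trigger);

        return trigger;
    }
}

In this arrangement the registration would happen wherever the DAW creates its automation lanes and loads audio, so that every subsequent edit or scrub-position change is mirrored in haptics by the listener callbacks shown above.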