annotate src/com/aurellem/capture/audio/AudioSendRenderer.java @ 65:23e3df41db3c

reformatting for web
author Robert McIntyre <rlm@mit.edu>
date Sat, 11 Feb 2012 12:25:26 -0700
parents 2f129118e2d6
children
package com.aurellem.capture.audio;

import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.sound.sampled.AudioFormat;

import org.lwjgl.LWJGLException;
import org.lwjgl.openal.AL;
import org.lwjgl.openal.AL10;
import org.lwjgl.openal.ALCdevice;
import org.lwjgl.openal.OpenALException;

import com.aurellem.send.AudioSend;
import com.jme3.audio.Listener;
import com.jme3.audio.lwjgl.LwjglAudioRenderer;
import com.jme3.math.Vector3f;
import com.jme3.system.JmeSystem;
import com.jme3.system.Natives;
import com.jme3.util.BufferUtils;

public class AudioSendRenderer
    extends LwjglAudioRenderer implements MultiListener {

    private AudioSend audioSend;
    private AudioFormat outFormat;

    /**
     * Keeps track of all the listeners which have been registered
     * so far. The first element is <code>null</code>, which
     * represents the zeroth LWJGL listener which is created
     * automatically.
     */
    public Vector<Listener> listeners = new Vector<Listener>();

    public void initialize(){
        super.initialize();
        listeners.add(null);
    }

    /**
     * Used by the native methods which require the OpenAL device
     * ID. Currently it is obtained through reflection.
     */
    private long deviceID;

    /**
     * Ensures that <code>deviceID</code> and <code>listeners</code>
     * are properly initialized before any additional listeners are
     * added.
     */
    private CountDownLatch latch = new CountDownLatch(1);

    /**
     * Each listener (including the main LWJGL listener) can be
     * registered with a <code>SoundProcessor</code>, which this
     * Renderer will call whenever there is new audio data to be
     * processed.
     */
    public HashMap<Listener, SoundProcessor> soundProcessorMap =
        new HashMap<Listener, SoundProcessor>();

    /**
     * Create a new slave context on the recorder device which
     * will render all the sounds in the main LWJGL context with
     * respect to this listener.
     */
    public void addListener(Listener l) {
        try {this.latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        audioSend.addListener();
        this.listeners.add(l);
        l.setRenderer(this);
    }
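
    // Illustrative sketch, not part of the original source: a second
    // listener can hear the scene from its own point of view and feed
    // its own processor.  "audioSendRenderer" and "mySoundProcessor"
    // are hypothetical names standing in for objects your application
    // already has.
    //
    //   Listener ear = new Listener();
    //   ear.setLocation(new Vector3f(0, 0, 10));
    //   audioSendRenderer.addListener(ear);
    //   audioSendRenderer.registerSoundProcessor(ear, mySoundProcessor);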

    /**
     * Whenever new data is rendered in the perspective of this
     * listener, this Renderer will send that data to the
     * SoundProcessor of your choosing.
     */
    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
        this.soundProcessorMap.put(l, sp);
    }

    /**
     * Registers a SoundProcessor for the main LWJGL context. If all
     * you want to do is record the sound you would normally hear in
     * your application, then this is the only method you have to
     * worry about.
     */
    public void registerSoundProcessor(SoundProcessor sp){
        // register a sound processor for the default listener.
        this.soundProcessorMap.put(null, sp);
    }
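
    // Illustrative sketch, not part of the original source: to capture
    // exactly what you would normally hear, one call is enough.
    // "recorder" is a stand-in for any SoundProcessor implementation,
    // for example one that writes the incoming bytes to a WAV file.
    //
    //   audioSendRenderer.registerSoundProcessor(recorder);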

    private static final Logger logger =
        Logger.getLogger(AudioSendRenderer.class.getName());

    /**
     * Instead of taking whatever device is available on the system,
     * this call creates the "Multiple Audio Send" device, which
     * supports multiple listeners in a limited capacity. For each
     * listener, the device renders the audio not to the sound card,
     * but to buffers which it makes available via JNI.
     */
    public void initInThread(){
        try{
            switch (JmeSystem.getPlatform()){
            case Windows64:
                Natives.extractNativeLib("windows/audioSend",
                                         "OpenAL64", true, true);
                break;
            case Windows32:
                Natives.extractNativeLib("windows/audioSend",
                                         "OpenAL32", true, true);
                break;
            case Linux64:
                Natives.extractNativeLib("linux/audioSend",
                                         "openal64", true, true);
                break;
            case Linux32:
                Natives.extractNativeLib("linux/audioSend",
                                         "openal", true, true);
                break;
            }
        }
        catch (IOException ex) {ex.printStackTrace();}

        try{
            if (!AL.isCreated()){
                AL.create("Multiple Audio Send", 44100, 60, false);
            }
        }catch (OpenALException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }catch (LWJGLException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }
        super.initInThread();

        ALCdevice device = AL.getDevice();

        // RLM: use reflection to grab the ID of our device for use
        // later.
        try {
            Field deviceIDField;
            deviceIDField = ALCdevice.class.getDeclaredField("device");
            deviceIDField.setAccessible(true);
            try {deviceID = (Long)deviceIDField.get(device);}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
            deviceIDField.setAccessible(false);
        }
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        this.audioSend = new AudioSend(this.deviceID);
        this.outFormat = audioSend.getAudioFormat();
        initBuffer();

        // The LWJGL context must be established as the master context
        // before any other listeners can be created on this device.
        audioSend.initDevice();
        // Now, everything is initialized, and it is safe to add more
        // listeners.
        latch.countDown();
    }

    public void cleanup(){
        for(SoundProcessor sp : this.soundProcessorMap.values()){
            sp.cleanup();
        }
        super.cleanup();
    }

    /**
     * Push each listener's position, velocity, up direction, and gain
     * to its corresponding context on the audio-send device.
     */
    public void updateAllListeners(){
        for (int i = 0; i < this.listeners.size(); i++){
            Listener lis = this.listeners.get(i);
            if (null != lis){
                Vector3f location = lis.getLocation();
                Vector3f velocity = lis.getVelocity();
                Vector3f orientation = lis.getUp();
                float gain = lis.getVolume();
                audioSend.setNthListener3f
                    (AL10.AL_POSITION,
                     location.x, location.y, location.z, i);
                audioSend.setNthListener3f
                    (AL10.AL_VELOCITY,
                     velocity.x, velocity.y, velocity.z, i);
                audioSend.setNthListener3f
                    (AL10.AL_ORIENTATION,
                     orientation.x, orientation.y, orientation.z, i);
                audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
            }
        }
    }

    private ByteBuffer buffer;

    public static final int MIN_FRAMERATE = 10;

    private void initBuffer(){
        int bufferSize =
            (int)(this.outFormat.getSampleRate() /
                  ((float)MIN_FRAMERATE)) *
            this.outFormat.getFrameSize();

        this.buffer = BufferUtils.createByteBuffer(bufferSize);
    }
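
    // Worked example (illustrative): if the device reports 44100 Hz
    // 16-bit stereo audio (a 4-byte frame), then bufferSize =
    // (44100 / 10) * 4 = 17640 bytes, i.e. enough room for one video
    // frame of audio even at MIN_FRAMERATE frames per second.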

    /**
     * Render <code>tpf</code> seconds of audio on the audio-send
     * device, then hand each listener's samples to its registered
     * SoundProcessor, if any.
     */
    public void dispatchAudio(float tpf){

        int samplesToGet = (int) (tpf * outFormat.getSampleRate());
        try {latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        audioSend.step(samplesToGet);
        updateAllListeners();

        for (int i = 0; i < this.listeners.size(); i++){
            buffer.clear();
            audioSend.getSamples(buffer, samplesToGet, i);
            SoundProcessor sp =
                this.soundProcessorMap.get(this.listeners.get(i));
            if (null != sp){
                sp.process
                    (buffer,
                     samplesToGet*outFormat.getFrameSize(), outFormat);}
        }
    }
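
    // Worked example (illustrative): at 60 video frames per second,
    // tpf is roughly 1/60 s, so with a 44100 Hz device each call to
    // dispatchAudio fetches about 44100 / 60 = 735 samples per
    // listener.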

    public void update(float tpf){
        super.update(tpf);
        // After the usual update, render and deliver this frame's
        // audio to any registered SoundProcessors.
        dispatchAudio(tpf);
    }
}