jmeCapture: diff src/com/aurellem/capture/audio/AudioSendRenderer.java @ 11:8a6b1684f536
refactored.
author | Robert McIntyre <rlm@mit.edu>
---|---
date | Thu, 27 Oct 2011 02:27:02 -0700
parents |
children | 5249c8a9603c 9f58273090df
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/com/aurellem/capture/audio/AudioSendRenderer.java	Thu Oct 27 02:27:02 2011 -0700
@@ -0,0 +1,195 @@

package com.aurellem.capture.audio;

import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.lwjgl.LWJGLException;
import org.lwjgl.openal.AL;
import org.lwjgl.openal.AL10;
import org.lwjgl.openal.ALCdevice;
import org.lwjgl.openal.OpenALException;

import com.aurellem.send.AudioSend;
import com.jme3.audio.Listener;
import com.jme3.audio.lwjgl.LwjglAudioRenderer;
import com.jme3.math.Vector3f;
import com.jme3.util.BufferUtils;

public class AudioSendRenderer extends LwjglAudioRenderer implements MultiListener {

    private AudioSend audioSend;

    /**
     * Keeps track of all the listeners which have been registered so far.
     * The first element is <code>null</code>, which represents the zeroth
     * LWJGL listener which is created automatically.
     */
    public Vector<Listener> listeners = new Vector<Listener>();

    public void initialize(){
        super.initialize();
        listeners.add(null);
    }

    /**
     * Used to call the native methods which require the OpenAL device ID.
     * Currently it is obtained through reflection.
     */
    private long deviceID;

    /**
     * Ensures that <code>deviceID</code> and <code>listeners</code> are
     * properly initialized before any additional listeners are added.
     */
    private CountDownLatch latch = new CountDownLatch(1);

    /**
     * Each listener (including the main LWJGL listener) can be registered
     * with a <code>SoundProcessor</code>, which this renderer will call
     * whenever there is new audio data to be processed.
     */
    public HashMap<Listener, SoundProcessor> soundProcessorMap =
        new HashMap<Listener, SoundProcessor>();

    /**
     * Creates a new slave context on the recorder device which will render
     * all the sounds in the main LWJGL context with respect to this listener.
     */
    public void addListener(Listener l) {
        try {this.latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        audioSend.addListener();
        this.listeners.add(l);
    }

    /**
     * Whenever new data is rendered from the perspective of this listener,
     * this renderer will send that data to the SoundProcessor of your choosing.
     */
    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
        this.soundProcessorMap.put(l, sp);
    }

    /**
     * Registers a SoundProcessor for the main LWJGL context. If all you want
     * to do is record the sound you would normally hear in your application,
     * then this is the only method you have to worry about.
     */
    public void registerSoundProcessor(SoundProcessor sp){
        // Register a sound processor for the default listener.
        this.soundProcessorMap.put(null, sp);
    }

    private static final Logger logger =
        Logger.getLogger(AudioSendRenderer.class.getName());

    /**
     * Instead of taking whatever device is available on the system, this call
     * creates the "Multiple Audio Send" device, which supports multiple
     * listeners in a limited capacity. For each listener, the device renders
     * the sound not to the physical audio device, but to buffers which it
     * makes available via JNI.
     */
    public void initInThread(){
        try{
            if (!AL.isCreated()){
                AL.create("Multiple Audio Send", 44100, 60, false);
            }
        }catch (OpenALException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }catch (LWJGLException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }
        super.initInThread();

        ALCdevice device = AL.getDevice();

        // RLM: use reflection to grab the ID of our device for use later.
        try {
            Field deviceIDField;
            deviceIDField = ALCdevice.class.getDeclaredField("device");
            deviceIDField.setAccessible(true);
            try {deviceID = (Long)deviceIDField.get(device);}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
            deviceIDField.setAccessible(false);
        }
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        this.audioSend = new AudioSend(this.deviceID);

        // The LWJGL context must be established as the master context before
        // any other listeners can be created on this device.
        audioSend.initDevice();
        // Now everything is initialized, and it is safe to add more listeners.
        latch.countDown();
    }

    public void cleanup(){
        for(SoundProcessor sp : this.soundProcessorMap.values()){
            sp.cleanup();
        }
        super.cleanup();
    }

    public void updateAllListeners(){
        for (int i = 0; i < this.listeners.size(); i++){
            Listener lis = this.listeners.get(i);
            if (null != lis){
                Vector3f location = lis.getLocation();
                Vector3f velocity = lis.getVelocity();
                Vector3f orientation = lis.getUp();
                float gain = lis.getVolume();
                audioSend.setNthListener3f(AL10.AL_POSITION,
                                           location.x, location.y, location.z, i);
                audioSend.setNthListener3f(AL10.AL_VELOCITY,
                                           velocity.x, velocity.y, velocity.z, i);
                audioSend.setNthListener3f(AL10.AL_ORIENTATION,
                                           orientation.x, orientation.y, orientation.z, i);
                audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
            }
        }
    }

    public final static int BYTES_PER_SAMPLE = 4;
    private ByteBuffer buffer = BufferUtils.createByteBuffer(4096);

    public void dispatchAudio(float tpf){
        int samplesToGet = (int) (tpf * 44100);
        try {latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        audioSend.step(samplesToGet);
        updateAllListeners();

        for (int i = 0; i < this.listeners.size(); i++){
            buffer.clear();
            audioSend.getSamples(buffer, samplesToGet, i);
            SoundProcessor sp =
                this.soundProcessorMap.get(this.listeners.get(i));
            if (null != sp){sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);}
        }
    }

    public void update(float tpf){
        super.update(tpf);
        dispatchAudio(tpf);
    }
}
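
The SoundProcessor type itself is not part of this changeset; the renderer only ever calls process(ByteBuffer, int) with freshly rendered bytes and cleanup() on shutdown. As a rough illustration of something that could be handed to registerSoundProcessor(...), here is a minimal sketch that simply counts the bytes it receives. The two-method interface shape and the class name are assumptions inferred from the calls made in this file, not a definitive part of the library.

package com.aurellem.capture.audio;

import java.nio.ByteBuffer;

/**
 * Minimal sketch of a SoundProcessor. The shape (process/cleanup) is assumed
 * from the calls in dispatchAudio() and cleanup() above; adjust to the real
 * interface as needed.
 */
public class ByteCountingProcessor implements SoundProcessor {

    private long totalBytes = 0;

    // Called by AudioSendRenderer whenever new audio has been rendered for
    // the listener this processor is registered with. numBytes is
    // samplesToGet * BYTES_PER_SAMPLE.
    public void process(ByteBuffer audioSamples, int numBytes) {
        totalBytes += numBytes;
    }

    // Called once from AudioSendRenderer.cleanup().
    public void cleanup() {
        System.out.println("processed " + totalBytes + " bytes of audio");
    }
}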
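
Assuming the renderer above is already installed as the application's audio renderer (that wiring happens elsewhere in the project), registering processors might look like the sketch below. The helper class, the variable names, and ByteCountingProcessor are illustrative placeholders, not part of this changeset.

package com.aurellem.capture.audio;

import com.jme3.audio.Listener;

/** Illustrative wiring of processors onto an AudioSendRenderer. */
public class AudioSendSetupSketch {

    public static void attachProcessors(AudioSendRenderer renderer) {
        // Capture exactly what the normal (zeroth) LWJGL listener hears.
        renderer.registerSoundProcessor(new ByteCountingProcessor());

        // Add a second listener with its own processor. addListener() blocks
        // until initInThread() has counted down the latch, then a slave
        // context is created for the new listener on the send device.
        Listener ear = new Listener();
        renderer.addListener(ear);
        renderer.registerSoundProcessor(ear, new ByteCountingProcessor());

        // The application is still responsible for moving "ear" around;
        // updateAllListeners() pushes its position, velocity, orientation,
        // and gain to the send device every frame.
    }
}

At a steady 60 frames per second, tpf is roughly 1/60 s, so dispatchAudio() requests (int)(44100 / 60) = 735 samples per frame and hands each registered processor 735 * 4 = 2940 bytes.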