view src/com/aurellem/capture/AudioSend.java @ 4:edaa7e7806e4

migrated IsoTimer
author Robert McIntyre <rlm@mit.edu>
date Tue, 25 Oct 2011 12:03:01 -0700
parents a92de00f0414
children
line source
package com.aurellem.capture;

import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.lwjgl.LWJGLException;
import org.lwjgl.openal.AL;
import org.lwjgl.openal.AL10;
import org.lwjgl.openal.ALCdevice;
import org.lwjgl.openal.OpenALException;

import com.jme3.audio.Listener;
import com.jme3.audio.lwjgl.LwjglAudioRenderer;
import com.jme3.math.Vector3f;
import com.jme3.util.BufferUtils;
public class AudioSend
    extends LwjglAudioRenderer implements MultiListener {

    /**
     * Keeps track of all the listeners which have been registered so far.
     * The first element is <code>null</code>, which represents the zeroth
     * LWJGL listener, which is created automatically.
     */
    public Vector<Listener> listeners = new Vector<Listener>();

    public void initialize(){
        super.initialize();
        listeners.add(null);
    }
    /**
     * This is used to call the native methods which require the OpenAL
     * device ID. Currently it is obtained through reflection.
     */
    private long deviceID;

    /**
     * Ensures that <code>deviceID</code> and <code>listeners</code> are
     * properly initialized before any additional listeners are added.
     */
    private CountDownLatch latch = new CountDownLatch(1);

    private void waitForInit(){
        try {latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
    }
    /**
     * Each listener (including the main LWJGL listener) can be registered
     * with a <code>SoundProcessor</code>, which this renderer will call
     * whenever there is new audio data to be processed.
     */
    public HashMap<Listener, SoundProcessor> soundProcessorMap =
        new HashMap<Listener, SoundProcessor>();
    /**
     * Create a new slave context on the recorder device which will render
     * all the sounds in the main LWJGL context with respect to this listener.
     */
    public void addListener(Listener l) {
        waitForInit();
        this.addListener();
        this.listeners.add(l);
    }
    /**
     * Whenever new data is rendered from the perspective of this listener,
     * this renderer will send that data to the SoundProcessor of your
     * choosing.
     */
    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
        this.soundProcessorMap.put(l, sp);
    }

    /**
     * Registers a SoundProcessor for the main LWJGL context. If all you want
     * to do is record the sound you would normally hear in your application,
     * then this is the only method you have to worry about.
     */
    public void registerSoundProcessor(SoundProcessor sp){
        // register a sound processor for the default listener.
        this.soundProcessorMap.put(null, sp);
    }
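    // A minimal usage sketch (an assumption, not part of this file): it
    // presumes SoundProcessor declares the process(ByteBuffer, int) and
    // cleanup() methods that this class invokes in dispatchAudio() and
    // cleanup(). Registering a processor for the default listener is enough
    // to capture whatever the application would normally play:
    //
    //     audioSend.registerSoundProcessor(new SoundProcessor() {
    //         public void process(ByteBuffer audio, int numBytes) {
    //             // consume numBytes of rendered audio, e.g. copy it out
    //             // or write it to a file.
    //         }
    //         public void cleanup() {}
    //     });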
    private static final Logger logger =
        Logger.getLogger(AudioSend.class.getName());

    //////////// Native Methods

    /**
     * This establishes the LWJGL context as the context which will be copied
     * to all other contexts. It must be called before any calls to
     * <code>addListener();</code>
     */
    public void initDevice(){
        ninitDevice(this.deviceID);
    }
    public static native void ninitDevice(long device);
    /**
     * The send device does not automatically process sound. This step
     * function will cause the desired number of samples to be processed for
     * each listener. The results will then be available via calls to
     * <code>getSamples()</code> for each listener.
     * @param samples
     */
    public void step(int samples){
        nstep(this.deviceID, samples);
    }
    public static native void nstep(long device, int samples);

    /**
     * Retrieve the final rendered sound for a particular listener.
     * <code>contextNum == 0</code> is the main LWJGL context.
     * @param buffer
     * @param samples
     * @param contextNum
     */
    public void getSamples(ByteBuffer buffer, int samples, int contextNum){
        ngetSamples(this.deviceID, buffer, buffer.position(), samples, contextNum);
    }
    public static native void ngetSamples(
        long device, ByteBuffer buffer, int position, int samples, int contextNum);
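    // Sketch of driving the send device by hand instead of relying on
    // dispatchAudio() below (hedged: the buffer sizing follows the
    // BYTES_PER_SAMPLE constant defined further down, and the buffer must be
    // direct so the native side can fill it):
    //
    //     int samples = 1024;
    //     ByteBuffer out = BufferUtils.createByteBuffer(samples * BYTES_PER_SAMPLE);
    //     audioSend.step(samples);               // render `samples` samples
    //     audioSend.getSamples(out, samples, 0); // contextNum 0 == main LWJGL context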
    /**
     * Create an additional listener on the recorder device. The device
     * itself will manage this listener and synchronize it with the main
     * LWJGL context. Processed sound samples for this listener will be
     * available via a call to <code>getSamples()</code> with
     * <code>contextNum</code> equal to the number of times this method has
     * been called.
     */
    public void addListener(){
        naddListener(this.deviceID);
    }
    public static native void naddListener(long device);

    /**
     * This will internally call <code>alListener3f</code> in the appropriate
     * slave context and update that context's listener's parameters. Calling
     * this for a number greater than the current number of slave contexts
     * will have no effect.
     * @param pname
     * @param v1
     * @param v2
     * @param v3
     * @param contextNum
     */
    public void setNthListener3f(int pname, float v1, float v2, float v3, int contextNum){
        nsetNthListener3f(pname, v1, v2, v3, this.deviceID, contextNum);
    }
    public static native void
        nsetNthListener3f(int pname, float v1, float v2, float v3, long device, int contextNum);

    /**
     * This will internally call <code>alListenerf</code> in the appropriate
     * slave context and update that context's listener's parameters. Calling
     * this for a number greater than the current number of slave contexts
     * will have no effect.
     * @param pname
     * @param v1
     * @param contextNum
     */
    public void setNthListenerf(int pname, float v1, int contextNum){
        nsetNthListenerf(pname, v1, this.deviceID, contextNum);
    }
    public static native void nsetNthListenerf(int pname, float v1, long device, int contextNum);
    /**
     * Instead of taking whatever device is available on the system, this
     * call creates the "Multiple Audio Send" device, which supports multiple
     * listeners in a limited capacity. For each listener, the device renders
     * sound not to the sound card, but to buffers which it makes available
     * via JNI.
     */
    public void initInThread(){
        try{
            if (!AL.isCreated()){
                AL.create("Multiple Audio Send", 44100, 60, false);
            }
        }catch (OpenALException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }catch (LWJGLException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }
        super.initInThread();

        ALCdevice device = AL.getDevice();

        // RLM: use reflection to grab the ID of our device for use later.
        try {
            Field deviceIDField = ALCdevice.class.getDeclaredField("device");
            deviceIDField.setAccessible(true);
            try {deviceID = (Long)deviceIDField.get(device);}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
            deviceIDField.setAccessible(false);
        }
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        // The LWJGL context must be established as the master context before
        // any other listeners can be created on this device.
        initDevice();
        // Now everything is initialized, and it is safe to add more listeners.
        latch.countDown();
    }
    public void cleanup(){
        for(SoundProcessor sp : this.soundProcessorMap.values()){
            sp.cleanup();
        }
        super.cleanup();
    }
    public void updateAllListeners(){
        for (int i = 0; i < this.listeners.size(); i++){
            Listener lis = this.listeners.get(i);
            if (null != lis){
                Vector3f location = lis.getLocation();
                Vector3f velocity = lis.getVelocity();
                Vector3f orientation = lis.getUp();
                float gain = lis.getVolume();
                setNthListener3f(AL10.AL_POSITION,
                                 location.x, location.y, location.z, i);
                setNthListener3f(AL10.AL_VELOCITY,
                                 velocity.x, velocity.y, velocity.z, i);
                // Note: only the listener's up vector is passed for
                // AL_ORIENTATION; the listener's direction is not forwarded
                // by this method.
                setNthListener3f(AL10.AL_ORIENTATION,
                                 orientation.x, orientation.y, orientation.z, i);
                setNthListenerf(AL10.AL_GAIN, gain, i);
            }
        }
    }
    public final static int BYTES_PER_SAMPLE = 4;

    // Scratch buffer that receives each listener's samples before they are
    // handed to that listener's SoundProcessor.
    private ByteBuffer buffer = BufferUtils.createByteBuffer(4096);

    /**
     * Process one frame's worth of audio (tpf seconds at 44100 Hz) for every
     * listener and deliver the results to the registered SoundProcessors.
     */
    public void dispatchAudio(float tpf){
        int samplesToGet = (int) (tpf * 44100);
        waitForInit();
        step(samplesToGet);
        updateAllListeners();

        for (int i = 0; i < this.listeners.size(); i++){
            buffer.clear();
            this.getSamples(buffer, samplesToGet, i);
            SoundProcessor sp =
                this.soundProcessorMap.get(this.listeners.get(i));
            if (null != sp){
                sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);
            }
        }
    }

    public void update(float tpf){
        super.update(tpf);
        dispatchAudio(tpf);
    }
}
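// End-to-end usage sketch. Assumptions, not established by this file: the
// application is already using this AudioSend instance as its AudioRenderer,
// and mainProcessor/earProcessor stand for SoundProcessor implementations
// like the one sketched after registerSoundProcessor(SoundProcessor) above.
//
//     AudioSend audioSend = ...; // the renderer the application is running on
//
//     // capture what the user would normally hear:
//     audioSend.registerSoundProcessor(mainProcessor);
//
//     // capture the scene from a second point of view:
//     Listener ear = new Listener();
//     ear.setLocation(new Vector3f(0f, 2f, 0f));
//     audioSend.addListener(ear);
//     audioSend.registerSoundProcessor(ear, earProcessor);
//
//     // each update(tpf) then steps the send device and hands every
//     // registered processor samplesToGet * BYTES_PER_SAMPLE bytes.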