comparison src/com/aurellem/capture/audio/AudioSendRenderer.java @ 65:23e3df41db3c

reformatting for web
author Robert McIntyre <rlm@mit.edu>
date Sat, 11 Feb 2012 12:25:26 -0700
parents 2f129118e2d6
comparing file revisions 64:155c70b7e6de and 65:23e3df41db3c (contents at revision 65 shown below)
import com.jme3.system.Natives;
import com.jme3.util.BufferUtils;

public class AudioSendRenderer
    extends LwjglAudioRenderer implements MultiListener {

    private AudioSend audioSend;
    private AudioFormat outFormat;

    /**
     * Keeps track of all the listeners which have been registered
     * so far. The first element is <code>null</code>, which
     * represents the zeroth LWJGL listener which is created
     * automatically.
     */
    public Vector<Listener> listeners = new Vector<Listener>();

    public void initialize(){
        super.initialize();
        listeners.add(null);
    }

    /**
     * This is to call the native methods which require the OpenAL
     * device ID. Currently it is obtained through reflection.
     */
    private long deviceID;

    /**
     * To ensure that <code>deviceID</code> and
     * <code>listeners</code> are properly initialized before any
     * additional listeners are added.
     */
    private CountDownLatch latch = new CountDownLatch(1);

    /**
     * Each listener (including the main LWJGL listener) can be
     * registered with a <code>SoundProcessor</code>, which this
     * Renderer will call whenever there is new audio data to be
     * processed.
     */
    public HashMap<Listener, SoundProcessor> soundProcessorMap =
        new HashMap<Listener, SoundProcessor>();

    /**
     * Create a new slave context on the recorder device which
     * will render all the sounds in the main LWJGL context with
     * respect to this listener.
     */
    public void addListener(Listener l) {
        try {this.latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        audioSend.addListener();
        this.listeners.add(l);
        l.setRenderer(this);
    }

    /**
     * Whenever new data is rendered in the perspective of this
     * listener, this Renderer will send that data to the
     * SoundProcessor of your choosing.
     */
    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
        this.soundProcessorMap.put(l, sp);
    }

    /**
     * Registers a SoundProcessor for the main LWJGL context. If all
     * you want to do is record the sound you would normally hear in
     * your application, then this is the only method you have to
     * worry about.
     */
    public void registerSoundProcessor(SoundProcessor sp){
        // register a sound processor for the default listener.
        this.soundProcessorMap.put(null, sp);
    }

    private static final Logger logger =
        Logger.getLogger(AudioSendRenderer.class.getName());

    /**
     * Instead of taking whatever device is available on the system,
     * this call creates the "Multiple Audio Send" device, which
     * supports multiple listeners in a limited capacity. For each
     * listener, the device renders it not to the sound device, but
     * instead to buffers which it makes available via JNI.
     */
    public void initInThread(){
        try{
            switch (JmeSystem.getPlatform()){
            case Windows64:
                Natives.extractNativeLib("windows/audioSend", "OpenAL64", true, true);
                break;
            case Windows32:
                Natives.extractNativeLib("windows/audioSend", "OpenAL32", true, true);
                break;
            case Linux64:
                Natives.extractNativeLib("linux/audioSend", "openal64", true, true);
                break;
            case Linux32:
                Natives.extractNativeLib("linux/audioSend", "openal", true, true);
                break;
            }
        }
        catch (IOException ex) {ex.printStackTrace();}

        try{
            if (!AL.isCreated()){
                AL.create("Multiple Audio Send", 44100, 60, false);
            }
        }catch (OpenALException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }catch (LWJGLException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }
        super.initInThread();

        ALCdevice device = AL.getDevice();

        // RLM: use reflection to grab the ID of our device for use
        // later.
        try {
            Field deviceIDField;
            deviceIDField = ALCdevice.class.getDeclaredField("device");
            deviceIDField.setAccessible(true);
            try {deviceID = (Long)deviceIDField.get(device);}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
            deviceIDField.setAccessible(false);
        }
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        this.audioSend = new AudioSend(this.deviceID);
        this.outFormat = audioSend.getAudioFormat();
        initBuffer();

        // The LWJGL context must be established as the master context
        // before any other listeners can be created on this device.
        audioSend.initDevice();
        // Now, everything is initialized, and it is safe to add more
        // listeners.
        latch.countDown();
    }

    public void cleanup(){
        for(SoundProcessor sp : this.soundProcessorMap.values()){
            sp.cleanup();
        }
        super.cleanup();
    }

    public void updateAllListeners(){
        for (int i = 0; i < this.listeners.size(); i++){
            Listener lis = this.listeners.get(i);
            if (null != lis){
                Vector3f location = lis.getLocation();
                Vector3f velocity = lis.getVelocity();
                Vector3f orientation = lis.getUp();
                float gain = lis.getVolume();
                audioSend.setNthListener3f(AL10.AL_POSITION,
                    location.x, location.y, location.z, i);
                audioSend.setNthListener3f(AL10.AL_VELOCITY,
                    velocity.x, velocity.y, velocity.z, i);
                audioSend.setNthListener3f(AL10.AL_ORIENTATION,
                    orientation.x, orientation.y, orientation.z, i);
                audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
            }
        }
    }

    private ByteBuffer buffer;

    public static final int MIN_FRAMERATE = 10;

    private void initBuffer(){
        int bufferSize =
            (int)(this.outFormat.getSampleRate() / ((float)MIN_FRAMERATE)) *
            this.outFormat.getFrameSize();

        this.buffer = BufferUtils.createByteBuffer(bufferSize);
    }

    public void dispatchAudio(float tpf){

        int samplesToGet = (int) (tpf * outFormat.getSampleRate());
        try {latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        audioSend.step(samplesToGet);
        updateAllListeners();

        for (int i = 0; i < this.listeners.size(); i++){
            buffer.clear();
            audioSend.getSamples(buffer, samplesToGet, i);
            SoundProcessor sp =
                this.soundProcessorMap.get(this.listeners.get(i));
            if (null != sp){
                sp.process(buffer,
                           samplesToGet * outFormat.getFrameSize(), outFormat);
            }
        }
    }

    public void update(float tpf){
        super.update(tpf);
        dispatchAudio(tpf);
    }
}
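
A minimal usage sketch, for reference (not part of this changeset). It assumes only what the code above shows: that SoundProcessor declares the process(ByteBuffer, int, AudioFormat) and cleanup() methods which dispatchAudio() and cleanup() invoke and nothing more, that AudioFormat is javax.sound.sampled.AudioFormat, and that SoundProcessor lives in the com.aurellem.capture.audio package alongside this renderer. The ByteCountingProcessor class is purely illustrative.

import java.nio.ByteBuffer;
import javax.sound.sampled.AudioFormat;

import com.aurellem.capture.audio.SoundProcessor;

public class ByteCountingProcessor implements SoundProcessor {

    private long bytesSeen = 0;

    // Called from AudioSendRenderer.dispatchAudio() once per frame and
    // per listener, with the freshly rendered audio for that listener.
    // A real processor would copy the first numBytes out of audio here,
    // for example to append them to a WAV file.
    public void process(ByteBuffer audio, int numBytes, AudioFormat format) {
        bytesSeen += numBytes;
    }

    // Called from AudioSendRenderer.cleanup().
    public void cleanup() {
        System.out.println("heard " + bytesSeen + " bytes of audio");
    }
}

Given an AudioSendRenderer instance named renderer, renderer.registerSoundProcessor(new ByteCountingProcessor()) taps the default (zeroth) listener; to tap an additional listener, call renderer.addListener(listener) followed by renderer.registerSoundProcessor(listener, new ByteCountingProcessor()).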