jmeCapture: comparison of src/com/aurellem/capture/audio/AudioSendRenderer.java @ 11:8a6b1684f536
refactored.
author: Robert McIntyre <rlm@mit.edu>
date: Thu, 27 Oct 2011 02:27:02 -0700
parents:
children: 5249c8a9603c 9f58273090df
comparing 10:4c5fc53778c1 with 11:8a6b1684f536
package com.aurellem.capture.audio;

import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.lwjgl.LWJGLException;
import org.lwjgl.openal.AL;
import org.lwjgl.openal.AL10;
import org.lwjgl.openal.ALCdevice;
import org.lwjgl.openal.OpenALException;

import com.aurellem.send.AudioSend;
import com.jme3.audio.Listener;
import com.jme3.audio.lwjgl.LwjglAudioRenderer;
import com.jme3.math.Vector3f;
import com.jme3.util.BufferUtils;

public class AudioSendRenderer
    extends LwjglAudioRenderer implements MultiListener {

    private AudioSend audioSend;

    /**
     * Keeps track of all the listeners which have been registered so far.
     * The first element is <code>null</code>, which represents the zeroth
     * LWJGL listener, created automatically.
     */
    public Vector<Listener> listeners = new Vector<Listener>();

    public void initialize(){
        super.initialize();
        listeners.add(null);
    }

    /**
     * Used to call the native methods which require the OpenAL device ID.
     * Currently it is obtained through reflection.
     */
    private long deviceID;

    /**
     * Ensures that <code>deviceID</code> and <code>listeners</code> are
     * properly initialized before any additional listeners are added.
     */
    private CountDownLatch latch = new CountDownLatch(1);

    /**
     * Each listener (including the main LWJGL listener) can be registered
     * with a <code>SoundProcessor</code>, which this Renderer will call
     * whenever there is new audio data to be processed.
     */
    public HashMap<Listener, SoundProcessor> soundProcessorMap =
        new HashMap<Listener, SoundProcessor>();

    /**
     * Create a new slave context on the recorder device which will render
     * all the sounds in the main LWJGL context with respect to this listener.
     */
    public void addListener(Listener l) {
        try {this.latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        audioSend.addListener();
        this.listeners.add(l);
    }

    /**
     * Whenever new data is rendered in the perspective of this listener,
     * this Renderer will send that data to the SoundProcessor of your
     * choosing.
     */
    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
        this.soundProcessorMap.put(l, sp);
    }

    /**
     * Registers a SoundProcessor for the main LWJGL context. If all you
     * want to do is record the sound you would normally hear in your
     * application, then this is the only method you have to worry about.
     */
    public void registerSoundProcessor(SoundProcessor sp){
        // register a sound processor for the default listener.
        this.soundProcessorMap.put(null, sp);
    }
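
    /*
     * Usage sketch (illustrative only). WaveFileWriter is a hypothetical
     * stand-in for whatever SoundProcessor implementation you actually use,
     * and "renderer" is assumed to be the AudioSendRenderer jME3 was
     * configured with:
     *
     *   // record exactly what you would normally hear (default listener):
     *   renderer.registerSoundProcessor(new WaveFileWriter(new File("main.wav")));
     *
     *   // render the same scene from a second point of view:
     *   Listener auxiliary = new Listener();
     *   renderer.addListener(auxiliary);
     *   renderer.registerSoundProcessor(auxiliary,
     *       new WaveFileWriter(new File("auxiliary.wav")));
     */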

    private static final Logger logger =
        Logger.getLogger(AudioSendRenderer.class.getName());

    /**
     * Instead of taking whatever device is available on the system, this
     * call creates the "Multiple Audio Send" device, which supports
     * multiple listeners in a limited capacity. For each listener, the
     * device renders the audio not to the system's sound device, but
     * instead to buffers which it makes available via JNI.
     */
    public void initInThread(){
        try{
            if (!AL.isCreated()){
                AL.create("Multiple Audio Send", 44100, 60, false);
            }
        }catch (OpenALException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }catch (LWJGLException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }
        super.initInThread();

        ALCdevice device = AL.getDevice();

        // RLM: use reflection to grab the ID of our device for use later.
        try {
            Field deviceIDField;
            deviceIDField = ALCdevice.class.getDeclaredField("device");
            deviceIDField.setAccessible(true);
            try {deviceID = (Long)deviceIDField.get(device);}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
            deviceIDField.setAccessible(false);
        }
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        this.audioSend = new AudioSend(this.deviceID);

        // The LWJGL context must be established as the master context
        // before any other listeners can be created on this device.
        audioSend.initDevice();
        // Now everything is initialized, and it is safe to add more listeners.
        latch.countDown();
    }

    public void cleanup(){
        for(SoundProcessor sp : this.soundProcessorMap.values()){
            sp.cleanup();
        }
        super.cleanup();
    }

    public void updateAllListeners(){
        for (int i = 0; i < this.listeners.size(); i++){
            Listener lis = this.listeners.get(i);
            if (null != lis){
                Vector3f location = lis.getLocation();
                Vector3f velocity = lis.getVelocity();
                Vector3f orientation = lis.getUp();
                float gain = lis.getVolume();
                audioSend.setNthListener3f(AL10.AL_POSITION,
                    location.x, location.y, location.z, i);
                audioSend.setNthListener3f(AL10.AL_VELOCITY,
                    velocity.x, velocity.y, velocity.z, i);
                audioSend.setNthListener3f(AL10.AL_ORIENTATION,
                    orientation.x, orientation.y, orientation.z, i);
                audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
            }
        }
    }

    public final static int BYTES_PER_SAMPLE = 4;
    private ByteBuffer buffer = BufferUtils.createByteBuffer(4096);

    public void dispatchAudio(float tpf){
        int samplesToGet = (int) (tpf * 44100);
        try {latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        audioSend.step(samplesToGet);
        updateAllListeners();

        for (int i = 0; i < this.listeners.size(); i++){
            buffer.clear();
            audioSend.getSamples(buffer, samplesToGet, i);
            SoundProcessor sp =
                this.soundProcessorMap.get(this.listeners.get(i));
            if (null != sp){sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);}
        }
    }

    public void update(float tpf){
        super.update(tpf);
        dispatchAudio(tpf);
    }
}
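
Judging only from how this renderer invokes it (dispatchAudio calls process(ByteBuffer, int) and cleanup calls cleanup()), a SoundProcessor needs just those two methods. The sketch below is a hypothetical implementation that simply counts the bytes it receives; the class name is invented here, and it assumes SoundProcessor is visible from the com.aurellem.capture.audio package as used above:

import java.nio.ByteBuffer;

public class ByteCountingProcessor implements SoundProcessor {

    private long totalBytes = 0;

    // Called by dispatchAudio() with the freshly rendered audio for one listener.
    public void process(ByteBuffer audioSamples, int numBytes) {
        totalBytes += numBytes;
        // A real processor would read numBytes bytes from audioSamples here,
        // e.g. writing them to a file or feeding them to an analysis routine.
    }

    // Called once by cleanup() when the renderer shuts down.
    public void cleanup() {
        System.out.println("received " + totalBytes + " bytes of audio");
    }
}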