Mercurial > jmeCapture
view src/com/aurellem/capture/audio/AudioSendRenderer.java @ 53:3dc1f15e1e13
going to write main documentation
| author   | Robert McIntyre <rlm@mit.edu>   |
|----------|---------------------------------|
| date     | Sat, 03 Dec 2011 13:54:47 -0600 |
| parents  | 2f129118e2d6                    |
| children | 23e3df41db3c                    |
line source
package com.aurellem.capture.audio;

import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.sound.sampled.AudioFormat;

import org.lwjgl.LWJGLException;
import org.lwjgl.openal.AL;
import org.lwjgl.openal.AL10;
import org.lwjgl.openal.ALCdevice;
import org.lwjgl.openal.OpenALException;

import com.aurellem.send.AudioSend;
import com.jme3.audio.Listener;
import com.jme3.audio.lwjgl.LwjglAudioRenderer;
import com.jme3.math.Vector3f;
import com.jme3.system.JmeSystem;
import com.jme3.system.Natives;
import com.jme3.util.BufferUtils;

public class AudioSendRenderer
    extends LwjglAudioRenderer implements MultiListener {

    private AudioSend audioSend;
    private AudioFormat outFormat; // = new AudioFormat(44100.0f, 32, 1, true, false);

    /**
     * Keeps track of all the listeners which have been registered so far.
     * The first element is <code>null</code>, which represents the zeroth
     * LWJGL listener which is created automatically.
     */
    public Vector<Listener> listeners = new Vector<Listener>();

    public void initialize(){
        super.initialize();
        listeners.add(null);
    }

    /**
     * The OpenAL device ID, which is needed to call the native methods.
     * Currently it is obtained through reflection.
     */
    private long deviceID;

    /**
     * Ensures that <code>deviceID</code> and <code>listeners</code> are
     * properly initialized before any additional listeners are added.
     */
    private CountDownLatch latch = new CountDownLatch(1);

    /**
     * Each listener (including the main LWJGL listener) can be registered
     * with a <code>SoundProcessor</code>, which this Renderer will call whenever
     * there is new audio data to be processed.
     */
    public HashMap<Listener, SoundProcessor> soundProcessorMap =
        new HashMap<Listener, SoundProcessor>();

    /**
     * Creates a new slave context on the recorder device which will render all
     * the sounds in the main LWJGL context with respect to this listener.
     */
    public void addListener(Listener l) {
        try {this.latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        audioSend.addListener();
        this.listeners.add(l);
        l.setRenderer(this);
    }

    /**
     * Whenever new data is rendered in the perspective of this listener,
     * this Renderer will send that data to the SoundProcessor of your choosing.
     */
    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
        this.soundProcessorMap.put(l, sp);
    }

    /**
     * Registers a SoundProcessor for the main LWJGL context. If all you want to
     * do is record the sound you would normally hear in your application, then
     * this is the only method you have to worry about.
     */
    public void registerSoundProcessor(SoundProcessor sp){
        // register a sound processor for the default listener.
        this.soundProcessorMap.put(null, sp);
    }

    private static final Logger logger =
        Logger.getLogger(AudioSendRenderer.class.getName());

    /**
     * Instead of taking whatever device is available on the system, this call
     * creates the "Multiple Audio Send" device, which supports multiple
     * listeners in a limited capacity. For each listener, the device renders
     * audio not to the sound card, but to buffers which it makes available
     * via JNI.
     */
    public void initInThread(){
        try{
            switch (JmeSystem.getPlatform()){
            case Windows64:
                Natives.extractNativeLib("windows/audioSend", "OpenAL64", true, true);
                break;
            case Windows32:
                Natives.extractNativeLib("windows/audioSend", "OpenAL32", true, true);
                break;
            case Linux64:
                Natives.extractNativeLib("linux/audioSend", "openal64", true, true);
                break;
            case Linux32:
                Natives.extractNativeLib("linux/audioSend", "openal", true, true);
                break;
            }
        }
        catch (IOException ex) {ex.printStackTrace();}

        try{
            if (!AL.isCreated()){
                AL.create("Multiple Audio Send", 44100, 60, false);
            }
        }catch (OpenALException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }catch (LWJGLException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }
        super.initInThread();

        ALCdevice device = AL.getDevice();

        // RLM: use reflection to grab the ID of our device for use later.
        try {
            Field deviceIDField;
            deviceIDField = ALCdevice.class.getDeclaredField("device");
            deviceIDField.setAccessible(true);
            try {deviceID = (Long)deviceIDField.get(device);}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
            deviceIDField.setAccessible(false);}
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        this.audioSend = new AudioSend(this.deviceID);
        this.outFormat = audioSend.getAudioFormat();
        initBuffer();

        // The LWJGL context must be established as the master context before
        // any other listeners can be created on this device.
        audioSend.initDevice();
        // Now, everything is initialized, and it is safe to add more listeners.
        latch.countDown();
    }

    public void cleanup(){
        for(SoundProcessor sp : this.soundProcessorMap.values()){
            sp.cleanup();
        }
        super.cleanup();
    }

    public void updateAllListeners(){
        for (int i = 0; i < this.listeners.size(); i++){
            Listener lis = this.listeners.get(i);
            if (null != lis){
                Vector3f location = lis.getLocation();
                Vector3f velocity = lis.getVelocity();
                Vector3f orientation = lis.getUp();
                float gain = lis.getVolume();
                audioSend.setNthListener3f(AL10.AL_POSITION,
                                           location.x, location.y, location.z, i);
                audioSend.setNthListener3f(AL10.AL_VELOCITY,
                                           velocity.x, velocity.y, velocity.z, i);
                audioSend.setNthListener3f(AL10.AL_ORIENTATION,
                                           orientation.x, orientation.y, orientation.z, i);
                audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
            }
        }
    }

    private ByteBuffer buffer;

    public static final int MIN_FRAMERATE = 10;

    private void initBuffer(){
        int bufferSize = (int)(this.outFormat.getSampleRate() / ((float)MIN_FRAMERATE)) *
            this.outFormat.getFrameSize();
        this.buffer = BufferUtils.createByteBuffer(bufferSize);
    }

    /**
     * Step the audio device forward by <code>tpf</code> seconds worth of sound,
     * then hand the freshly rendered samples for each listener to that
     * listener's registered <code>SoundProcessor</code>, if any.
     */
    public void dispatchAudio(float tpf){
        int samplesToGet = (int) (tpf * outFormat.getSampleRate());
        try {latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        audioSend.step(samplesToGet);
        updateAllListeners();

        for (int i = 0; i < this.listeners.size(); i++){
            buffer.clear();
            audioSend.getSamples(buffer, samplesToGet, i);
            SoundProcessor sp =
                this.soundProcessorMap.get(this.listeners.get(i));
            if (null != sp){
                sp.process(buffer, samplesToGet*outFormat.getFrameSize(), outFormat);
            }
        }
    }

    public void update(float tpf){
        super.update(tpf);
        dispatchAudio(tpf);
    }
}
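Below is a usage sketch, not part of the repository file above. It assumes that the SoundProcessor interface declares exactly the two methods this renderer calls on it, process(ByteBuffer, int, AudioFormat) and cleanup(), and the ByteCountingProcessor class is a made-up name for illustration. To capture what the default listener hears, implementing SoundProcessor and passing it to the one-argument registerSoundProcessor is all that is required; per-listener capture would instead use addListener(Listener) followed by the two-argument registerSoundProcessor(Listener, SoundProcessor).

// Hypothetical example: count the bytes of audio rendered for the default
// listener. Assumes SoundProcessor has the methods invoked by
// AudioSendRenderer above: process(ByteBuffer, int, AudioFormat) and cleanup().
import java.nio.ByteBuffer;

import javax.sound.sampled.AudioFormat;

import com.aurellem.capture.audio.AudioSendRenderer;
import com.aurellem.capture.audio.SoundProcessor;

public class ByteCountingProcessor implements SoundProcessor {

    private long totalBytes = 0;

    // Called from AudioSendRenderer.dispatchAudio() once per frame with the
    // samples rendered for the listener this processor was registered under.
    public void process(ByteBuffer audioSamples, int numBytes, AudioFormat format) {
        totalBytes += numBytes;
    }

    // Called from AudioSendRenderer.cleanup() when the renderer shuts down.
    public void cleanup() {
        System.out.println("captured " + totalBytes + " bytes of audio");
    }

    public static void main(String[] args) {
        // In a real jME3 application the engine constructs and initializes the
        // audio renderer; this only shows the registration call itself.
        AudioSendRenderer renderer = new AudioSendRenderer();
        renderer.registerSoundProcessor(new ByteCountingProcessor());
    }
}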