Mercurial > jmeCapture
view src/com/aurellem/capture/audio/AudioSendRenderer.java @ 73:877ae4b2993c tip
merge laptop changes.
author | Robert McIntyre <rlm@mit.edu> |
---|---|
date | Mon, 10 Mar 2014 18:58:08 -0400 |
parents | 23e3df41db3c |
children |
line wrap: on
line source
package com.aurellem.capture.audio;

import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.sound.sampled.AudioFormat;

import org.lwjgl.LWJGLException;
import org.lwjgl.openal.AL;
import org.lwjgl.openal.AL10;
import org.lwjgl.openal.ALCdevice;
import org.lwjgl.openal.OpenALException;

import com.aurellem.send.AudioSend;
import com.jme3.audio.Listener;
import com.jme3.audio.lwjgl.LwjglAudioRenderer;
import com.jme3.math.Vector3f;
import com.jme3.system.JmeSystem;
import com.jme3.system.Natives;
import com.jme3.util.BufferUtils;

/**
 * An {@link LwjglAudioRenderer} backed by the custom "Multiple Audio
 * Send" OpenAL device.  Instead of rendering audio only to the sound
 * card, the send device renders the scene once per registered
 * {@link Listener} and exposes each listener's samples through JNI
 * (via {@link AudioSend}); this renderer forwards those samples to
 * the {@link SoundProcessor} registered for that listener.
 */
public class AudioSendRenderer
    extends LwjglAudioRenderer implements MultiListener {

    /** JNI bridge to the "Multiple Audio Send" device. */
    private AudioSend audioSend;

    /** Audio format produced by the send device, reported over JNI. */
    private AudioFormat outFormat;

    /**
     * Keeps track of all the listeners which have been registered
     * so far. The first element is <code>null</code>, which
     * represents the zeroth LWJGL listener which is created
     * automatically.
     */
    public Vector<Listener> listeners = new Vector<Listener>();

    @Override
    public void initialize(){
        super.initialize();
        // Slot 0 stands in for the implicit LWJGL listener.
        listeners.add(null);
    }

    /**
     * This is to call the native methods which require the OpenAL
     * device ID.  Currently it is obtained through reflection.
     */
    private long deviceID;

    /**
     * To ensure that <code>deviceID</code> and
     * <code>listeners</code> are properly initialized before any
     * additional listeners are added.
     */
    private CountDownLatch latch = new CountDownLatch(1);

    /**
     * Each listener (including the main LWJGL listener) can be
     * registered with a <code>SoundProcessor</code>, which this
     * Renderer will call whenever there is new audio data to be
     * processed.
     */
    public HashMap<Listener, SoundProcessor> soundProcessorMap =
        new HashMap<Listener, SoundProcessor>();

    /**
     * Create a new slave context on the recorder device which
     * will render all the sounds in the main LWJGL context with
     * respect to this listener.
     *
     * @param l the additional listener to register; its renderer is
     *          set to this instance.
     */
    public void addListener(Listener l) {
        // Block until initInThread() has finished creating the
        // master context and the device ID.
        try {this.latch.await();}
        catch (InterruptedException e) {
            // Preserve the interrupt status for the caller.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        audioSend.addListener();
        this.listeners.add(l);
        l.setRenderer(this);
    }

    /**
     * Whenever new data is rendered in the perspective of this
     * listener, this Renderer will send that data to the
     * SoundProcessor of your choosing.
     *
     * @param l  the listener whose rendered audio is forwarded
     * @param sp receives each chunk of rendered audio for {@code l}
     */
    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
        this.soundProcessorMap.put(l, sp);
    }

    /**
     * Registers a SoundProcessor for the main LWJGL context. If all
     * you want to do is record the sound you would normally hear in
     * your application, then this is the only method you have to
     * worry about.
     *
     * @param sp receives the audio rendered for the default listener
     */
    public void registerSoundProcessor(SoundProcessor sp){
        // The default LWJGL listener is keyed by null (see listeners).
        this.soundProcessorMap.put(null, sp);
    }

    private static final Logger logger =
        Logger.getLogger(AudioSendRenderer.class.getName());

    /**
     * Instead of taking whatever device is available on the system,
     * this call creates the "Multiple Audio Send" device, which
     * supports multiple listeners in a limited capacity.  For each
     * listener, the device renders it not to the sound device, but
     * instead to buffers which it makes available via JNI.
     */
    @Override
    public void initInThread(){
        // Extract the platform-specific OpenAL replacement library
        // that implements the send extension.
        try{
            switch (JmeSystem.getPlatform()){
            case Windows64:
                Natives.extractNativeLib("windows/audioSend",
                                         "OpenAL64", true, true);
                break;
            case Windows32:
                Natives.extractNativeLib("windows/audioSend",
                                         "OpenAL32", true, true);
                break;
            case Linux64:
                Natives.extractNativeLib("linux/audioSend",
                                         "openal64", true, true);
                break;
            case Linux32:
                Natives.extractNativeLib("linux/audioSend",
                                         "openal", true, true);
                break;
            }
        }
        catch (IOException ex) {ex.printStackTrace();}

        try{
            if (!AL.isCreated()){
                AL.create("Multiple Audio Send", 44100, 60, false);
            }
        }catch (OpenALException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }catch (LWJGLException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }
        super.initInThread();

        ALCdevice device = AL.getDevice();

        // RLM: use reflection to grab the ID of our device for use
        // later.  LWJGL does not expose the raw ALCdevice pointer,
        // but the native send API needs it.
        try {
            Field deviceIDField;
            deviceIDField = ALCdevice.class.getDeclaredField("device");
            deviceIDField.setAccessible(true);
            try {deviceID = (Long)deviceIDField.get(device);}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
            deviceIDField.setAccessible(false);}
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        this.audioSend = new AudioSend(this.deviceID);
        this.outFormat = audioSend.getAudioFormat();
        initBuffer();

        // The LWJGL context must be established as the master context
        // before any other listeners can be created on this device.
        audioSend.initDevice();
        // Now, everything is initialized, and it is safe to add more
        // listeners.
        latch.countDown();
    }

    /** Gives each registered SoundProcessor a chance to release its resources. */
    @Override
    public void cleanup(){
        for(SoundProcessor sp : this.soundProcessorMap.values()){
            sp.cleanup();
        }
        super.cleanup();
    }

    /**
     * Pushes each registered listener's position, velocity,
     * orientation, and gain to the corresponding slave context on
     * the send device.  Slot 0 (the LWJGL listener) is skipped; it
     * is managed by LWJGL itself.
     */
    public void updateAllListeners(){
        for (int i = 0; i < this.listeners.size(); i++){
            Listener lis = this.listeners.get(i);
            if (null != lis){
                Vector3f location = lis.getLocation();
                Vector3f velocity = lis.getVelocity();
                Vector3f orientation = lis.getUp();
                float gain = lis.getVolume();
                audioSend.setNthListener3f
                    (AL10.AL_POSITION,
                     location.x, location.y, location.z, i);
                audioSend.setNthListener3f
                    (AL10.AL_VELOCITY,
                     velocity.x, velocity.y, velocity.z, i);
                // NOTE(review): standard OpenAL AL_ORIENTATION takes
                // six floats (an "at" vector plus an "up" vector), but
                // only the up vector is passed here — confirm the
                // native setNthListener3f is defined to accept this.
                audioSend.setNthListener3f
                    (AL10.AL_ORIENTATION,
                     orientation.x, orientation.y, orientation.z, i);
                audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
            }
        }
    }

    /** Scratch buffer that receives each listener's samples over JNI. */
    private ByteBuffer buffer;

    /**
     * The transfer buffer is sized for the slowest frame rate we
     * expect to service: one buffer holds 1/MIN_FRAMERATE seconds
     * of audio.
     */
    public static final int MIN_FRAMERATE = 10;

    /** Allocates {@link #buffer} from {@link #outFormat}; requires initInThread(). */
    private void initBuffer(){
        int bufferSize =
            (int)(this.outFormat.getSampleRate() /
                  ((float)MIN_FRAMERATE)) *
            this.outFormat.getFrameSize();

        this.buffer = BufferUtils.createByteBuffer(bufferSize);
    }

    /**
     * Steps the send device forward by {@code tpf} seconds' worth of
     * samples and delivers each listener's audio to its registered
     * SoundProcessor.
     *
     * @param tpf time per frame, in seconds
     */
    public void dispatchAudio(float tpf){
        // Wait for initInThread() so outFormat/buffer/audioSend are set.
        try {latch.await();}
        catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }

        int samplesToGet = (int) (tpf * outFormat.getSampleRate());
        // Clamp to the buffer's capacity: a long frame
        // (tpf > 1/MIN_FRAMERATE) would otherwise overflow the
        // transfer buffer in the native getSamples call.
        int maxSamples = buffer.capacity() / outFormat.getFrameSize();
        if (samplesToGet > maxSamples) {samplesToGet = maxSamples;}

        audioSend.step(samplesToGet);
        updateAllListeners();

        for (int i = 0; i < this.listeners.size(); i++){
            buffer.clear();
            audioSend.getSamples(buffer, samplesToGet, i);
            SoundProcessor sp =
                this.soundProcessorMap.get(this.listeners.get(i));
            if (null != sp){
                sp.process
                    (buffer,
                     samplesToGet*outFormat.getFrameSize(), outFormat);}
        }
    }

    @Override
    public void update(float tpf){
        super.update(tpf);
        dispatchAudio(tpf);
    }
}