annotate src/com/aurellem/capture/audio/AudioSendRenderer.java @ 47:74b53dfe369e

stressing similarity between audio and video capture
author Robert McIntyre <rlm@mit.edu>
date Sat, 03 Dec 2011 12:48:40 -0600
parents 2f129118e2d6
children 23e3df41db3c
rev   line source
rlm@11 1 package com.aurellem.capture.audio;
rlm@11 2
rlm@43 3 import java.io.IOException;
rlm@11 4 import java.lang.reflect.Field;
rlm@11 5 import java.nio.ByteBuffer;
rlm@11 6 import java.util.HashMap;
rlm@11 7 import java.util.Vector;
rlm@11 8 import java.util.concurrent.CountDownLatch;
rlm@11 9 import java.util.logging.Level;
rlm@11 10 import java.util.logging.Logger;
rlm@11 11
rlm@29 12 import javax.sound.sampled.AudioFormat;
rlm@29 13
rlm@11 14 import org.lwjgl.LWJGLException;
rlm@11 15 import org.lwjgl.openal.AL;
rlm@11 16 import org.lwjgl.openal.AL10;
rlm@11 17 import org.lwjgl.openal.ALCdevice;
rlm@11 18 import org.lwjgl.openal.OpenALException;
rlm@11 19
rlm@11 20 import com.aurellem.send.AudioSend;
rlm@11 21 import com.jme3.audio.Listener;
rlm@11 22 import com.jme3.audio.lwjgl.LwjglAudioRenderer;
rlm@11 23 import com.jme3.math.Vector3f;
rlm@43 24 import com.jme3.system.JmeSystem;
rlm@43 25 import com.jme3.system.Natives;
rlm@11 26 import com.jme3.util.BufferUtils;
rlm@11 27
rlm@11 28 public class AudioSendRenderer
rlm@11 29
rlm@11 30 extends LwjglAudioRenderer implements MultiListener {
rlm@11 31
rlm@11 32 private AudioSend audioSend;
rlm@38 33 private AudioFormat outFormat; // = new AudioFormat(44100.0f, 32, 1, true, false);
rlm@11 34
rlm@11 35 /**
rlm@11 36 * Keeps track of all the listeners which have been registered so far.
rlm@11 37 * The first element is <code>null</code>, representing the zeroth
rlm@11 38 * LWJGL listener, which is created automatically.
rlm@11 39 */
rlm@11 40 public Vector<Listener> listeners = new Vector<Listener>();
rlm@11 41
rlm@11 42 public void initialize(){
rlm@11 43 super.initialize();
rlm@11 44 listeners.add(null);
rlm@11 45 }
rlm@11 46
rlm@11 47 /**
rlm@11 48 * The OpenAL device ID, used to call the native methods that require it.
rlm@11 49 * Currently it is obtained through reflection.
rlm@11 50 */
rlm@11 51 private long deviceID;
rlm@11 52
rlm@11 53 /**
rlm@11 54 * Ensures that <code>deviceID</code> and <code>listeners</code> are
rlm@11 55 * properly initialized before any additional listeners are added.
rlm@11 56 */
rlm@11 57 private CountDownLatch latch = new CountDownLatch(1);
rlm@11 58
rlm@11 59 /**
rlm@11 60 * Each listener (including the main LWJGL listener) can be registered
rlm@11 61 * with a <code>SoundProcessor</code>, which this Renderer will call whenever
rlm@11 62 * there is new audio data to be processed.
rlm@11 63 */
rlm@11 64 public HashMap<Listener, SoundProcessor> soundProcessorMap =
rlm@11 65 new HashMap<Listener, SoundProcessor>();
rlm@11 66
rlm@11 67
rlm@11 68 /**
rlm@11 69 * Creates a new slave context on the recorder device which will render all the
rlm@11 70 * sounds in the main LWJGL context with respect to this listener.
rlm@11 71 */
rlm@11 72 public void addListener(Listener l) {
rlm@11 73 try {this.latch.await();}
rlm@11 74 catch (InterruptedException e) {e.printStackTrace();}
rlm@11 75 audioSend.addListener();
rlm@11 76 this.listeners.add(l);
rlm@34 77 l.setRenderer(this);
rlm@11 78 }
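// Usage sketch (illustration only, not part of the original file): add a second
// listener and give it its own SoundProcessor. The names audioSendRenderer and
// mySoundProcessor are hypothetical; addListener() blocks on the latch, so this
// must happen after the renderer has finished initInThread().
//
//   Listener ear = new Listener();
//   audioSendRenderer.addListener(ear);
//   audioSendRenderer.registerSoundProcessor(ear, mySoundProcessor);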
rlm@11 79
rlm@11 80 /**
rlm@11 81 * Whenever new data is rendered from the perspective of this listener,
rlm@11 82 * this Renderer will send that data to the SoundProcessor of your choosing.
rlm@11 83 */
rlm@11 84 public void registerSoundProcessor(Listener l, SoundProcessor sp) {
rlm@11 85 this.soundProcessorMap.put(l, sp);
rlm@11 86 }
rlm@11 87
rlm@11 88 /**
rlm@11 89 * Registers a SoundProcessor for the main LWJGL context. If all you want to
rlm@11 90 * do is record the sound you would normally hear in your application, then
rlm@11 91 * this is the only method you have to worry about.
rlm@11 92 */
rlm@11 93 public void registerSoundProcessor(SoundProcessor sp){
rlm@11 94 // register a sound processor for the default listener.
rlm@11 95 this.soundProcessorMap.put(null, sp);
rlm@11 96 }
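// Minimal sketch of the "record what you would normally hear" case described
// above, using an anonymous SoundProcessor for the default (null) listener.
// The process() signature and its byte-count argument are inferred from how
// dispatchAudio() below invokes it; the parameter names are assumptions.
//
//   audioSendRenderer.registerSoundProcessor(new SoundProcessor() {
//       public void process(ByteBuffer audioSamples, int numBytes, AudioFormat format) {
//           // consume numBytes bytes of rendered audio, e.g. append them to a file
//       }
//       public void cleanup() {}
//   });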
rlm@11 97
rlm@11 98 private static final Logger logger =
rlm@11 99 Logger.getLogger(AudioSendRenderer.class.getName());
rlm@11 100
rlm@11 101
rlm@11 102
rlm@11 103 /**
rlm@11 104 * Instead of taking whatever device is available on the system, this call
rlm@11 105 * creates the "Multiple Audio Send" device, which supports multiple listeners in a limited
rlm@11 106 * capacity. For each listener, the device renders the audio not to the sound card, but
rlm@11 107 * instead to buffers which it makes available via JNI.
rlm@11 108 */
rlm@11 109 public void initInThread(){
rlm@43 110
rlm@43 111 try{
rlm@43 112 switch (JmeSystem.getPlatform()){
rlm@43 113 case Windows64:
rlm@43 114 Natives.extractNativeLib("windows/audioSend", "OpenAL64", true, true);
rlm@43 115 break;
rlm@43 116 case Windows32:
rlm@43 117 Natives.extractNativeLib("windows/audioSend", "OpenAL32", true, true);
rlm@43 118 break;
rlm@43 119 case Linux64:
rlm@43 120 Natives.extractNativeLib("linux/audioSend", "openal64", true, true);
rlm@43 121 break;
rlm@43 122 case Linux32:
rlm@43 123 Natives.extractNativeLib("linux/audioSend", "openal", true, true);
rlm@43 124 break;
rlm@43 125 }
rlm@43 126 }
rlm@43 127 catch (IOException ex) {ex.printStackTrace();}
rlm@43 128
rlm@11 129 try{
rlm@11 130 if (!AL.isCreated()){
rlm@38 131 AL.create("Multiple Audio Send", 44100, 60, false);
rlm@11 132 }
rlm@11 133 }catch (OpenALException ex){
rlm@11 134 logger.log(Level.SEVERE, "Failed to load audio library", ex);
rlm@11 135 System.exit(1);
rlm@11 136 return;
rlm@11 137 }catch (LWJGLException ex){
rlm@11 138 logger.log(Level.SEVERE, "Failed to load audio library", ex);
rlm@11 139 System.exit(1);
rlm@11 140 return;
rlm@11 141 }
rlm@11 142 super.initInThread();
rlm@11 143
rlm@11 144 ALCdevice device = AL.getDevice();
rlm@11 145
rlm@11 146 // RLM: use reflection to grab the ID of our device for use later.
rlm@11 147 try {
rlm@11 148 Field deviceIDField;
rlm@11 149 deviceIDField = ALCdevice.class.getDeclaredField("device");
rlm@11 150 deviceIDField.setAccessible(true);
rlm@11 151 try {deviceID = (Long)deviceIDField.get(device);}
rlm@11 152 catch (IllegalArgumentException e) {e.printStackTrace();}
rlm@11 153 catch (IllegalAccessException e) {e.printStackTrace();}
rlm@11 154 deviceIDField.setAccessible(false);}
rlm@11 155 catch (SecurityException e) {e.printStackTrace();}
rlm@11 156 catch (NoSuchFieldException e) {e.printStackTrace();}
rlm@11 157
rlm@11 158 this.audioSend = new AudioSend(this.deviceID);
rlm@38 159 this.outFormat = audioSend.getAudioFormat();
rlm@38 160 initBuffer();
rlm@39 161
rlm@11 162 // The LWJGL context must be established as the master context before
rlm@11 163 // any other listeners can be created on this device.
rlm@11 164 audioSend.initDevice();
rlm@11 165 // Now, everything is initialized, and it is safe to add more listeners.
rlm@11 166 latch.countDown();
rlm@11 167 }
rlm@11 168
rlm@11 169
rlm@11 170 public void cleanup(){
rlm@11 171 for(SoundProcessor sp : this.soundProcessorMap.values()){
rlm@11 172 sp.cleanup();
rlm@11 173 }
rlm@11 174 super.cleanup();
rlm@11 175 }
rlm@11 176
rlm@11 177 public void updateAllListeners(){
rlm@11 178 for (int i = 0; i < this.listeners.size(); i++){
rlm@11 179 Listener lis = this.listeners.get(i);
rlm@11 180 if (null != lis){
rlm@11 181 Vector3f location = lis.getLocation();
rlm@11 182 Vector3f velocity = lis.getVelocity();
rlm@11 183 Vector3f orientation = lis.getUp();
rlm@11 184 float gain = lis.getVolume();
rlm@11 185 audioSend.setNthListener3f(AL10.AL_POSITION,
rlm@11 186 location.x, location.y, location.z, i);
rlm@11 187 audioSend.setNthListener3f(AL10.AL_VELOCITY,
rlm@11 188 velocity.x, velocity.y, velocity.z, i);
rlm@11 189 audioSend.setNthListener3f(AL10.AL_ORIENTATION,
rlm@11 190 orientation.x, orientation.y, orientation.z, i);
rlm@11 191 audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
rlm@11 192 }
rlm@11 193 }
rlm@11 194 }
rlm@11 195
rlm@11 196
rlm@38 197 private ByteBuffer buffer;
rlm@39 198
rlm@38 199 public static final int MIN_FRAMERATE = 10;
rlm@38 200
rlm@38 201 private void initBuffer(){
rlm@38 202 int bufferSize = (int)(this.outFormat.getSampleRate() / ((float)MIN_FRAMERATE)) *
rlm@38 203 this.outFormat.getFrameSize();
rlm@38 204 this.buffer = BufferUtils.createByteBuffer(bufferSize);
rlm@38 205 }
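// Worked example of the sizing above (concrete values assumed for illustration):
// with a 44100 Hz sample rate, MIN_FRAMERATE = 10, and a 4-byte frame size
// (32-bit mono), bufferSize = (44100 / 10) * 4 = 17640 bytes, enough for one
// update's worth of samples as long as the application runs at 10 fps or faster.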
rlm@38 206 /**
rlm@38 207 * Renders <code>tpf</code> seconds of audio and hands each listener's newly rendered samples to its registered SoundProcessor, if any.
rlm@38 208 */
rlm@11 209 public void dispatchAudio(float tpf){
rlm@36 210
rlm@29 211 int samplesToGet = (int) (tpf * outFormat.getSampleRate());
rlm@11 212 try {latch.await();}
rlm@11 213 catch (InterruptedException e) {e.printStackTrace();}
rlm@11 214 audioSend.step(samplesToGet);
rlm@11 215 updateAllListeners();
rlm@39 216
rlm@11 217 for (int i = 0; i < this.listeners.size(); i++){
rlm@11 218 buffer.clear();
rlm@11 219 audioSend.getSamples(buffer, samplesToGet, i);
rlm@11 220 SoundProcessor sp =
rlm@39 221 this.soundProcessorMap.get(this.listeners.get(i));
rlm@30 222 if (null != sp){sp.process(buffer, samplesToGet*outFormat.getFrameSize(), outFormat);}
rlm@11 223 }
rlm@39 224
rlm@11 225 }
rlm@11 226
rlm@11 227 public void update(float tpf){
rlm@11 228 super.update(tpf);
rlm@11 229 dispatchAudio(tpf);
rlm@11 230 }
rlm@11 231
rlm@11 232 }
rlm@11 233