Mercurial > jmeCapture
view src/com/aurellem/capture/audio/AudioSendRenderer.java @ 38:adeb88787645
fixed all problems for 16 bit mono output
author | Robert McIntyre <rlm@mit.edu> |
---|---|
date | Mon, 31 Oct 2011 07:43:44 -0700 |
parents | 2a525e937d86 |
children | 784a3f4e6202 |
line wrap: on
line source
package com.aurellem.capture.audio;

import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.sound.sampled.AudioFormat;

import org.lwjgl.LWJGLException;
import org.lwjgl.openal.AL;
import org.lwjgl.openal.AL10;
import org.lwjgl.openal.ALCdevice;
import org.lwjgl.openal.OpenALException;

import com.aurellem.send.AudioSend;
import com.jme3.audio.Listener;
import com.jme3.audio.lwjgl.LwjglAudioRenderer;
import com.jme3.math.Vector3f;
import com.jme3.util.BufferUtils;

/**
 * An audio renderer that opens the "Multiple Audio Send" OpenAL device and
 * renders the scene's audio once per registered {@link Listener}, delivering
 * each listener's rendered sample data to a {@link SoundProcessor} instead of
 * (only) the sound card.
 */
public class AudioSendRenderer
    extends LwjglAudioRenderer implements MultiListener {

    private AudioSend audioSend;

    /** Output format reported by the native send device (queried in initInThread). */
    private AudioFormat outFormat;

    /**
     * Keeps track of all the listeners which have been registered so far.
     * The first element is <code>null</code>, which represents the zeroth
     * LWJGL listener which is created automatically.
     */
    public Vector<Listener> listeners = new Vector<Listener>();

    public void initialize(){
        super.initialize();
        // Slot 0 stands in for the implicit LWJGL master listener.
        listeners.add(null);
    }

    /**
     * This is to call the native methods which require the OpenAL device ID.
     * Currently it is obtained through reflection.
     */
    private long deviceID;

    /**
     * To ensure that <code>deviceID</code> and <code>listeners</code> are
     * properly initialized before any additional listeners are added.
     */
    private CountDownLatch latch = new CountDownLatch(1);

    /**
     * Each listener (including the main LWJGL listener) can be registered
     * with a <code>SoundProcessor</code>, which this Renderer will call whenever
     * there is new audio data to be processed.
     */
    public HashMap<Listener, SoundProcessor> soundProcessorMap =
        new HashMap<Listener, SoundProcessor>();

    /**
     * Create a new slave context on the recorder device which will render all the
     * sounds in the main LWJGL context with respect to this listener.
     *
     * @param l the listener from whose perspective the audio is rendered
     */
    public void addListener(Listener l) {
        try {this.latch.await();}
        catch (InterruptedException e) {
            // FIX: restore the interrupt status instead of silently swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        audioSend.addListener();
        this.listeners.add(l);
        l.setRenderer(this);
    }

    /**
     * Whenever new data is rendered in the perspective of this listener,
     * this Renderer will send that data to the SoundProcessor of your choosing.
     */
    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
        this.soundProcessorMap.put(l, sp);
    }

    /**
     * Registers a SoundProcessor for the main LWJGL context. If all you want to
     * do is record the sound you would normally hear in your application, then
     * this is the only method you have to worry about.
     */
    public void registerSoundProcessor(SoundProcessor sp){
        // register a sound processor for the default listener.
        this.soundProcessorMap.put(null, sp);
    }

    private static final Logger logger =
        Logger.getLogger(AudioSendRenderer.class.getName());

    /**
     * Instead of taking whatever device is available on the system, this call
     * creates the "Multiple Audio Send" device, which supports multiple listeners
     * in a limited capacity. For each listener, the device renders it not to the
     * sound device, but instead to buffers which it makes available via JNI.
     */
    public void initInThread(){
        try{
            if (!AL.isCreated()){
                AL.create("Multiple Audio Send", 44100, 60, false);
            }
        }catch (OpenALException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }catch (LWJGLException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }
        super.initInThread();

        ALCdevice device = AL.getDevice();

        // RLM: use reflection to grab the ID of our device for use later.
        try {
            Field deviceIDField;
            deviceIDField = ALCdevice.class.getDeclaredField("device");
            deviceIDField.setAccessible(true);
            try {deviceID = (Long)deviceIDField.get(device);}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
            deviceIDField.setAccessible(false);}
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        this.audioSend = new AudioSend(this.deviceID);
        this.outFormat = audioSend.getAudioFormat();
        initBuffer();
        System.out.println(outFormat);

        // The LWJGL context must be established as the master context before
        // any other listeners can be created on this device.
        audioSend.initDevice();
        // Now, everything is initialized, and it is safe to add more listeners.
        latch.countDown();
    }

    public void cleanup(){
        for(SoundProcessor sp : this.soundProcessorMap.values()){
            sp.cleanup();
        }
        super.cleanup();
    }

    /**
     * Pushes each registered listener's position, velocity, orientation, and
     * gain to the native send device. Slot 0 (the LWJGL master listener) is
     * null and is skipped — LWJGL manages it directly.
     */
    public void updateAllListeners(){
        for (int i = 0; i < this.listeners.size(); i++){
            Listener lis = this.listeners.get(i);
            if (null != lis){
                Vector3f location = lis.getLocation();
                Vector3f velocity = lis.getVelocity();
                Vector3f orientation = lis.getUp();
                float gain = lis.getVolume();

                audioSend.setNthListener3f(AL10.AL_POSITION,
                                           location.x, location.y, location.z, i);
                audioSend.setNthListener3f(AL10.AL_VELOCITY,
                                           velocity.x, velocity.y, velocity.z, i);
                // NOTE(review): OpenAL's AL_LISTENER AL_ORIENTATION is a
                // 6-float {at, up} pair; only the up vector is passed through a
                // 3-float setter here — confirm the native setNthListener3f
                // handles this, or the "at" direction is never applied.
                audioSend.setNthListener3f(AL10.AL_ORIENTATION,
                                           orientation.x, orientation.y, orientation.z, i);
                audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
            }
        }
    }

    // Scratch buffer into which the native device writes rendered samples.
    private ByteBuffer buffer;
    // Debug snapshots of the first bytes rendered for listeners 0 and 1,
    // compared in dispatchAudio to detect divergence between contexts.
    private byte[] debug0;
    private byte[] debug1;

    /** The slowest frame rate the audio buffer must be able to absorb. */
    public static final int MIN_FRAMERATE = 10;

    private void initBuffer(){
        // One buffer must hold up to (sampleRate / MIN_FRAMERATE) frames.
        int bufferSize = (int)(this.outFormat.getSampleRate() / ((float)MIN_FRAMERATE)) *
            this.outFormat.getFrameSize();
        this.buffer = BufferUtils.createByteBuffer(bufferSize);
        debug0 = new byte[4096];
        debug1 = new byte[4096];
    }

    /**
     * Steps the send device forward by the number of samples covered by
     * <code>tpf</code>, retrieves each listener's rendered audio, and hands it
     * to that listener's registered SoundProcessor (if any).
     *
     * @param tpf time per frame, in seconds
     */
    public void dispatchAudio(float tpf){

        int samplesToGet = (int) (tpf * outFormat.getSampleRate());
        System.out.println("want " + samplesToGet + " samples");
        try {latch.await();}
        catch (InterruptedException e) {
            // FIX: restore the interrupt status instead of silently swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        audioSend.step(samplesToGet);
        updateAllListeners();

        int debugBytes = 0;
        for (int i = 0; i < this.listeners.size(); i++){
            buffer.clear();
            audioSend.getSamples(buffer, samplesToGet, i);
            // FIX: bound the debug copies by what the buffer actually holds;
            // the original unconditionally read 4096 bytes and could throw
            // BufferUnderflowException.
            // NOTE(review): get() advances the buffer's position before
            // sp.process() sees it — confirm SoundProcessor rewinds or
            // ignores the position.
            if (i == 0) {
                debugBytes = Math.min(debug0.length, buffer.remaining());
                buffer.get(debug0, 0, debugBytes);
            }
            if (i == 1) buffer.get(debug1, 0, Math.min(debug1.length, buffer.remaining()));
            SoundProcessor sp =
                this.soundProcessorMap.get(this.listeners.get(i));
            if (null != sp){sp.process(buffer, samplesToGet*outFormat.getFrameSize(), outFormat);}
        }

        // FIX: the original looped i < samplesToGet over the 4096-byte debug
        // arrays; samplesToGet can exceed 4096 (44100 Hz / MIN_FRAMERATE), and
        // a sample index is not a byte index. Compare only bytes captured.
        int bytesToCompare = Math.min(debugBytes,
                                      Math.min(samplesToGet * outFormat.getFrameSize(),
                                               debug1.length));
        for (int i = 0; i < bytesToCompare; i++){
            if (debug1[i] != debug0[i]){
                System.out.println("inconsistency detected @ sample " + i);
                System.out.println("main : " + debug0[i]);
                System.out.println("aux : " + debug1[i]);
                break;
            }
        }
    }

    public void update(float tpf){
        super.update(tpf);
        dispatchAudio(tpf);
    }
}