diff src/com/aurellem/capture/AudioSend.java @ 3:a92de00f0414

migrating files
author Robert McIntyre <rlm@mit.edu>
date Tue, 25 Oct 2011 11:55:55 -0700
parents
children
line wrap: on
line diff
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/src/com/aurellem/capture/AudioSend.java	Tue Oct 25 11:55:55 2011 -0700
     1.3 @@ -0,0 +1,260 @@
     1.4 +package com.aurellem.capture;
     1.5 +
     1.6 +import java.lang.reflect.Field;
     1.7 +import java.nio.ByteBuffer;
     1.8 +import java.util.HashMap;
     1.9 +import java.util.Vector;
    1.10 +import java.util.concurrent.CountDownLatch;
    1.11 +import java.util.logging.Level;
    1.12 +import java.util.logging.Logger;
    1.13 +
    1.14 +import org.lwjgl.LWJGLException;
    1.15 +import org.lwjgl.openal.AL;
    1.16 +import org.lwjgl.openal.AL10;
    1.17 +import org.lwjgl.openal.ALCdevice;
    1.18 +import org.lwjgl.openal.OpenALException;
    1.19 +
    1.20 +import com.jme3.audio.Listener;
    1.21 +import com.jme3.audio.lwjgl.LwjglAudioRenderer;
    1.22 +import com.jme3.math.Vector3f;
    1.23 +import com.jme3.util.BufferUtils;
    1.24 +
    1.25 +public class AudioSend 
    1.26 +	extends LwjglAudioRenderer implements MultiListener {
    1.27 +
    1.28 +	/**
    1.29 +	 * Keeps track of all the listeners which have been registered so far.
     1.30 +	 * The first element is <code>null</code>, which represents the zeroth 
     1.31 +	 * LWJGL listener, which is created automatically.
    1.32 +	 */
    1.33 +	public Vector<Listener> listeners = new Vector<Listener>();
    1.34 +	
    1.35 +	public void initialize(){
    1.36 +		super.initialize();
    1.37 +		listeners.add(null);
    1.38 +	}
    1.39 +	
    1.40 +	/**
     1.41 +	 * The OpenAL device ID, needed by the native methods below.
     1.42 +	 * Currently it is obtained through reflection.
    1.43 +	 */
    1.44 +	private long deviceID;
    1.45 +	
    1.46 +	/**
     1.47 +	 * To ensure that <code>deviceID</code> and <code>listeners</code> are 
    1.48 +	 * properly initialized before any additional listeners are added.
    1.49 +	 */
    1.50 +	private CountDownLatch latch  = new CountDownLatch(1);
    1.51 +	
    1.52 +	private void waitForInit(){
    1.53 +		try {latch.await();} 
    1.54 +		catch (InterruptedException e) {e.printStackTrace();}
    1.55 +	}
    1.56 +	
    1.57 +	/**
    1.58 +	 * Each listener (including the main LWJGL listener) can be registered
    1.59 +	 * with a <code>SoundProcessor</code>, which this Renderer will call whenever 
    1.60 +	 * there is new audio data to be processed.
    1.61 +	 */
    1.62 +	public HashMap<Listener, SoundProcessor> soundProcessorMap =
    1.63 +		new HashMap<Listener, SoundProcessor>();
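          +	// A null key in this map stands for the default LWJGL listener;
          +	// see registerSoundProcessor(SoundProcessor) below.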
    1.64 +	
    1.65 +		
    1.66 +	/**
    1.67 +	 * Create a new slave context on the recorder device which will render all the 
    1.68 +	 * sounds in the main LWJGL context with respect to this listener.
    1.69 +	 */
    1.70 +	public void addListener(Listener l) {
    1.71 +		try {this.latch.await();} 
    1.72 +		catch (InterruptedException e) {e.printStackTrace();}
    1.73 +		this.addListener();
    1.74 +		this.listeners.add(l);
    1.75 +	}
    1.76 +	
    1.77 +	/**
     1.78 +	 * Whenever new data is rendered from the perspective of this listener, 
    1.79 +	 * this Renderer will send that data to the SoundProcessor of your choosing.
    1.80 +	 */
    1.81 +	public void registerSoundProcessor(Listener l, SoundProcessor sp) {
    1.82 +		this.soundProcessorMap.put(l, sp);
    1.83 +	}
    1.84 +	
    1.85 +	/**
     1.86 +	 * Registers a SoundProcessor for the main LWJGL context. If all you want to 
    1.87 +	 * do is record the sound you would normally hear in your application, then 
    1.88 +	 * this is the only method you have to worry about.
    1.89 +	 */
    1.90 +	public void registerSoundProcessor(SoundProcessor sp){
    1.91 +		// register a sound processor for the default listener.
    1.92 +		this.soundProcessorMap.put(null, sp);		
    1.93 +	}
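          +	// A minimal usage sketch, assuming a SoundProcessor implementation
          +	// such as a WAV-writing processor is available (the WaveFileWriter
          +	// name below is illustrative):
          +	//
          +	//   AudioSend audioSend = ...; // the active audio renderer
          +	//   audioSend.registerSoundProcessor(
          +	//       new WaveFileWriter(new File("main-listener.wav")));
          +	//
          +	// Each block of sound rendered for the default listener is then
          +	// handed to that processor's process() method.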
    1.94 +		
    1.95 +	private static final Logger logger = 
    1.96 +		Logger.getLogger(AudioSend.class.getName());
    1.97 +
    1.98 +	
    1.99 +	////////////   Native Methods
   1.100 +	
    1.101 +	/** This establishes the LWJGL context as the context which will be copied to all 
    1.102 +	 *  other contexts.  It must be called before any calls to <code>addListener()</code>.
   1.103 +	 */
   1.104 +	public void initDevice(){
   1.105 +		ninitDevice(this.deviceID);}
   1.106 +	public static native void ninitDevice(long device);
   1.107 +	
   1.108 +	/**
   1.109 +	 * The send device does not automatically process sound.  This step function will cause 
   1.110 +	 * the desired number of samples to be processed for each listener.  The results will then 
   1.111 +	 * be available via calls to <code>getSamples()</code> for each listener.
   1.112 +	 * @param samples
   1.113 +	 */
   1.114 +	public void step(int samples){
   1.115 +		nstep(this.deviceID, samples);}
   1.116 +	public static native void nstep(long device, int samples);
   1.117 +
   1.118 +	/**
   1.119 +	 * Retrieve the final rendered sound for a particular listener.  <code>contextNum == 0</code>
   1.120 +	 * is the main LWJGL context.
   1.121 +	 * @param buffer
   1.122 +	 * @param samples
   1.123 +	 * @param contextNum
   1.124 +	 */
   1.125 +	public void getSamples(ByteBuffer buffer, int samples, int contextNum){
   1.126 +		ngetSamples(this.deviceID, buffer, buffer.position(), samples, contextNum);}
   1.127 +	public static native void ngetSamples(
   1.128 +			long device, ByteBuffer buffer, int position, int samples, int contextNum);
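          +	// Typical render cycle, as performed by dispatchAudio() below:
          +	// process a batch of samples, then drain each listener's buffer.
          +	//
          +	//   step(n);
          +	//   buffer.clear();
          +	//   getSamples(buffer, n, 0); // contextNum 0 == main LWJGL listener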
   1.129 +	
   1.130 +	/**
   1.131 +	 * Create an additional listener on the recorder device.  The device itself will manage 
   1.132 +	 * this listener and synchronize it with the main LWJGL context. Processed sound samples
   1.133 +	 * for this listener will be available via a call to <code>getSamples()</code> with 
   1.134 +	 * <code>contextNum</code> equal to the number of times this method has been called. 
   1.135 +	 */
   1.136 +	public void addListener(){naddListener(this.deviceID);}
   1.137 +	public static native void naddListener(long device);
   1.138 +	
   1.139 +	/**
    1.140 +	 * This will internally call <code>alListener3f</code> in the appropriate slave context and update
    1.141 +	 * that context's listener's parameters. Calling this with a <code>contextNum</code> greater than the 
    1.142 +	 * current number of slave contexts will have no effect.
   1.143 +	 * @param pname
   1.144 +	 * @param v1
   1.145 +	 * @param v2
   1.146 +	 * @param v3
   1.147 +	 * @param contextNum
   1.148 +	 */
   1.149 +	public void setNthListener3f(int pname, float v1, float v2, float v3, int contextNum){
   1.150 +		nsetNthListener3f(pname, v1, v2, v3, this.deviceID, contextNum);}
   1.151 +	public static native void 
   1.152 +	nsetNthListener3f(int pname, float v1, float v2, float v3, long device, int contextNum);
   1.153 +	
   1.154 +	/**
    1.155 +	 * This will internally call <code>alListenerf</code> in the appropriate slave context and update
    1.156 +	 * that context's listener's parameters. Calling this with a <code>contextNum</code> greater than the 
    1.157 +	 * current number of slave contexts will have no effect.
   1.158 +	 * @param pname
   1.159 +	 * @param v1
   1.160 +	 * @param contextNum
   1.161 +	 */
   1.162 +	public void setNthListenerf(int pname, float v1, int contextNum){
   1.163 +		nsetNthListenerf(pname, v1, this.deviceID, contextNum);}
   1.164 +	public static native void nsetNthListenerf(int pname, float v1, long device, int contextNum);
   1.165 +	
   1.166 +	/**
   1.167 +	 * Instead of taking whatever device is available on the system, this call 
   1.168 +	 * creates the "Multiple Audio Send" device, which supports multiple listeners in a limited
    1.169 +	 * capacity.  For each listener, the device renders sound not to the physical output, but
   1.170 +	 * instead to buffers which it makes available via JNI.
   1.171 +	 */
   1.172 +	public void initInThread(){
   1.173 +		try{
   1.174 +            if (!AL.isCreated()){
   1.175 +                AL.create("Multiple Audio Send", 44100, 60, false);
   1.176 +            }
   1.177 +        }catch (OpenALException ex){
   1.178 +            logger.log(Level.SEVERE, "Failed to load audio library", ex);
   1.179 +            System.exit(1);
   1.180 +            return;
   1.181 +        }catch (LWJGLException ex){
   1.182 +            logger.log(Level.SEVERE, "Failed to load audio library", ex);
   1.183 +            System.exit(1);
   1.184 +            return;
   1.185 +        }
   1.186 +		super.initInThread();
   1.187 +
   1.188 +		ALCdevice device = AL.getDevice();
   1.189 +
   1.190 +		// RLM: use reflection to grab the ID of our device for use later.
   1.191 +		try {
   1.192 +			Field deviceIDField;
   1.193 +			deviceIDField = ALCdevice.class.getDeclaredField("device");
   1.194 +			deviceIDField.setAccessible(true);
   1.195 +			try {deviceID = (Long)deviceIDField.get(device);} 
   1.196 +			catch (IllegalArgumentException e) {e.printStackTrace();} 
   1.197 +			catch (IllegalAccessException e) {e.printStackTrace();}
   1.198 +			deviceIDField.setAccessible(false);} 
   1.199 +		catch (SecurityException e) {e.printStackTrace();} 
   1.200 +		catch (NoSuchFieldException e) {e.printStackTrace();}
   1.201 +		
   1.202 +		// the LWJGL context must be established as the master context before 
   1.203 +		// any other listeners can be created on this device.
   1.204 +		initDevice();
   1.205 +		// Now, everything is initialized, and it is safe to add more listeners.
   1.206 +		latch.countDown();
   1.207 +	}
   1.208 +
   1.209 +	
   1.210 +	public void cleanup(){
   1.211 +		for(SoundProcessor sp : this.soundProcessorMap.values()){
   1.212 +			sp.cleanup();
   1.213 +		}
   1.214 +		super.cleanup();
   1.215 +	}
   1.216 +	
   1.217 +	public void updateAllListeners(){
   1.218 +		for (int i = 0; i < this.listeners.size(); i++){
   1.219 +			Listener lis = this.listeners.get(i);
   1.220 +			if (null != lis){
   1.221 +				Vector3f location = lis.getLocation();
   1.222 +				Vector3f velocity = lis.getVelocity();
   1.223 +				Vector3f orientation = lis.getUp();
   1.224 +				float gain = lis.getVolume();
   1.225 +				setNthListener3f(AL10.AL_POSITION, 
   1.226 +						location.x, location.y, location.z, i);
   1.227 +				setNthListener3f(AL10.AL_VELOCITY, 
   1.228 +						velocity.x, velocity.y, velocity.z, i);
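          +				// NOTE: only the listener's up vector is passed here;
          +				// standard OpenAL expects AL_ORIENTATION as six floats
          +				// ("at" followed by "up"), so presumably the native
          +				// side accepts this reduced form.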
   1.229 +				setNthListener3f(AL10.AL_ORIENTATION,
   1.230 +						orientation.x, orientation.y, orientation.z, i);
   1.231 +				setNthListenerf(AL10.AL_GAIN, gain, i);
   1.232 +			}
   1.233 +		}
   1.234 +	}
   1.235 +	
   1.236 +	
   1.237 +	public final static int BYTES_PER_SAMPLE = 4;
   1.238 +	private ByteBuffer buffer = BufferUtils.createByteBuffer(4096); 
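          +	// 4096 bytes holds 1024 four-byte samples, roughly 23 ms of audio
          +	// at 44100 Hz; dispatchAudio() below assumes each frame's request
          +	// fits within this buffer.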
   1.239 +	
   1.240 +	public void dispatchAudio(float tpf){
   1.241 +		int samplesToGet = (int) (tpf * 44100);
   1.242 +		try {latch.await();} 
   1.243 +		catch (InterruptedException e) {e.printStackTrace();}
   1.244 +		step(samplesToGet);
   1.245 +		updateAllListeners();
   1.246 +		
   1.247 +		for (int i = 0; i < this.listeners.size(); i++){		
   1.248 +			buffer.clear();
   1.249 +			this.getSamples(buffer, samplesToGet, i);
   1.250 +			SoundProcessor sp = 
   1.251 +			this.soundProcessorMap.get(this.listeners.get(i));
   1.252 +			if (null != sp){sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);}
   1.253 +		}
   1.254 +		
   1.255 +	}
   1.256 +		
   1.257 +	public void update(float tpf){
   1.258 +		super.update(tpf);
   1.259 +        dispatchAudio(tpf);
   1.260 +	}
   1.261 +	
   1.262 +}
   1.263 +
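
A minimal sketch of how this renderer might be wired up once it is installed as
the application's audio renderer (audioSend, mainProcessor, and auxProcessor are
placeholder names for whatever instances are on hand; only addListener and
registerSoundProcessor come from the class above):

    // record exactly what the application's own listener hears
    audioSend.registerSoundProcessor(mainProcessor);

    // add a second, independently positioned listener and record it as well
    Listener auxListener = new Listener();
    audioSend.addListener(auxListener);
    audioSend.registerSoundProcessor(auxListener, auxProcessor);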