diff src/com/aurellem/capture/audio/AudioSendRenderer.java @ 65:23e3df41db3c

reformatting for web
author Robert McIntyre <rlm@mit.edu>
date Sat, 11 Feb 2012 12:25:26 -0700
parents 2f129118e2d6
children
line wrap: on
line diff
     1.1 --- a/src/com/aurellem/capture/audio/AudioSendRenderer.java	Fri Feb 10 04:07:05 2012 -0700
     1.2 +++ b/src/com/aurellem/capture/audio/AudioSendRenderer.java	Sat Feb 11 12:25:26 2012 -0700
     1.3 @@ -27,106 +27,113 @@
     1.4  
     1.5  public class AudioSendRenderer 
     1.6  
     1.7 -	extends LwjglAudioRenderer implements MultiListener {
     1.8 +    extends LwjglAudioRenderer implements MultiListener {
     1.9  
    1.10 -	private AudioSend audioSend;
    1.11 -	private AudioFormat outFormat;// =  new AudioFormat(44100.0f, 32, 1, true, false);
    1.12 +    private AudioSend audioSend;
    1.13 +    private AudioFormat outFormat;
    1.14  	
    1.15 -	/**
    1.16 -	 * Keeps track of all the listeners which have been registered so far.
    1.17 -	 * The first element is <code>null</code>, which represents the zeroth 
    1.18 -	 * LWJGL listener which is created automatically.
    1.19 -	 */
    1.20 -	public Vector<Listener> listeners = new Vector<Listener>();
    1.21 +    /**
    1.22 +     * Keeps track of all the listeners which have been registered
    1.23 +     * so far.  The first element is <code>null</code>, which
    1.24 +     * represents the zeroth LWJGL listener which is created
    1.25 +     * automatically.
    1.26 +     */
    1.27 +    public Vector<Listener> listeners = new Vector<Listener>();
    1.28  	
    1.29 -	public void initialize(){
    1.30 -		super.initialize();
    1.31 -		listeners.add(null);
    1.32 +    public void initialize(){
    1.33 +	super.initialize();
    1.34 +	listeners.add(null);
    1.35 +    }
    1.36 +	
    1.37 +    /**
    1.38 +     * This is to call the native methods which require the OpenAL
    1.39 +     * device ID.  Currently it is obtained through reflection.
    1.40 +     */
    1.41 +    private long deviceID;
    1.42 +	
    1.43 +    /**
     1.44 +     * To ensure that <code>deviceID</code> and
     1.45 +     * <code>listeners</code> are properly initialized before any
    1.46 +     * additional listeners are added.
    1.47 +     */
    1.48 +    private CountDownLatch latch  = new CountDownLatch(1);
    1.49 +	
    1.50 +    /**
    1.51 +     * Each listener (including the main LWJGL listener) can be
    1.52 +     * registered with a <code>SoundProcessor</code>, which this
    1.53 +     * Renderer will call whenever there is new audio data to be
    1.54 +     * processed.
    1.55 +     */
    1.56 +    public HashMap<Listener, SoundProcessor> soundProcessorMap =
    1.57 +	new HashMap<Listener, SoundProcessor>();
    1.58 +	
    1.59 +    /**
    1.60 +     * Create a new slave context on the recorder device which
    1.61 +     * will render all the sounds in the main LWJGL context with
    1.62 +     * respect to this listener.
    1.63 +     */
    1.64 +    public void addListener(Listener l) {
    1.65 +	try {this.latch.await();} 
    1.66 +	catch (InterruptedException e) {e.printStackTrace();}
    1.67 +	audioSend.addListener();
    1.68 +	this.listeners.add(l);
    1.69 +	l.setRenderer(this);
    1.70 +    }
    1.71 +	
    1.72 +    /**
    1.73 +     * Whenever new data is rendered in the perspective of this
    1.74 +     * listener, this Renderer will send that data to the
    1.75 +     * SoundProcessor of your choosing.
    1.76 +     */
    1.77 +    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
    1.78 +	this.soundProcessorMap.put(l, sp);
    1.79 +    }
    1.80 +	
    1.81 +    /**
     1.82 +     * Registers a SoundProcessor for the main LWJGL context. If all
    1.83 +     * you want to do is record the sound you would normally hear in
    1.84 +     * your application, then this is the only method you have to
    1.85 +     * worry about.
    1.86 +     */
    1.87 +    public void registerSoundProcessor(SoundProcessor sp){
    1.88 +	// register a sound processor for the default listener.
    1.89 +	this.soundProcessorMap.put(null, sp);		
    1.90 +    }
    1.91 +		
    1.92 +    private static final Logger logger = 
    1.93 +	Logger.getLogger(AudioSendRenderer.class.getName());
    1.94 +	
    1.95 +    /**
    1.96 +     * Instead of taking whatever device is available on the system,
    1.97 +     * this call creates the "Multiple Audio Send" device, which
    1.98 +     * supports multiple listeners in a limited capacity.  For each
    1.99 +     * listener, the device renders it not to the sound device, but
   1.100 +     * instead to buffers which it makes available via JNI.
   1.101 +     */
   1.102 +    public void initInThread(){
   1.103 +	try{
   1.104 +	    switch (JmeSystem.getPlatform()){
   1.105 +	    case Windows64:
   1.106 +		Natives.extractNativeLib("windows/audioSend", 
   1.107 +					 "OpenAL64", true, true);
   1.108 +		break;
   1.109 +	    case Windows32:
   1.110 +		Natives.extractNativeLib("windows/audioSend", 
   1.111 +					 "OpenAL32", true, true);	
   1.112 +		break;
   1.113 +	    case Linux64:
   1.114 +		Natives.extractNativeLib("linux/audioSend", 
   1.115 +					 "openal64", true, true);
   1.116 +		break;
   1.117 +	    case Linux32:
   1.118 +		Natives.extractNativeLib("linux/audioSend", 
   1.119 +					 "openal", true, true);
   1.120 +		break;
   1.121 +	    }
   1.122  	}
   1.123 -	
   1.124 -	/**
   1.125 -	 * This is to call the native methods which require the OpenAL device ID.
   1.126 -	 * currently it is obtained through reflection.
   1.127 -	 */
   1.128 -	private long deviceID;
   1.129 -	
   1.130 -	/**
   1.131 -	 * To ensure that <code>deviceID<code> and <code>listeners<code> are 
   1.132 -	 * properly initialized before any additional listeners are added.
   1.133 -	 */
   1.134 -	private CountDownLatch latch  = new CountDownLatch(1);
   1.135 -	
   1.136 -	/**
   1.137 -	 * Each listener (including the main LWJGL listener) can be registered
   1.138 -	 * with a <code>SoundProcessor</code>, which this Renderer will call whenever 
   1.139 -	 * there is new audio data to be processed.
   1.140 -	 */
   1.141 -	public HashMap<Listener, SoundProcessor> soundProcessorMap =
   1.142 -		new HashMap<Listener, SoundProcessor>();
   1.143 -	
   1.144 -		
   1.145 -	/**
   1.146 -	 * Create a new slave context on the recorder device which will render all the 
   1.147 -	 * sounds in the main LWJGL context with respect to this listener.
   1.148 -	 */
   1.149 -	public void addListener(Listener l) {
   1.150 -		try {this.latch.await();} 
   1.151 -		catch (InterruptedException e) {e.printStackTrace();}
   1.152 -		audioSend.addListener();
   1.153 -		this.listeners.add(l);
   1.154 -		l.setRenderer(this);
   1.155 -	}
   1.156 -	
   1.157 -	/**
   1.158 -	 * Whenever new data is rendered in the perspective of this listener, 
   1.159 -	 * this Renderer will send that data to the SoundProcessor of your choosing.
   1.160 -	 */
   1.161 -	public void registerSoundProcessor(Listener l, SoundProcessor sp) {
   1.162 -		this.soundProcessorMap.put(l, sp);
   1.163 -	}
   1.164 -	
   1.165 -	/**
   1.166 -	 * Registers a SoundProcessor for the main LWJGL context. IF all you want to 
   1.167 -	 * do is record the sound you would normally hear in your application, then 
   1.168 -	 * this is the only method you have to worry about.
   1.169 -	 */
   1.170 -	public void registerSoundProcessor(SoundProcessor sp){
   1.171 -		// register a sound processor for the default listener.
   1.172 -		this.soundProcessorMap.put(null, sp);		
   1.173 -	}
   1.174 -		
   1.175 -	private static final Logger logger = 
   1.176 -		Logger.getLogger(AudioSendRenderer.class.getName());
   1.177 +	catch (IOException ex) {ex.printStackTrace();}
   1.178  
   1.179 -	
   1.180 -	
   1.181 -	/**
   1.182 -	 * Instead of taking whatever device is available on the system, this call 
   1.183 -	 * creates the "Multiple Audio Send" device, which supports multiple listeners in a limited
   1.184 -	 * capacity.  For each listener, the device renders it not to the sound device, but
   1.185 -	 * instead to buffers which it makes available via JNI.
   1.186 -	 */
   1.187 -	public void initInThread(){
   1.188 -		
   1.189 -		try{
   1.190 -			switch (JmeSystem.getPlatform()){
   1.191 -			case Windows64:
   1.192 -				Natives.extractNativeLib("windows/audioSend", "OpenAL64", true, true);
   1.193 -				break;
   1.194 -			case Windows32:
   1.195 -				Natives.extractNativeLib("windows/audioSend", "OpenAL32", true, true);	
   1.196 -				break;
   1.197 -			case Linux64:
   1.198 -				Natives.extractNativeLib("linux/audioSend", "openal64", true, true);
   1.199 -				break;
   1.200 -			case Linux32:
   1.201 -				Natives.extractNativeLib("linux/audioSend", "openal", true, true);
   1.202 -				break;
   1.203 -			}
   1.204 -		}
   1.205 -		catch (IOException ex) {ex.printStackTrace();}
   1.206 -
   1.207 -		try{
   1.208 +	try{
   1.209              if (!AL.isCreated()){
   1.210                  AL.create("Multiple Audio Send", 44100, 60, false);
   1.211              }
   1.212 @@ -139,95 +146,100 @@
   1.213              System.exit(1);
   1.214              return;
   1.215          }
   1.216 -		super.initInThread();
   1.217 +	super.initInThread();
   1.218  
   1.219 -		ALCdevice device = AL.getDevice();
   1.220 +	ALCdevice device = AL.getDevice();
   1.221  
   1.222 -		// RLM: use reflection to grab the ID of our device for use later.
   1.223 -		try {
   1.224 -			Field deviceIDField;
   1.225 -			deviceIDField = ALCdevice.class.getDeclaredField("device");
   1.226 -			deviceIDField.setAccessible(true);
   1.227 -			try {deviceID = (Long)deviceIDField.get(device);} 
   1.228 -			catch (IllegalArgumentException e) {e.printStackTrace();} 
   1.229 -			catch (IllegalAccessException e) {e.printStackTrace();}
   1.230 -			deviceIDField.setAccessible(false);} 
   1.231 -		catch (SecurityException e) {e.printStackTrace();} 
   1.232 -		catch (NoSuchFieldException e) {e.printStackTrace();}
   1.233 +	// RLM: use reflection to grab the ID of our device for use
   1.234 +	// later.
   1.235 +	try {
   1.236 +	    Field deviceIDField;
   1.237 +	    deviceIDField = ALCdevice.class.getDeclaredField("device");
   1.238 +	    deviceIDField.setAccessible(true);
   1.239 +	    try {deviceID = (Long)deviceIDField.get(device);} 
   1.240 +	    catch (IllegalArgumentException e) {e.printStackTrace();} 
   1.241 +	    catch (IllegalAccessException e) {e.printStackTrace();}
   1.242 +	    deviceIDField.setAccessible(false);} 
   1.243 +	catch (SecurityException e) {e.printStackTrace();} 
   1.244 +	catch (NoSuchFieldException e) {e.printStackTrace();}
   1.245  		
   1.246 -		this.audioSend = new AudioSend(this.deviceID);
   1.247 -		this.outFormat = audioSend.getAudioFormat();
   1.248 -		initBuffer();
   1.249 +	this.audioSend = new AudioSend(this.deviceID);
   1.250 +	this.outFormat = audioSend.getAudioFormat();
   1.251 +	initBuffer();
   1.252  				
   1.253 -		// The LWJGL context must be established as the master context before 
   1.254 -		// any other listeners can be created on this device.
   1.255 -		audioSend.initDevice();
   1.256 -		// Now, everything is initialized, and it is safe to add more listeners.
   1.257 -		latch.countDown();
   1.258 +	// The LWJGL context must be established as the master context
   1.259 +	// before any other listeners can be created on this device.
   1.260 +	audioSend.initDevice();
   1.261 +	// Now, everything is initialized, and it is safe to add more
   1.262 +	// listeners.
   1.263 +	latch.countDown();
   1.264 +    }
   1.265 +	
   1.266 +    public void cleanup(){
   1.267 +	for(SoundProcessor sp : this.soundProcessorMap.values()){
   1.268 +	    sp.cleanup();
   1.269  	}
   1.270 +	super.cleanup();
   1.271 +    }
   1.272 +	
   1.273 +    public void updateAllListeners(){
   1.274 +	for (int i = 0; i < this.listeners.size(); i++){
   1.275 +	    Listener lis = this.listeners.get(i);
   1.276 +	    if (null != lis){
   1.277 +		Vector3f location = lis.getLocation();
   1.278 +		Vector3f velocity = lis.getVelocity();
   1.279 +		Vector3f orientation = lis.getUp();
   1.280 +		float gain = lis.getVolume();
   1.281 +		audioSend.setNthListener3f
   1.282 +		    (AL10.AL_POSITION, 
   1.283 +		     location.x, location.y, location.z, i);
   1.284 +		audioSend.setNthListener3f
   1.285 +		    (AL10.AL_VELOCITY, 
   1.286 +		     velocity.x, velocity.y, velocity.z, i);
   1.287 +		audioSend.setNthListener3f
   1.288 +		    (AL10.AL_ORIENTATION,
   1.289 +		     orientation.x, orientation.y, orientation.z, i);
   1.290 +		audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
   1.291 +	    }
   1.292 +	}
   1.293 +    }
   1.294  
     1.295 +    private ByteBuffer buffer;
   1.296 +
   1.297 +    public static final int MIN_FRAMERATE = 10;
   1.298  	
   1.299 -	public void cleanup(){
   1.300 -		for(SoundProcessor sp : this.soundProcessorMap.values()){
   1.301 -			sp.cleanup();
   1.302 -		}
   1.303 -		super.cleanup();
   1.304 +    private void initBuffer(){
   1.305 +	int bufferSize = 
   1.306 +	    (int)(this.outFormat.getSampleRate() / 
   1.307 +		  ((float)MIN_FRAMERATE)) *
   1.308 +	    this.outFormat.getFrameSize();
   1.309 +
   1.310 +	this.buffer = BufferUtils.createByteBuffer(bufferSize);
   1.311 +    }
   1.312 +
   1.313 +    public void dispatchAudio(float tpf){
   1.314 +		
   1.315 +	int samplesToGet = (int) (tpf * outFormat.getSampleRate());
   1.316 +	try {latch.await();} 
   1.317 +	catch (InterruptedException e) {e.printStackTrace();}
   1.318 +	audioSend.step(samplesToGet);
   1.319 +	updateAllListeners();
   1.320 +
   1.321 +	for (int i = 0; i < this.listeners.size(); i++){		
   1.322 +	    buffer.clear();
   1.323 +	    audioSend.getSamples(buffer, samplesToGet, i);
   1.324 +	    SoundProcessor sp = 
   1.325 +		this.soundProcessorMap.get(this.listeners.get(i));
   1.326 +	    if (null != sp){
   1.327 +		sp.process 
   1.328 +		    (buffer, 
   1.329 +		     samplesToGet*outFormat.getFrameSize(), outFormat);}
   1.330  	}
   1.331 -	
   1.332 -	public void updateAllListeners(){
   1.333 -		for (int i = 0; i < this.listeners.size(); i++){
   1.334 -			Listener lis = this.listeners.get(i);
   1.335 -			if (null != lis){
   1.336 -				Vector3f location = lis.getLocation();
   1.337 -				Vector3f velocity = lis.getVelocity();
   1.338 -				Vector3f orientation = lis.getUp();
   1.339 -				float gain = lis.getVolume();
   1.340 -				audioSend.setNthListener3f(AL10.AL_POSITION, 
   1.341 -						location.x, location.y, location.z, i);
   1.342 -				audioSend.setNthListener3f(AL10.AL_VELOCITY, 
   1.343 -						velocity.x, velocity.y, velocity.z, i);
   1.344 -				audioSend.setNthListener3f(AL10.AL_ORIENTATION,
   1.345 -						orientation.x, orientation.y, orientation.z, i);
   1.346 -				audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
   1.347 -			}
   1.348 -		}
   1.349 -	}
   1.350 -	
   1.351 -	
   1.352 -	private ByteBuffer buffer;;
   1.353 -
   1.354 -	public static final int MIN_FRAMERATE = 10;
   1.355 -	
   1.356 -	private void initBuffer(){
   1.357 -		int bufferSize = (int)(this.outFormat.getSampleRate() / ((float)MIN_FRAMERATE)) * 
   1.358 -					this.outFormat.getFrameSize();
   1.359 -		this.buffer = BufferUtils.createByteBuffer(bufferSize);
   1.360 -	}
   1.361 -	/*
   1.362 -	
   1.363 -	*/
   1.364 -	public void dispatchAudio(float tpf){
   1.365 +    }
   1.366  		
   1.367 -		int samplesToGet = (int) (tpf * outFormat.getSampleRate());
   1.368 -		try {latch.await();} 
   1.369 -		catch (InterruptedException e) {e.printStackTrace();}
   1.370 -		audioSend.step(samplesToGet);
   1.371 -		updateAllListeners();
   1.372 -
   1.373 -		for (int i = 0; i < this.listeners.size(); i++){		
   1.374 -			buffer.clear();
   1.375 -			audioSend.getSamples(buffer, samplesToGet, i);
   1.376 -			SoundProcessor sp = 
   1.377 -					this.soundProcessorMap.get(this.listeners.get(i));
   1.378 -			if (null != sp){sp.process(buffer, samplesToGet*outFormat.getFrameSize(), outFormat);}
   1.379 -		}
   1.380 -
   1.381 -	}
   1.382 -		
   1.383 -	public void update(float tpf){
   1.384 -		super.update(tpf);
   1.385 +    public void update(float tpf){
   1.386 +	super.update(tpf);
   1.387          dispatchAudio(tpf);
   1.388 -	}
   1.389 -	
   1.390 +    }
   1.391  }
   1.392