jmeCapture
changeset 11:8a6b1684f536
refactored.
| author   | Robert McIntyre <rlm@mit.edu> |
|----------|-------------------------------|
| date     | Thu, 27 Oct 2011 02:27:02 -0700 |
| parents  | 4c5fc53778c1 |
| children | d10f4d4ff15a |
| files    | build.xml src/com/aurellem/capture/Capture.java src/com/aurellem/capture/audio/AudioSend.java src/com/aurellem/capture/audio/AudioSendRenderer.java src/com/aurellem/capture/audio/WaveFileWriter.java src/com/aurellem/capture/hello/HelloAudio.java src/com/aurellem/capture/hello/TestWrite.java src/com/aurellem/capture/video/XuggleVideoRecorder.java |
| diffstat | 8 files changed, 222 insertions(+), 300 deletions(-) |
--- a/build.xml	Wed Oct 26 09:38:27 2011 -0700
+++ b/build.xml	Thu Oct 27 02:27:02 2011 -0700
@@ -10,6 +10,7 @@
     <pathelement path="${lib}/jme"/>
     <pathelement path="${lib}/lwjgl.jar"/>
     <pathelement path="${lib}/xuggle/xuggle-xuggler.jar"/>
+    <pathelement path="${lib}/audio-send.jar"/>
 
 
   </path>
--- a/src/com/aurellem/capture/Capture.java	Wed Oct 26 09:38:27 2011 -0700
+++ b/src/com/aurellem/capture/Capture.java	Thu Oct 27 02:27:02 2011 -0700
@@ -13,6 +13,9 @@
 
     public static void SimpleCaptureVideo(Application app, File file) throws IOException{
         app.getViewPort().setClearFlags(true, true, true);
+        // this prevents pixels from staying in the render buffer between frames
+        // and messing the video up.  It's not a problem since Black is the default, and this
+        // can be overridden by user code.
         app.getViewPort().setBackgroundColor(ColorRGBA.Black);
 
         // The XuggleVideoRecorder is better than the AVIVideoRecorder in every way
@@ -24,14 +27,9 @@
 
         if (file.getCanonicalPath().endsWith(".avi")){
             videoRecorder = new AVIVideoRecorder(file);}
-        else { videoRecorder = new XuggleVideoRecorder(file); }
+        else { videoRecorder = new XuggleVideoRecorder(file);}
 
         app.getStateManager().attach(videoRecorder);
         app.getViewPort().addFinalProcessor(videoRecorder);
-    }
-
-
-
-
-
+    }
 }
--- a/src/com/aurellem/capture/audio/AudioSend.java	Wed Oct 26 09:38:27 2011 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,260 +0,0 @@
-package com.aurellem.capture.audio;
-
-import java.lang.reflect.Field;
-import java.nio.ByteBuffer;
-import java.util.HashMap;
-import java.util.Vector;
-import java.util.concurrent.CountDownLatch;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.lwjgl.LWJGLException;
-import org.lwjgl.openal.AL;
-import org.lwjgl.openal.AL10;
-import org.lwjgl.openal.ALCdevice;
-import org.lwjgl.openal.OpenALException;
-
-import com.jme3.audio.Listener;
-import com.jme3.audio.lwjgl.LwjglAudioRenderer;
-import com.jme3.math.Vector3f;
-import com.jme3.util.BufferUtils;
-
-public class AudioSend
-    extends LwjglAudioRenderer implements MultiListener {
-
-    /**
-     * Keeps track of all the listeners which have been registered so far.
-     * The first element is <code>null</code>, which represents the zeroth
-     * LWJGL listener which is created automatically.
-     */
-    public Vector<Listener> listeners = new Vector<Listener>();
-
-    public void initialize(){
-        super.initialize();
-        listeners.add(null);
-    }
-
-    /**
-     * This is to call the native methods which require the OpenAL device ID.
-     * currently it is obtained through reflection.
-     */
-    private long deviceID;
-
-    /**
-     * To ensure that <code>deviceID<code> and <code>listeners<code> are
-     * properly initialized before any additional listeners are added.
-     */
-    private CountDownLatch latch = new CountDownLatch(1);
-
-    private void waitForInit(){
-        try {latch.await();}
-        catch (InterruptedException e) {e.printStackTrace();}
-    }
-
-    /**
-     * Each listener (including the main LWJGL listener) can be registered
-     * with a <code>SoundProcessor</code>, which this Renderer will call whenever
-     * there is new audio data to be processed.
-     */
-    public HashMap<Listener, SoundProcessor> soundProcessorMap =
-        new HashMap<Listener, SoundProcessor>();
-
-
-    /**
-     * Create a new slave context on the recorder device which will render all the
-     * sounds in the main LWJGL context with respect to this listener.
-     */
-    public void addListener(Listener l) {
-        try {this.latch.await();}
-        catch (InterruptedException e) {e.printStackTrace();}
-        this.addListener();
-        this.listeners.add(l);
-    }
-
-    /**
-     * Whenever new data is rendered in the perspective of this listener,
-     * this Renderer will send that data to the SoundProcessor of your choosing.
-     */
-    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
-        this.soundProcessorMap.put(l, sp);
-    }
-
-    /**
-     * Registers a SoundProcessor for the main LWJGL context. IF all you want to
-     * do is record the sound you would normally hear in your application, then
-     * this is the only method you have to worry about.
-     */
-    public void registerSoundProcessor(SoundProcessor sp){
-        // register a sound processor for the default listener.
-        this.soundProcessorMap.put(null, sp);
-    }
-
-    private static final Logger logger =
-        Logger.getLogger(AudioSend.class.getName());
-
-
-    //////////// Native Methods
-
-    /** This establishes the LWJGL context as the context which will be copies to all
-     *  other contexts. It must be called before any calls to <code>addListener();</code>
-     */
-    public void initDevice(){
-        ninitDevice(this.deviceID);}
-    public static native void ninitDevice(long device);
-
-    /**
-     * The send device does not automatically process sound. This step function will cause
-     * the desired number of samples to be processed for each listener. The results will then
-     * be available via calls to <code>getSamples()</code> for each listener.
-     * @param samples
-     */
-    public void step(int samples){
-        nstep(this.deviceID, samples);}
-    public static native void nstep(long device, int samples);
-
-    /**
-     * Retrieve the final rendered sound for a particular listener. <code>contextNum == 0</code>
-     * is the main LWJGL context.
-     * @param buffer
-     * @param samples
-     * @param contextNum
-     */
-    public void getSamples(ByteBuffer buffer, int samples, int contextNum){
-        ngetSamples(this.deviceID, buffer, buffer.position(), samples, contextNum);}
-    public static native void ngetSamples(
-        long device, ByteBuffer buffer, int position, int samples, int contextNum);
-
-    /**
-     * Create an additional listener on the recorder device. The device itself will manage
-     * this listener and synchronize it with the main LWJGL context. Processed sound samples
-     * for this listener will be available via a call to <code>getSamples()</code> with
-     * <code>contextNum</code> equal to the number of times this method has been called.
-     */
-    public void addListener(){naddListener(this.deviceID);}
-    public static native void naddListener(long device);
-
-    /**
-     * This will internally call <code>alListener3f<code> in the appropriate slave context and update
-     * that context's listener's parameters. Calling this for a number greater than the current
-     * number of slave contexts will have no effect.
-     * @param pname
-     * @param v1
-     * @param v2
-     * @param v3
-     * @param contextNum
-     */
-    public void setNthListener3f(int pname, float v1, float v2, float v3, int contextNum){
-        nsetNthListener3f(pname, v1, v2, v3, this.deviceID, contextNum);}
-    public static native void
-    nsetNthListener3f(int pname, float v1, float v2, float v3, long device, int contextNum);
-
-    /**
-     * This will internally call <code>alListenerf<code> in the appropriate slave context and update
-     * that context's listener's parameters. Calling this for a number greater than the current
-     * number of slave contexts will have no effect.
-     * @param pname
-     * @param v1
-     * @param contextNum
-     */
-    public void setNthListenerf(int pname, float v1, int contextNum){
-        nsetNthListenerf(pname, v1, this.deviceID, contextNum);}
-    public static native void nsetNthListenerf(int pname, float v1, long device, int contextNum);
-
-    /**
-     * Instead of taking whatever device is available on the system, this call
-     * creates the "Multiple Audio Send" device, which supports multiple listeners in a limited
-     * capacity. For each listener, the device renders it not to the sound device, but
-     * instead to buffers which it makes available via JNI.
-     */
-    public void initInThread(){
-        try{
-            if (!AL.isCreated()){
-                AL.create("Multiple Audio Send", 44100, 60, false);
-            }
-        }catch (OpenALException ex){
-            logger.log(Level.SEVERE, "Failed to load audio library", ex);
-            System.exit(1);
-            return;
-        }catch (LWJGLException ex){
-            logger.log(Level.SEVERE, "Failed to load audio library", ex);
-            System.exit(1);
-            return;
-        }
-        super.initInThread();
-
-        ALCdevice device = AL.getDevice();
-
-        // RLM: use reflection to grab the ID of our device for use later.
-        try {
-            Field deviceIDField;
-            deviceIDField = ALCdevice.class.getDeclaredField("device");
-            deviceIDField.setAccessible(true);
-            try {deviceID = (Long)deviceIDField.get(device);}
-            catch (IllegalArgumentException e) {e.printStackTrace();}
-            catch (IllegalAccessException e) {e.printStackTrace();}
-            deviceIDField.setAccessible(false);}
-        catch (SecurityException e) {e.printStackTrace();}
-        catch (NoSuchFieldException e) {e.printStackTrace();}
-
-        // the LWJGL context must be established as the master context before
-        // any other listeners can be created on this device.
-        initDevice();
-        // Now, everything is initialized, and it is safe to add more listeners.
-        latch.countDown();
-    }
-
-
-    public void cleanup(){
-        for(SoundProcessor sp : this.soundProcessorMap.values()){
-            sp.cleanup();
-        }
-        super.cleanup();
-    }
-
-    public void updateAllListeners(){
-        for (int i = 0; i < this.listeners.size(); i++){
-            Listener lis = this.listeners.get(i);
-            if (null != lis){
-                Vector3f location = lis.getLocation();
-                Vector3f velocity = lis.getVelocity();
-                Vector3f orientation = lis.getUp();
-                float gain = lis.getVolume();
-                setNthListener3f(AL10.AL_POSITION,
-                    location.x, location.y, location.z, i);
-                setNthListener3f(AL10.AL_VELOCITY,
-                    velocity.x, velocity.y, velocity.z, i);
-                setNthListener3f(AL10.AL_ORIENTATION,
-                    orientation.x, orientation.y, orientation.z, i);
-                setNthListenerf(AL10.AL_GAIN, gain, i);
-            }
-        }
-    }
-
-
-    public final static int BYTES_PER_SAMPLE = 4;
-    private ByteBuffer buffer = BufferUtils.createByteBuffer(4096);
-
-    public void dispatchAudio(float tpf){
-        int samplesToGet = (int) (tpf * 44100);
-        try {latch.await();}
-        catch (InterruptedException e) {e.printStackTrace();}
-        step(samplesToGet);
-        updateAllListeners();
-
-        for (int i = 0; i < this.listeners.size(); i++){
-            buffer.clear();
-            this.getSamples(buffer, samplesToGet, i);
-            SoundProcessor sp =
-                this.soundProcessorMap.get(this.listeners.get(i));
-            if (null != sp){sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);}
-        }
-
-    }
-
-    public void update(float tpf){
-        super.update(tpf);
-        dispatchAudio(tpf);
-    }
-
-}
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/com/aurellem/capture/audio/AudioSendRenderer.java	Thu Oct 27 02:27:02 2011 -0700
@@ -0,0 +1,195 @@
+package com.aurellem.capture.audio;
+
+import java.lang.reflect.Field;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Vector;
+import java.util.concurrent.CountDownLatch;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.lwjgl.LWJGLException;
+import org.lwjgl.openal.AL;
+import org.lwjgl.openal.AL10;
+import org.lwjgl.openal.ALCdevice;
+import org.lwjgl.openal.OpenALException;
+
+import com.aurellem.send.AudioSend;
+import com.jme3.audio.Listener;
+import com.jme3.audio.lwjgl.LwjglAudioRenderer;
+import com.jme3.math.Vector3f;
+import com.jme3.util.BufferUtils;
+
+public class AudioSendRenderer
+
+    extends LwjglAudioRenderer implements MultiListener {
+
+    private AudioSend audioSend;
+
+    /**
+     * Keeps track of all the listeners which have been registered so far.
+     * The first element is <code>null</code>, which represents the zeroth
+     * LWJGL listener which is created automatically.
+     */
+    public Vector<Listener> listeners = new Vector<Listener>();
+
+    public void initialize(){
+        super.initialize();
+        listeners.add(null);
+    }
+
+    /**
+     * This is to call the native methods which require the OpenAL device ID.
+     * currently it is obtained through reflection.
+     */
+    private long deviceID;
+
+    /**
+     * To ensure that <code>deviceID<code> and <code>listeners<code> are
+     * properly initialized before any additional listeners are added.
+     */
+    private CountDownLatch latch = new CountDownLatch(1);
+
+    /**
+     * Each listener (including the main LWJGL listener) can be registered
+     * with a <code>SoundProcessor</code>, which this Renderer will call whenever
+     * there is new audio data to be processed.
+     */
+    public HashMap<Listener, SoundProcessor> soundProcessorMap =
+        new HashMap<Listener, SoundProcessor>();
+
+
+    /**
+     * Create a new slave context on the recorder device which will render all the
+     * sounds in the main LWJGL context with respect to this listener.
+     */
+    public void addListener(Listener l) {
+        try {this.latch.await();}
+        catch (InterruptedException e) {e.printStackTrace();}
+        audioSend.addListener();
+        this.listeners.add(l);
+    }
+
+    /**
+     * Whenever new data is rendered in the perspective of this listener,
+     * this Renderer will send that data to the SoundProcessor of your choosing.
+     */
+    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
+        this.soundProcessorMap.put(l, sp);
+    }
+
+    /**
+     * Registers a SoundProcessor for the main LWJGL context. IF all you want to
+     * do is record the sound you would normally hear in your application, then
+     * this is the only method you have to worry about.
+     */
+    public void registerSoundProcessor(SoundProcessor sp){
+        // register a sound processor for the default listener.
+        this.soundProcessorMap.put(null, sp);
+    }
+
+    private static final Logger logger =
+        Logger.getLogger(AudioSendRenderer.class.getName());
+
+
+
+    /**
+     * Instead of taking whatever device is available on the system, this call
+     * creates the "Multiple Audio Send" device, which supports multiple listeners in a limited
+     * capacity. For each listener, the device renders it not to the sound device, but
+     * instead to buffers which it makes available via JNI.
+     */
+    public void initInThread(){
+        try{
+            if (!AL.isCreated()){
+                AL.create("Multiple Audio Send", 44100, 60, false);
+            }
+        }catch (OpenALException ex){
+            logger.log(Level.SEVERE, "Failed to load audio library", ex);
+            System.exit(1);
+            return;
+        }catch (LWJGLException ex){
+            logger.log(Level.SEVERE, "Failed to load audio library", ex);
+            System.exit(1);
+            return;
+        }
+        super.initInThread();
+
+        ALCdevice device = AL.getDevice();
+
+        // RLM: use reflection to grab the ID of our device for use later.
+        try {
+            Field deviceIDField;
+            deviceIDField = ALCdevice.class.getDeclaredField("device");
+            deviceIDField.setAccessible(true);
+            try {deviceID = (Long)deviceIDField.get(device);}
+            catch (IllegalArgumentException e) {e.printStackTrace();}
+            catch (IllegalAccessException e) {e.printStackTrace();}
+            deviceIDField.setAccessible(false);}
+        catch (SecurityException e) {e.printStackTrace();}
+        catch (NoSuchFieldException e) {e.printStackTrace();}
+
+        this.audioSend = new AudioSend(this.deviceID);
+
+        // The LWJGL context must be established as the master context before
+        // any other listeners can be created on this device.
+        audioSend.initDevice();
+        // Now, everything is initialized, and it is safe to add more listeners.
+        latch.countDown();
+    }
+
+
+    public void cleanup(){
+        for(SoundProcessor sp : this.soundProcessorMap.values()){
+            sp.cleanup();
+        }
+        super.cleanup();
+    }
+
+    public void updateAllListeners(){
+        for (int i = 0; i < this.listeners.size(); i++){
+            Listener lis = this.listeners.get(i);
+            if (null != lis){
+                Vector3f location = lis.getLocation();
+                Vector3f velocity = lis.getVelocity();
+                Vector3f orientation = lis.getUp();
+                float gain = lis.getVolume();
+                audioSend.setNthListener3f(AL10.AL_POSITION,
+                    location.x, location.y, location.z, i);
+                audioSend.setNthListener3f(AL10.AL_VELOCITY,
+                    velocity.x, velocity.y, velocity.z, i);
+                audioSend.setNthListener3f(AL10.AL_ORIENTATION,
+                    orientation.x, orientation.y, orientation.z, i);
+                audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
+            }
+        }
+    }
+
+
+    public final static int BYTES_PER_SAMPLE = 4;
+    private ByteBuffer buffer = BufferUtils.createByteBuffer(4096);
+
+    public void dispatchAudio(float tpf){
+        int samplesToGet = (int) (tpf * 44100);
+        try {latch.await();}
+        catch (InterruptedException e) {e.printStackTrace();}
+        audioSend.step(samplesToGet);
+        updateAllListeners();
+
+        for (int i = 0; i < this.listeners.size(); i++){
+            buffer.clear();
+            audioSend.getSamples(buffer, samplesToGet, i);
+            SoundProcessor sp =
+                this.soundProcessorMap.get(this.listeners.get(i));
+            if (null != sp){sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);}
+        }
+
+    }
+
+    public void update(float tpf){
+        super.update(tpf);
+        dispatchAudio(tpf);
+    }
+
+}
+
--- a/src/com/aurellem/capture/audio/WaveFileWriter.java	Wed Oct 26 09:38:27 2011 -0700
+++ b/src/com/aurellem/capture/audio/WaveFileWriter.java	Thu Oct 27 02:27:02 2011 -0700
@@ -26,7 +26,6 @@
         for (int i = 0; i < this.fullWaveData.size(); i++){
             data[i] = this.fullWaveData.get(i);}
 
-
         ByteArrayInputStream input = new ByteArrayInputStream(data);
         AudioFormat format = new AudioFormat(44100.0f, 32, 1, true, false);
         AudioInputStream audioInput = new AudioInputStream(input, format, data.length / 4 );
--- a/src/com/aurellem/capture/hello/HelloAudio.java	Wed Oct 26 09:38:27 2011 -0700
+++ b/src/com/aurellem/capture/hello/HelloAudio.java	Thu Oct 27 02:27:02 2011 -0700
@@ -21,6 +21,24 @@
 /** Sample 11 - playing 3D audio. */
 public class HelloAudio extends SimpleApplication {
 
+
+
+    public static void main(String[] args) {
+
+        // Logger.getLogger("com.jme3").setLevel(Level.OFF);
+
+        HelloAudio app = new HelloAudio();
+        AppSettings settings = new AppSettings(true);
+
+        //settings.setAudioRenderer("Send");
+        app.setSettings(settings);
+        app.setShowSettings(false);
+        app.start();
+        app.setPauseOnLostFocus(false);
+    }
+
+
+
     private AudioNode audio_gun;
     private AudioNode audio_nature;
     private Geometry player;
@@ -34,20 +52,6 @@
     }
 
 
-    public static void main(String[] args) {
-
-        // Logger.getLogger("com.jme3").setLevel(Level.OFF);
-
-        HelloAudio app = new HelloAudio();
-        AppSettings settings = new AppSettings(true);
-
-        settings.setAudioRenderer("Send");
-        app.setSettings(settings);
-        app.setShowSettings(false);
-        app.start();
-        app.setPauseOnLostFocus(false);
-    }
-
     @Override
     public void simpleInitApp() {
         this.setTimer(new IsoTimer(60));
@@ -93,7 +97,7 @@
     private void initAudio() {
         //audioRenderer.setEnvironment(Environment.Cavern);
         /* gun shot sound is to be triggered by a mouse click. */
-        audio_gun = new AudioNode(audioRenderer, assetManager, "Sound/Effects/Gun.wav", false);
+        audio_gun = new AudioNode(assetManager, "Sound/Effects/Gun.wav", false);
         //audio_gun = new AudioNode(assetManager, "Sound/Effects/dream.wav", false, false);
         audio_gun.setLooping(false);
         audio_gun.setVolume(2);
--- a/src/com/aurellem/capture/hello/TestWrite.java	Wed Oct 26 09:38:27 2011 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-package com.aurellem.capture.hello;
-
-public class TestWrite {
-
-
-
-
-
-
-
-
-
-}
--- a/src/com/aurellem/capture/video/XuggleVideoRecorder.java	Wed Oct 26 09:38:27 2011 -0700
+++ b/src/com/aurellem/capture/video/XuggleVideoRecorder.java	Thu Oct 27 02:27:02 2011 -0700
@@ -12,8 +12,7 @@
 
 /**
  * Handles writing video files using Xuggle.
- * 
- * 
+ * 
  * @author Robert McIntyre
  * 
  */
@@ -39,8 +38,7 @@
             width, height);
         this.videoReady = true;
     }
-
-
+
     public void record(BufferedImage rawFrame) {
         if (!this.videoReady){initVideo();}
         // convert the Image into the form that Xuggle likes.
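
Usage note (not part of this changeset): a minimal sketch of how the refactored AudioSendRenderer might be wired into an application. It assumes jME3 selects the renderer under the "Send" name, matching the commented-out settings.setAudioRenderer("Send") call in HelloAudio, and that SoundProcessor declares only the process(ByteBuffer, int) and cleanup() methods that this changeset calls; the SendExample class name is hypothetical.

import java.nio.ByteBuffer;

import com.aurellem.capture.audio.AudioSendRenderer;
import com.aurellem.capture.audio.SoundProcessor;
import com.jme3.app.SimpleApplication;
import com.jme3.system.AppSettings;

public class SendExample extends SimpleApplication {

    public static void main(String[] args) {
        SendExample app = new SendExample();
        AppSettings settings = new AppSettings(true);
        // Ask jME3 for the send-based renderer (assumed to be registered as "Send").
        settings.setAudioRenderer("Send");
        app.setSettings(settings);
        app.setShowSettings(false);
        app.start();
    }

    @Override
    public void simpleInitApp() {
        // Record the audio heard by the default listener by registering a
        // SoundProcessor with the renderer selected above.
        AudioSendRenderer send = (AudioSendRenderer) audioRenderer;
        send.registerSoundProcessor(new SoundProcessor() {
            public void process(ByteBuffer audio, int numBytes) {
                // consume numBytes bytes of rendered samples here,
                // e.g. hand them to a WaveFileWriter
            }
            public void cleanup() {}
        });
    }
}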