jmeCapture changeset 65:23e3df41db3c
reformatting for web
| author   | Robert McIntyre <rlm@mit.edu> |
|----------|-------------------------------|
| date     | Sat, 11 Feb 2012 12:25:26 -0700 |
| parents  | 155c70b7e6de |
| children | 0d7347b916ae |
| files    | src/com/aurellem/capture/audio/AudioSendRenderer.java, src/com/aurellem/capture/audio/SoundProcessor.java, src/com/aurellem/capture/examples/Advanced.java |
| diffstat | 3 files changed, 437 insertions(+), 413 deletions(-) |
--- a/src/com/aurellem/capture/audio/AudioSendRenderer.java	Fri Feb 10 04:07:05 2012 -0700
+++ b/src/com/aurellem/capture/audio/AudioSendRenderer.java	Sat Feb 11 12:25:26 2012 -0700
@@ -27,106 +27,113 @@

 public class AudioSendRenderer

-    extends LwjglAudioRenderer implements MultiListener {
+    extends LwjglAudioRenderer implements MultiListener {

-    private AudioSend audioSend;
-    private AudioFormat outFormat;// = new AudioFormat(44100.0f, 32, 1, true, false);
+    private AudioSend audioSend;
+    private AudioFormat outFormat;

-    /**
-     * Keeps track of all the listeners which have been registered so far.
-     * The first element is <code>null</code>, which represents the zeroth
-     * LWJGL listener which is created automatically.
-     */
-    public Vector<Listener> listeners = new Vector<Listener>();
+    /**
+     * Keeps track of all the listeners which have been registered
+     * so far. The first element is <code>null</code>, which
+     * represents the zeroth LWJGL listener which is created
+     * automatically.
+     */
+    public Vector<Listener> listeners = new Vector<Listener>();

-    public void initialize(){
-        super.initialize();
-        listeners.add(null);
+    public void initialize(){
+        super.initialize();
+        listeners.add(null);
+    }
+
+    /**
+     * This is to call the native methods which require the OpenAL
+     * device ID. Currently it is obtained through reflection.
+     */
+    private long deviceID;
+
+    /**
+     * To ensure that <code>deviceID</code> and
+     * <code>listeners</code> are properly initialized before any
+     * additional listeners are added.
+     */
+    private CountDownLatch latch = new CountDownLatch(1);
+
+    /**
+     * Each listener (including the main LWJGL listener) can be
+     * registered with a <code>SoundProcessor</code>, which this
+     * Renderer will call whenever there is new audio data to be
+     * processed.
+     */
+    public HashMap<Listener, SoundProcessor> soundProcessorMap =
+        new HashMap<Listener, SoundProcessor>();
+
+    /**
+     * Create a new slave context on the recorder device which
+     * will render all the sounds in the main LWJGL context with
+     * respect to this listener.
+     */
+    public void addListener(Listener l) {
+        try {this.latch.await();}
+        catch (InterruptedException e) {e.printStackTrace();}
+        audioSend.addListener();
+        this.listeners.add(l);
+        l.setRenderer(this);
+    }
+
+    /**
+     * Whenever new data is rendered in the perspective of this
+     * listener, this Renderer will send that data to the
+     * SoundProcessor of your choosing.
+     */
+    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
+        this.soundProcessorMap.put(l, sp);
+    }
+
+    /**
+     * Registers a SoundProcessor for the main LWJGL context. If all
+     * you want to do is record the sound you would normally hear in
+     * your application, then this is the only method you have to
+     * worry about.
+     */
+    public void registerSoundProcessor(SoundProcessor sp){
+        // register a sound processor for the default listener.
+        this.soundProcessorMap.put(null, sp);
+    }
+
+    private static final Logger logger =
+        Logger.getLogger(AudioSendRenderer.class.getName());
+
+    /**
+     * Instead of taking whatever device is available on the system,
+     * this call creates the "Multiple Audio Send" device, which
+     * supports multiple listeners in a limited capacity. For each
+     * listener, the device renders it not to the sound device, but
+     * instead to buffers which it makes available via JNI.
+     */
+    public void initInThread(){
+        try{
+            switch (JmeSystem.getPlatform()){
+            case Windows64:
+                Natives.extractNativeLib("windows/audioSend",
+                                         "OpenAL64", true, true);
+                break;
+            case Windows32:
+                Natives.extractNativeLib("windows/audioSend",
+                                         "OpenAL32", true, true);
+                break;
+            case Linux64:
+                Natives.extractNativeLib("linux/audioSend",
+                                         "openal64", true, true);
+                break;
+            case Linux32:
+                Natives.extractNativeLib("linux/audioSend",
+                                         "openal", true, true);
+                break;
+            }
 }
-
-    /**
-     * This is to call the native methods which require the OpenAL device ID.
-     * currently it is obtained through reflection.
-     */
-    private long deviceID;
-
-    /**
-     * To ensure that <code>deviceID<code> and <code>listeners<code> are
-     * properly initialized before any additional listeners are added.
-     */
-    private CountDownLatch latch = new CountDownLatch(1);
-
-    /**
-     * Each listener (including the main LWJGL listener) can be registered
-     * with a <code>SoundProcessor</code>, which this Renderer will call whenever
-     * there is new audio data to be processed.
-     */
-    public HashMap<Listener, SoundProcessor> soundProcessorMap =
-        new HashMap<Listener, SoundProcessor>();
-
-
-    /**
-     * Create a new slave context on the recorder device which will render all the
-     * sounds in the main LWJGL context with respect to this listener.
-     */
-    public void addListener(Listener l) {
-        try {this.latch.await();}
-        catch (InterruptedException e) {e.printStackTrace();}
-        audioSend.addListener();
-        this.listeners.add(l);
-        l.setRenderer(this);
-    }
-
-    /**
-     * Whenever new data is rendered in the perspective of this listener,
-     * this Renderer will send that data to the SoundProcessor of your choosing.
-     */
-    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
-        this.soundProcessorMap.put(l, sp);
-    }
-
-    /**
-     * Registers a SoundProcessor for the main LWJGL context. IF all you want to
-     * do is record the sound you would normally hear in your application, then
-     * this is the only method you have to worry about.
-     */
-    public void registerSoundProcessor(SoundProcessor sp){
-        // register a sound processor for the default listener.
-        this.soundProcessorMap.put(null, sp);
-    }
-
-    private static final Logger logger =
-        Logger.getLogger(AudioSendRenderer.class.getName());
+        catch (IOException ex) {ex.printStackTrace();}

-
-
-    /**
-     * Instead of taking whatever device is available on the system, this call
-     * creates the "Multiple Audio Send" device, which supports multiple listeners in a limited
-     * capacity. For each listener, the device renders it not to the sound device, but
-     * instead to buffers which it makes available via JNI.
-     */
-    public void initInThread(){
-
-        try{
-            switch (JmeSystem.getPlatform()){
-            case Windows64:
-                Natives.extractNativeLib("windows/audioSend", "OpenAL64", true, true);
-                break;
-            case Windows32:
-                Natives.extractNativeLib("windows/audioSend", "OpenAL32", true, true);
-                break;
-            case Linux64:
-                Natives.extractNativeLib("linux/audioSend", "openal64", true, true);
-                break;
-            case Linux32:
-                Natives.extractNativeLib("linux/audioSend", "openal", true, true);
-                break;
-            }
-        }
-        catch (IOException ex) {ex.printStackTrace();}
-
-        try{
+        try{
     if (!AL.isCreated()){
         AL.create("Multiple Audio Send", 44100, 60, false);
     }
@@ -139,95 +146,100 @@
     System.exit(1);
     return;
 }
-        super.initInThread();
+        super.initInThread();

-        ALCdevice device = AL.getDevice();
+        ALCdevice device = AL.getDevice();

-        // RLM: use reflection to grab the ID of our device for use later.
-        try {
-            Field deviceIDField;
-            deviceIDField = ALCdevice.class.getDeclaredField("device");
-            deviceIDField.setAccessible(true);
-            try {deviceID = (Long)deviceIDField.get(device);}
-            catch (IllegalArgumentException e) {e.printStackTrace();}
-            catch (IllegalAccessException e) {e.printStackTrace();}
-            deviceIDField.setAccessible(false);}
-        catch (SecurityException e) {e.printStackTrace();}
-        catch (NoSuchFieldException e) {e.printStackTrace();}
+        // RLM: use reflection to grab the ID of our device for use
+        // later.
+        try {
+            Field deviceIDField;
+            deviceIDField = ALCdevice.class.getDeclaredField("device");
+            deviceIDField.setAccessible(true);
+            try {deviceID = (Long)deviceIDField.get(device);}
+            catch (IllegalArgumentException e) {e.printStackTrace();}
+            catch (IllegalAccessException e) {e.printStackTrace();}
+            deviceIDField.setAccessible(false);}
+        catch (SecurityException e) {e.printStackTrace();}
+        catch (NoSuchFieldException e) {e.printStackTrace();}

-        this.audioSend = new AudioSend(this.deviceID);
-        this.outFormat = audioSend.getAudioFormat();
-        initBuffer();
+        this.audioSend = new AudioSend(this.deviceID);
+        this.outFormat = audioSend.getAudioFormat();
+        initBuffer();

-        // The LWJGL context must be established as the master context before
-        // any other listeners can be created on this device.
-        audioSend.initDevice();
-        // Now, everything is initialized, and it is safe to add more listeners.
-        latch.countDown();
+        // The LWJGL context must be established as the master context
+        // before any other listeners can be created on this device.
+        audioSend.initDevice();
+        // Now, everything is initialized, and it is safe to add more
+        // listeners.
+        latch.countDown();
+    }
+
+    public void cleanup(){
+        for(SoundProcessor sp : this.soundProcessorMap.values()){
+            sp.cleanup();
 }
+        super.cleanup();
+    }
+
+    public void updateAllListeners(){
+        for (int i = 0; i < this.listeners.size(); i++){
+            Listener lis = this.listeners.get(i);
+            if (null != lis){
+                Vector3f location = lis.getLocation();
+                Vector3f velocity = lis.getVelocity();
+                Vector3f orientation = lis.getUp();
+                float gain = lis.getVolume();
+                audioSend.setNthListener3f
+                    (AL10.AL_POSITION,
+                     location.x, location.y, location.z, i);
+                audioSend.setNthListener3f
+                    (AL10.AL_VELOCITY,
+                     velocity.x, velocity.y, velocity.z, i);
+                audioSend.setNthListener3f
+                    (AL10.AL_ORIENTATION,
+                     orientation.x, orientation.y, orientation.z, i);
+                audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
+            }
+        }
+    }

+    private ByteBuffer buffer;
+
+    public static final int MIN_FRAMERATE = 10;

-    public void cleanup(){
-        for(SoundProcessor sp : this.soundProcessorMap.values()){
-            sp.cleanup();
-        }
-        super.cleanup();
+    private void initBuffer(){
+        int bufferSize =
+            (int)(this.outFormat.getSampleRate() /
                  ((float)MIN_FRAMERATE)) *
            this.outFormat.getFrameSize();
+
+        this.buffer = BufferUtils.createByteBuffer(bufferSize);
+    }
+
+    public void dispatchAudio(float tpf){
+
+        int samplesToGet = (int) (tpf * outFormat.getSampleRate());
+        try {latch.await();}
+        catch (InterruptedException e) {e.printStackTrace();}
+        audioSend.step(samplesToGet);
+        updateAllListeners();
+
+        for (int i = 0; i < this.listeners.size(); i++){
+            buffer.clear();
+            audioSend.getSamples(buffer, samplesToGet, i);
+            SoundProcessor sp =
+                this.soundProcessorMap.get(this.listeners.get(i));
+            if (null != sp){
+                sp.process
+                    (buffer,
+                     samplesToGet*outFormat.getFrameSize(), outFormat);}
 }
-
-    public void updateAllListeners(){
-        for (int i = 0; i < this.listeners.size(); i++){
-            Listener lis = this.listeners.get(i);
-            if (null != lis){
-                Vector3f location = lis.getLocation();
-                Vector3f velocity = lis.getVelocity();
-                Vector3f orientation = lis.getUp();
-                float gain = lis.getVolume();
-                audioSend.setNthListener3f(AL10.AL_POSITION,
-                                           location.x, location.y, location.z, i);
-                audioSend.setNthListener3f(AL10.AL_VELOCITY,
-                                           velocity.x, velocity.y, velocity.z, i);
-                audioSend.setNthListener3f(AL10.AL_ORIENTATION,
-                                           orientation.x, orientation.y, orientation.z, i);
-                audioSend.setNthListenerf(AL10.AL_GAIN, gain, i);
-            }
-        }
-    }
-
-
-    private ByteBuffer buffer;;
-
-    public static final int MIN_FRAMERATE = 10;
-
-    private void initBuffer(){
-        int bufferSize = (int)(this.outFormat.getSampleRate() / ((float)MIN_FRAMERATE)) *
-            this.outFormat.getFrameSize();
-        this.buffer = BufferUtils.createByteBuffer(bufferSize);
-    }
-    /*
-
-    */
-    public void dispatchAudio(float tpf){
+    }

-        int samplesToGet = (int) (tpf * outFormat.getSampleRate());
-        try {latch.await();}
-        catch (InterruptedException e) {e.printStackTrace();}
-        audioSend.step(samplesToGet);
-        updateAllListeners();
-
-        for (int i = 0; i < this.listeners.size(); i++){
-            buffer.clear();
-            audioSend.getSamples(buffer, samplesToGet, i);
-            SoundProcessor sp =
-                this.soundProcessorMap.get(this.listeners.get(i));
-            if (null != sp){sp.process(buffer, samplesToGet*outFormat.getFrameSize(), outFormat);}
-        }
-
-    }
-
-    public void update(float tpf){
-        super.update(tpf);
+    public void update(float tpf){
+        super.update(tpf);
     dispatchAudio(tpf);
-    }
-
+    }
 }
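The arithmetic in initBuffer and dispatchAudio is worth spelling out: each update pulls tpf * sampleRate frames from the send device, and the shared buffer is sized for the slowest frame rate the renderer tolerates (MIN_FRAMERATE). A quick sanity check follows; the 44100 Hz rate matches the AL.create call above, but the 4-byte frame size is an assumed value for illustration, since the real format comes from audioSend.getAudioFormat().

    // Illustrative numbers only: 44100 Hz from AL.create above; the
    // 4-byte frame size is an assumption, not read from the device.
    float sampleRate = 44100f;
    int frameSize = 4;
    float tpf = 1f / 60f;                     // fixed timestep, as with IsoTimer(60)
    int samplesToGet = (int) (tpf * sampleRate);       // 735 frames per update
    int bufferSize = (int) (sampleRate / 10f) * frameSize;  // MIN_FRAMERATE = 10
    // bufferSize = 4410 * 4 = 17640 bytes: one update's worth of audio
    // at any frame rate down to 10 fps (tpf <= 0.1 s).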
--- a/src/com/aurellem/capture/audio/SoundProcessor.java	Fri Feb 10 04:07:05 2012 -0700
+++ b/src/com/aurellem/capture/audio/SoundProcessor.java	Sat Feb 11 12:25:26 2012 -0700
@@ -7,10 +7,9 @@
 public interface SoundProcessor {

     /**
-     * Called when the SoundProcessor is being destroyed, and
-     * there are no more samples to process. This happens at the
-     * latest when the Application is shutting down.
-     *
+     * Called when the SoundProcessor is being destroyed, and there
+     * are no more samples to process. This happens at the latest
+     * when the Application is shutting down.
     */
     void cleanup();

@@ -22,7 +21,8 @@
     *
     * @param audioSamples a ByteBuffer containing processed audio
     * samples
-    * @param numSamples the number of samples, in bytes, that are valid
+    * @param numSamples the number of samples, in bytes, that are
+    * valid
     * @param format the format of the audio samples in audioSamples
     */
     void process(ByteBuffer audioSamples, int numSamples, AudioFormat format);
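For reference, a minimal implementation of this interface might look like the sketch below. The class name PeakMeter is invented for illustration, and the FloatSampleTools import assumes the tritonus-share library that the Advanced example appears to use; the one real subtlety, per the Javadoc above, is that numSamples counts bytes, not frames.

    import java.nio.ByteBuffer;
    import javax.sound.sampled.AudioFormat;
    import org.tritonus.share.sample.FloatSampleTools; // assumed import path
    import com.aurellem.capture.audio.SoundProcessor;

    // Hypothetical example: print the peak amplitude of each chunk.
    public class PeakMeter implements SoundProcessor {

        public void cleanup() {} // no state to release

        public void process(ByteBuffer audioSamples,
                            int numSamples, AudioFormat format) {
            audioSamples.clear();
            byte[] data = new byte[numSamples];
            audioSamples.get(data);
            // numSamples is in bytes; convert to a frame count first.
            int frames = numSamples / format.getFrameSize();
            float[] out = new float[frames * format.getChannels()];
            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
                                                   frames, format);
            float max = Float.NEGATIVE_INFINITY;
            for (float f : out) { if (f > max) max = f; }
            System.out.println("peak amplitude: " + max);
        }
    }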
--- a/src/com/aurellem/capture/examples/Advanced.java	Fri Feb 10 04:07:05 2012 -0700
+++ b/src/com/aurellem/capture/examples/Advanced.java	Sat Feb 11 12:25:26 2012 -0700
@@ -54,253 +54,265 @@

 public class Advanced extends SimpleApplication {

-    /**
-     * You will see three grey cubes, a blue sphere, and a path which
-     * circles each cube. The blue sphere is generating a constant
-     * monotone sound as it moves along the track. Each cube is
-     * listening for sound; when a cube hears sound whose intensity is
-     * greater than a certain threshold, it changes its color from
-     * grey to green.
-     *
-     * Each cube is also saving whatever it hears to a file. The
-     * scene from the perspective of the viewer is also saved to a
-     * video file. When you listen to each of the sound files
-     * alongside the video, the sound will get louder when the sphere
-     * approaches the cube that generated that sound file. This
-     * shows that each listener is hearing the world from its own
-     * perspective.
-     *
-     */
-    public static void main(String[] args) {
-        Advanced app = new Advanced();
-        AppSettings settings = new AppSettings(true);
-        settings.setAudioRenderer(AurellemSystemDelegate.SEND);
-        JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
-        app.setSettings(settings);
-        app.setShowSettings(false);
-        app.setPauseOnLostFocus(false);
+    /**
+     * You will see three grey cubes, a blue sphere, and a path which
+     * circles each cube. The blue sphere is generating a constant
+     * monotone sound as it moves along the track. Each cube is
+     * listening for sound; when a cube hears sound whose intensity is
+     * greater than a certain threshold, it changes its color from
+     * grey to green.
+     *
+     * Each cube is also saving whatever it hears to a file. The
+     * scene from the perspective of the viewer is also saved to a
+     * video file. When you listen to each of the sound files
+     * alongside the video, the sound will get louder when the sphere
+     * approaches the cube that generated that sound file. This
+     * shows that each listener is hearing the world from its own
+     * perspective.
+     *
+     */
+    public static void main(String[] args) {
+        Advanced app = new Advanced();
+        AppSettings settings = new AppSettings(true);
+        settings.setAudioRenderer(AurellemSystemDelegate.SEND);
+        JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
+        app.setSettings(settings);
+        app.setShowSettings(false);
+        app.setPauseOnLostFocus(false);

-        try {
-            //Capture.captureVideo(app, File.createTempFile("advanced",".avi"));
-            Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
-        }
-        catch (IOException e) {e.printStackTrace();}
+        try {
+            //Capture.captureVideo(app, File.createTempFile("advanced",".avi"));
+            Capture.captureAudio(app, File.createTempFile("advanced",".wav"));
+        }
+        catch (IOException e) {e.printStackTrace();}

-        app.start();
+        app.start();
+    }
+
+    private Geometry bell;
+    private Geometry ear1;
+    private Geometry ear2;
+    private Geometry ear3;
+    private AudioNode music;
+    private MotionTrack motionControl;
+    private IsoTimer motionTimer = new IsoTimer(60);
+
+    private Geometry makeEar(Node root, Vector3f position){
+        Material mat = new Material(assetManager,
+                                    "Common/MatDefs/Misc/Unshaded.j3md");
+        Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
+        ear.setLocalTranslation(position);
+        mat.setColor("Color", ColorRGBA.Green);
+        ear.setMaterial(mat);
+        root.attachChild(ear);
+        return ear;
+    }
+
+    private Vector3f[] path = new Vector3f[]{
+        // loop 1
+        new Vector3f(0, 0, 0),
+        new Vector3f(0, 0, -10),
+        new Vector3f(-2, 0, -14),
+        new Vector3f(-6, 0, -20),
+        new Vector3f(0, 0, -26),
+        new Vector3f(6, 0, -20),
+        new Vector3f(0, 0, -14),
+        new Vector3f(-6, 0, -20),
+        new Vector3f(0, 0, -26),
+        new Vector3f(6, 0, -20),
+        // loop 2
+        new Vector3f(5, 0, -5),
+        new Vector3f(7, 0, 1.5f),
+        new Vector3f(14, 0, 2),
+        new Vector3f(20, 0, 6),
+        new Vector3f(26, 0, 0),
+        new Vector3f(20, 0, -6),
+        new Vector3f(14, 0, 0),
+        new Vector3f(20, 0, 6),
+        new Vector3f(26, 0, 0),
+        new Vector3f(20, 0, -6),
+        new Vector3f(14, 0, 0),
+        // loop 3
+        new Vector3f(8, 0, 7.5f),
+        new Vector3f(7, 0, 10.5f),
+        new Vector3f(6, 0, 20),
+        new Vector3f(0, 0, 26),
+        new Vector3f(-6, 0, 20),
+        new Vector3f(0, 0, 14),
+        new Vector3f(6, 0, 20),
+        new Vector3f(0, 0, 26),
+        new Vector3f(-6, 0, 20),
+        new Vector3f(0, 0, 14),
+        // begin ellipse
+        new Vector3f(16, 5, 20),
+        new Vector3f(0, 0, 26),
+        new Vector3f(-16, -10, 20),
+        new Vector3f(0, 0, 14),
+        new Vector3f(16, 20, 20),
+        new Vector3f(0, 0, 26),
+        new Vector3f(-10, -25, 10),
+        new Vector3f(-10, 0, 0),
+        // come at me!
+        new Vector3f(-28.00242f, 48.005623f, -34.648228f),
+        new Vector3f(0, 0 , -20),
+    };
+
+    private void createScene() {
+        Material mat = new Material(assetManager,
+                                    "Common/MatDefs/Misc/Unshaded.j3md");
+        bell = new Geometry( "sound-emitter" , new Sphere(15,15,1));
+        mat.setColor("Color", ColorRGBA.Blue);
+        bell.setMaterial(mat);
+        rootNode.attachChild(bell);
+
+        ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20));
+        ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20));
+        ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0));
+
+        MotionPath track = new MotionPath();
+
+        for (Vector3f v : path){
+            track.addWayPoint(v);
+        }
+        track.setCurveTension(0.80f);
+
+        motionControl = new MotionTrack(bell,track);
+        // for now, use reflection to change the timer...
+        // motionControl.setTimer(new IsoTimer(60));
+
+        try {
+            Field timerField;
+            timerField =
+                AbstractCinematicEvent.class.getDeclaredField("timer");
+            timerField.setAccessible(true);
+            try {timerField.set(motionControl, motionTimer);}
+            catch (IllegalArgumentException e) {e.printStackTrace();}
+            catch (IllegalAccessException e) {e.printStackTrace();}
+        }
+        catch (SecurityException e) {e.printStackTrace();}
+        catch (NoSuchFieldException e) {e.printStackTrace();}
+
+
+        motionControl.setDirectionType
+            (MotionTrack.Direction.PathAndRotation);
+        motionControl.setRotation
+            (new Quaternion().fromAngleNormalAxis
+             (-FastMath.HALF_PI, Vector3f.UNIT_Y));
+        motionControl.setInitialDuration(20f);
+        motionControl.setSpeed(1f);
+
+        track.enableDebugShape(assetManager, rootNode);
+        positionCamera();
+    }
+
+    private void positionCamera(){
+        this.cam.setLocation
+            (new Vector3f(-28.00242f, 48.005623f, -34.648228f));
+        this.cam.setRotation
+            (new Quaternion
+             (0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
+    }
+
+    private void initAudio() {
+        org.lwjgl.input.Mouse.setGrabbed(false);
+        music = new AudioNode(assetManager,
+                              "Sound/Effects/Beep.ogg", false);
+        rootNode.attachChild(music);
+        audioRenderer.playSource(music);
+        music.setPositional(true);
+        music.setVolume(1f);
+        music.setReverbEnabled(false);
+        music.setDirectional(false);
+        music.setMaxDistance(200.0f);
+        music.setRefDistance(1f);
+        //music.setRolloffFactor(1f);
+        music.setLooping(false);
+        audioRenderer.pauseSource(music);
+    }
+
+    public class Dancer implements SoundProcessor {
+        Geometry entity;
+        float scale = 2;
+        public Dancer(Geometry entity){
+            this.entity = entity;
 }

+        /**
+         * this method is irrelevant since there is no state to cleanup.
+         */
+        public void cleanup() {}

-    private Geometry bell;
-    private Geometry ear1;
-    private Geometry ear2;
-    private Geometry ear3;
-    private AudioNode music;
-    private MotionTrack motionControl;
-    private IsoTimer motionTimer = new IsoTimer(60);

-    private Geometry makeEar(Node root, Vector3f position){
-        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
-        Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
-        ear.setLocalTranslation(position);
-        mat.setColor("Color", ColorRGBA.Green);
-        ear.setMaterial(mat);
-        root.attachChild(ear);
-        return ear;
-    }
+        /**
+         * Respond to sound! This is the brain of an AI entity that
+         * hears its surroundings and reacts to them.
+         */
+        public void process(ByteBuffer audioSamples,
+                            int numSamples, AudioFormat format) {
+            audioSamples.clear();
+            byte[] data = new byte[numSamples];
+            float[] out = new float[numSamples];
+            audioSamples.get(data);
+            FloatSampleTools.
+                byte2floatInterleaved
+                (data, 0, out, 0, numSamples/format.getFrameSize(), format);

-    private Vector3f[] path = new Vector3f[]{
-        // loop 1
-        new Vector3f(0, 0, 0),
-        new Vector3f(0, 0, -10),
-        new Vector3f(-2, 0, -14),
-        new Vector3f(-6, 0, -20),
-        new Vector3f(0, 0, -26),
-        new Vector3f(6, 0, -20),
-        new Vector3f(0, 0, -14),
-        new Vector3f(-6, 0, -20),
-        new Vector3f(0, 0, -26),
-        new Vector3f(6, 0, -20),
-        // loop 2
-        new Vector3f(5, 0, -5),
-        new Vector3f(7, 0, 1.5f),
-        new Vector3f(14, 0, 2),
-        new Vector3f(20, 0, 6),
-        new Vector3f(26, 0, 0),
-        new Vector3f(20, 0, -6),
-        new Vector3f(14, 0, 0),
-        new Vector3f(20, 0, 6),
-        new Vector3f(26, 0, 0),
-        new Vector3f(20, 0, -6),
-        new Vector3f(14, 0, 0),
-        // loop 3
-        new Vector3f(8, 0, 7.5f),
-        new Vector3f(7, 0, 10.5f),
-        new Vector3f(6, 0, 20),
-        new Vector3f(0, 0, 26),
-        new Vector3f(-6, 0, 20),
-        new Vector3f(0, 0, 14),
-        new Vector3f(6, 0, 20),
-        new Vector3f(0, 0, 26),
-        new Vector3f(-6, 0, 20),
-        new Vector3f(0, 0, 14),
-        // begin ellipse
-        new Vector3f(16, 5, 20),
-        new Vector3f(0, 0, 26),
-        new Vector3f(-16, -10, 20),
-        new Vector3f(0, 0, 14),
-        new Vector3f(16, 20, 20),
-        new Vector3f(0, 0, 26),
-        new Vector3f(-10, -25, 10),
-        new Vector3f(-10, 0, 0),
-        // come at me!
-        new Vector3f(-28.00242f, 48.005623f, -34.648228f),
-        new Vector3f(0, 0 , -20),
-    };
+            float max = Float.NEGATIVE_INFINITY;
+            for (float f : out){if (f > max) max = f;}
+            audioSamples.clear();

-    private void createScene() {
-        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
-        bell = new Geometry( "sound-emitter" , new Sphere(15,15,1));
-        mat.setColor("Color", ColorRGBA.Blue);
-        bell.setMaterial(mat);
-        rootNode.attachChild(bell);
+            if (max > 0.1){
+                entity.getMaterial().setColor("Color", ColorRGBA.Green);
+            }
+            else {
+                entity.getMaterial().setColor("Color", ColorRGBA.Gray);
+            }
+        }
+    }

-        ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20));
-        ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20));
-        ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0));
+    private void prepareEar(Geometry ear, int n){
+        if (this.audioRenderer instanceof MultiListener){
+            MultiListener rf = (MultiListener)this.audioRenderer;

-        MotionPath track = new MotionPath();
+            Listener auxListener = new Listener();
+            auxListener.setLocation(ear.getLocalTranslation());

-        for (Vector3f v : path){
-            track.addWayPoint(v);
-        }
-        track.setCurveTension(0.80f);
+            rf.addListener(auxListener);
+            WaveFileWriter aux = null;

-        motionControl = new MotionTrack(bell,track);
-        // for now, use reflection to change the timer...
-        // motionControl.setTimer(new IsoTimer(60));
-
-        try {
-            Field timerField;
-            timerField = AbstractCinematicEvent.class.getDeclaredField("timer");
-            timerField.setAccessible(true);
-            try {timerField.set(motionControl, motionTimer);}
-            catch (IllegalArgumentException e) {e.printStackTrace();}
-            catch (IllegalAccessException e) {e.printStackTrace();}
-        }
-        catch (SecurityException e) {e.printStackTrace();}
-        catch (NoSuchFieldException e) {e.printStackTrace();}
+            try {
+                aux = new WaveFileWriter
+                    (File.createTempFile("advanced-audio-" + n, ".wav"));}
+            catch (IOException e) {e.printStackTrace();}

+            rf.registerSoundProcessor
+                (auxListener,
+                 new CompositeSoundProcessor(new Dancer(ear), aux));
+        }
+    }

-        motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
-        motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
-        motionControl.setInitialDuration(20f);
-        motionControl.setSpeed(1f);
+    public void simpleInitApp() {
+        this.setTimer(new IsoTimer(60));
+        initAudio();

-        track.enableDebugShape(assetManager, rootNode);
-        positionCamera();
+        createScene();
+
+        prepareEar(ear1, 1);
+        prepareEar(ear2, 1);
+        prepareEar(ear3, 1);
+
+        motionControl.play();
+    }
+
+    public void simpleUpdate(float tpf) {
+        motionTimer.update();
+        if (music.getStatus() != AudioNode.Status.Playing){
+            music.play();
 }
-
-
-    private void positionCamera(){
-        this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
-        this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
-    }
-
-    private void initAudio() {
-        org.lwjgl.input.Mouse.setGrabbed(false);
-        music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);
-
-        rootNode.attachChild(music);
-        audioRenderer.playSource(music);
-        music.setPositional(true);
-        music.setVolume(1f);
-        music.setReverbEnabled(false);
-        music.setDirectional(false);
-        music.setMaxDistance(200.0f);
-        music.setRefDistance(1f);
-        //music.setRolloffFactor(1f);
-        music.setLooping(false);
-        audioRenderer.pauseSource(music);
-    }
-
-    public class Dancer implements SoundProcessor {
-        Geometry entity;
-        float scale = 2;
-        public Dancer(Geometry entity){
-            this.entity = entity;
-        }
-
-        /**
-         * this method is irrelevant since there is no state to cleanup.
-         */
-        public void cleanup() {}
-
-
-        /**
-         * Respond to sound! This is the brain of an AI entity that
-         * hears its surroundings and reacts to them.
-         */
-        public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
-            audioSamples.clear();
-            byte[] data = new byte[numSamples];
-            float[] out = new float[numSamples];
-            audioSamples.get(data);
-            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
-                                                   numSamples/format.getFrameSize(), format);
-
-            float max = Float.NEGATIVE_INFINITY;
-            for (float f : out){if (f > max) max = f;}
-            audioSamples.clear();
-
-            if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
-            else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
-        }
-    }
-
-    private void prepareEar(Geometry ear, int n){
-        if (this.audioRenderer instanceof MultiListener){
-            MultiListener rf = (MultiListener)this.audioRenderer;
-
-            Listener auxListener = new Listener();
-            auxListener.setLocation(ear.getLocalTranslation());
-
-            rf.addListener(auxListener);
-            WaveFileWriter aux = null;
-
-            try {aux = new WaveFileWriter(File.createTempFile("advanced-audio-" + n, ".wav"));}
-            catch (IOException e) {e.printStackTrace();}
-
-            rf.registerSoundProcessor(auxListener,
-                                      new CompositeSoundProcessor(new Dancer(ear), aux));
-
-        }
-    }
-
-
-    public void simpleInitApp() {
-        this.setTimer(new IsoTimer(60));
-        initAudio();
-
-        createScene();
-
-        prepareEar(ear1, 1);
-        prepareEar(ear2, 1);
-        prepareEar(ear3, 1);
-
-        motionControl.play();
-
-    }
-
-    public void simpleUpdate(float tpf) {
-        motionTimer.update();
-        if (music.getStatus() != AudioNode.Status.Playing){
-            music.play();
-        }
-        Vector3f loc = cam.getLocation();
-        Quaternion rot = cam.getRotation();
-        listener.setLocation(loc);
-        listener.setRotation(rot);
-        music.setLocalTranslation(bell.getLocalTranslation());
-    }
-
+        Vector3f loc = cam.getLocation();
+        Quaternion rot = cam.getRotation();
+        listener.setLocation(loc);
+        listener.setRotation(rot);
+        music.setLocalTranslation(bell.getLocalTranslation());
+    }
 }
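prepareEar above attaches an auxiliary Listener plus a CompositeSoundProcessor for each cube. A smaller variation, sketched below, records only what the default listener hears by using the single-argument registerSoundProcessor defined on AudioSendRenderer; this assumes the MultiListener interface exposes that overload as well, which this changeset does not show.

    // A minimal sketch (inside a SimpleApplication method): record the
    // default listener to a WAV file, with no auxiliary Listener.
    if (this.audioRenderer instanceof MultiListener) {
        MultiListener rf = (MultiListener) this.audioRenderer;
        try {
            // a null key in soundProcessorMap means the main LWJGL listener
            rf.registerSoundProcessor(
                new WaveFileWriter(File.createTempFile("main-audio", ".wav")));
        }
        catch (IOException e) {e.printStackTrace();}
    }

This is likely what Capture.captureAudio(app, file) in main() arranges on your behalf.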