Mercurial > jmeCapture
comparison src/com/aurellem/capture/examples/AdvancedAudio.java @ 33:c4bfbf5d090e
starting to get something that can perceive sound
author | Robert McIntyre <rlm@mit.edu> |
---|---|
date | Sun, 30 Oct 2011 13:57:16 -0700 |
parents | be37291c62b8 |
children | 13d354e1184b |
comparison
equal
deleted
inserted
replaced
31:b67ffa8aa0b9 | 33:c4bfbf5d090e |
---|---|
1 package com.aurellem.capture.examples; | 1 package com.aurellem.capture.examples; |
2 | 2 |
3 import java.io.File; | 3 import java.io.File; |
4 import java.io.FileNotFoundException; | |
5 import java.io.IOException; | |
4 import java.nio.ByteBuffer; | 6 import java.nio.ByteBuffer; |
5 | 7 |
6 import javax.sound.sampled.AudioFormat; | 8 import javax.sound.sampled.AudioFormat; |
7 | 9 |
10 import org.tritonus.share.sampled.FloatSampleTools; | |
11 | |
12 import com.aurellem.capture.Capture; | |
8 import com.aurellem.capture.IsoTimer; | 13 import com.aurellem.capture.IsoTimer; |
14 import com.aurellem.capture.audio.CompositeSoundProcessor; | |
9 import com.aurellem.capture.audio.MultiListener; | 15 import com.aurellem.capture.audio.MultiListener; |
10 import com.aurellem.capture.audio.SoundProcessor; | 16 import com.aurellem.capture.audio.SoundProcessor; |
17 import com.aurellem.capture.audio.WaveFileWriter; | |
11 import com.jme3.app.SimpleApplication; | 18 import com.jme3.app.SimpleApplication; |
12 import com.jme3.audio.AudioNode; | 19 import com.jme3.audio.AudioNode; |
13 import com.jme3.audio.Listener; | 20 import com.jme3.audio.Listener; |
14 import com.jme3.cinematic.MotionPath; | 21 import com.jme3.cinematic.MotionPath; |
15 import com.jme3.cinematic.events.MotionTrack; | 22 import com.jme3.cinematic.events.MotionTrack; |
57 settings.setAudioRenderer("Send"); | 64 settings.setAudioRenderer("Send"); |
58 app.setSettings(settings); | 65 app.setSettings(settings); |
59 app.setShowSettings(false); | 66 app.setShowSettings(false); |
60 app.setPauseOnLostFocus(false); | 67 app.setPauseOnLostFocus(false); |
61 org.lwjgl.input.Mouse.setGrabbed(false); | 68 org.lwjgl.input.Mouse.setGrabbed(false); |
69 try {Capture.captureVideo(app, new File("/home/r/tmp/out.avi"));} | |
70 catch (IOException e) {e.printStackTrace();} | |
62 app.start(); | 71 app.start(); |
63 } | 72 } |
64 | 73 |
65 private MotionTrack motionControl; | 74 private MotionTrack motionControl; |
66 | 75 |
160 | 169 |
161 track.setCurveTension(0.80f); | 170 track.setCurveTension(0.80f); |
162 | 171 |
163 | 172 |
164 motionControl = new MotionTrack(bell,track); | 173 motionControl = new MotionTrack(bell,track); |
174 motionControl.setTimer(new IsoTimer(60)); | |
165 motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation); | 175 motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation); |
166 motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y)); | 176 motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y)); |
167 motionControl.setInitialDuration(10f); | 177 motionControl.setInitialDuration(20f); |
168 motionControl.setSpeed(0.1f); | 178 motionControl.setSpeed(1f); |
169 | 179 |
170 | 180 |
171 track.enableDebugShape(assetManager, rootNode); | 181 track.enableDebugShape(assetManager, rootNode); |
172 | 182 |
173 | 183 |
193 | 203 |
194 music = new AudioNode(assetManager, "Sound/Environment/pure.wav", false); | 204 music = new AudioNode(assetManager, "Sound/Environment/pure.wav", false); |
195 | 205 |
196 rootNode.attachChild(music); | 206 rootNode.attachChild(music); |
197 audioRenderer.playSource(music); | 207 audioRenderer.playSource(music); |
198 | |
199 music.setVolume(1f); | |
200 music.setPositional(true); | 208 music.setPositional(true); |
201 music.setMaxDistance(200.0f); | 209 //music.setVolume(1f); |
202 music.setRefDistance(0.1f); | 210 |
203 music.setRolloffFactor(5f); | 211 //music.setMaxDistance(200.0f); |
212 //music.setRefDistance(0.1f); | |
213 //music.setRolloffFactor(5f); | |
204 audioRenderer.pauseSource(music); | 214 audioRenderer.pauseSource(music); |
205 | 215 |
206 } | 216 } |
207 | 217 |
208 | 218 |
220 public class Dancer implements SoundProcessor { | 230 public class Dancer implements SoundProcessor { |
221 | 231 |
222 Spatial entity; | 232 Spatial entity; |
223 | 233 |
224 float scale = 2; | 234 float scale = 2; |
225 | 235 String debug; |
226 public Dancer(Spatial entity){ | 236 public Dancer(Spatial entity, String debug){ |
227 this.entity = entity; | 237 this.entity = entity; |
238 this.debug = debug; | |
228 } | 239 } |
229 | 240 |
230 /** | 241 /** |
231 * this method is irrelevant since there is no state to cleanup. | 242 * this method is irrelevant since there is no state to cleanup. |
232 */ | 243 */ |
236 /** | 247 /** |
237 * Dance to the beat! This is the brain of an AI entity that | 248 * Dance to the beat! This is the brain of an AI entity that |
238 * hears it's surroundings and reacts to them. | 249 * hears it's surroundings and reacts to them. |
239 */ | 250 */ |
240 public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) { | 251 public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) { |
241 //System.out.println("I'm DANCING <3"); | 252 audioSamples.clear(); |
242 entity.scale(this.scale); | 253 byte[] data = new byte[numSamples]; |
243 if (this.scale == 2f){this.scale = 0.5f;} | 254 float[] out = new float[numSamples]; |
244 else {this.scale = 2;} | 255 audioSamples.get(data); |
256 FloatSampleTools.byte2floatInterleaved(data, 0, out, 0, | |
257 numSamples/format.getFrameSize(), format); | |
258 | |
259 float max = Float.NEGATIVE_INFINITY; | |
260 for (float f : out){if (f > max) max = f;} | |
261 | |
262 System.out.println(debug); | |
263 System.out.println(max); | |
264 | |
265 | |
266 | |
267 //entity.scale(this.scale); | |
268 //if (this.scale == 2f){this.scale = 0.5f;} | |
269 //else {this.scale = 2;} | |
245 } | 270 } |
246 | 271 |
247 | 272 |
248 } | 273 } |
249 | 274 |
258 if (this.audioRenderer instanceof MultiListener){ | 283 if (this.audioRenderer instanceof MultiListener){ |
259 MultiListener rf = (MultiListener)this.audioRenderer; | 284 MultiListener rf = (MultiListener)this.audioRenderer; |
260 | 285 |
261 | 286 |
262 rf.addListener(auxListener); | 287 rf.addListener(auxListener); |
263 | 288 WaveFileWriter writer = null; |
264 //rf.registerSoundProcessor(new WaveFileWriter(data1)); | 289 WaveFileWriter writer2 = null; |
265 rf.registerSoundProcessor(auxListener, new Dancer(ear1)); | 290 auxListener.setLocation(ear1.getLocalTranslation()); |
266 | 291 listener.setLocation(ear1.getLocalTranslation()); |
292 try {writer = new WaveFileWriter(new File("/home/r/tmp/out.wav"));} | |
293 catch (FileNotFoundException e) {e.printStackTrace();} | |
294 | |
295 try {writer2 = new WaveFileWriter(new File("/home/r/tmp/outmain.wav"));} | |
296 catch (FileNotFoundException e) {e.printStackTrace();} | |
297 | |
298 rf.registerSoundProcessor(auxListener, | |
299 new CompositeSoundProcessor(new Dancer(ear1, "aux"), writer)); | |
300 | |
301 rf.registerSoundProcessor( | |
302 new CompositeSoundProcessor(new Dancer(ear1, "main"), writer2)); | |
267 } | 303 } |
268 | 304 |
269 motionControl.play(); | 305 motionControl.play(); |
270 } | 306 } |
271 | 307 |
283 @Override | 319 @Override |
284 public void onAction(String name, boolean keyPressed, float tpf) { | 320 public void onAction(String name, boolean keyPressed, float tpf) { |
285 if (name.equals("Shoot") && !keyPressed) { | 321 if (name.equals("Shoot") && !keyPressed) { |
286 System.out.println("I'm playing! <3"); | 322 System.out.println("I'm playing! <3"); |
287 System.out.println(bell.getLocalTranslation().subtract(cam.getLocation()).length()); | 323 System.out.println(bell.getLocalTranslation().subtract(cam.getLocation()).length()); |
288 | 324 bell.getMaterial().setColor("Color", ColorRGBA.randomColor()); |
289 audioRenderer.playSource(music); | 325 audioRenderer.playSource(music); |
290 System.out.println(music.getRefDistance()); | 326 System.out.println(music.getRefDistance()); |
291 | 327 |
292 } | 328 } |
293 } | 329 } |
295 | 331 |
296 /** Move the listener with the camera - for 3D audio. */ | 332 /** Move the listener with the camera - for 3D audio. */ |
297 | 333 |
298 | 334 |
299 private Vector3f prevBellPos = Vector3f.ZERO; | 335 private Vector3f prevBellPos = Vector3f.ZERO; |
336 | |
337 | |
300 public void simpleUpdate(float tpf) { | 338 public void simpleUpdate(float tpf) { |
301 //Vector3f loc = cam.getLocation(); | 339 //Vector3f loc = cam.getLocation(); |
302 //Quaternion rot = cam.getRotation(); | 340 //Quaternion rot = cam.getRotation(); |
303 //listener.setLocation(loc); | 341 //listener.setLocation(loc); |
304 //listener.setRotation(rot); | 342 //listener.setRotation(rot); |
305 | 343 |
306 | 344 |
307 listener.setLocation(cam.getLocation()); | 345 //listener.setLocation(cam.getLocation()); |
308 listener.setRotation(cam.getRotation()); | 346 //listener.setRotation(cam.getRotation()); |
309 //auxListener.setLocation(loc); | 347 //auxListener.setLocation(loc); |
310 //auxListener.setRotation(rot); | 348 //auxListener.setRotation(rot); |
311 //if (music.getStatus() == AudioNode.Status.Stopped){ | 349 if (music.getStatus() != AudioNode.Status.Playing){ |
312 | 350 audioRenderer.playSource(music); |
313 //music.playInstance(); | 351 bell.getMaterial().setColor("Color", ColorRGBA.randomColor()); |
314 //} | 352 } |
315 //audioRenderer.updateSourceParam(music, AudioParam.Direction); | 353 //audioRenderer.updateSourceParam(music, AudioParam.Direction); |
316 | 354 |
317 Vector3f bellVelocity = bell.getLocalTranslation().subtract(prevBellPos).mult(1.0f/tpf); | 355 Vector3f bellVelocity = bell.getLocalTranslation().subtract(prevBellPos).mult(1.0f/tpf); |
318 prevBellPos = bell.getLocalTranslation(); | 356 prevBellPos = bell.getLocalTranslation(); |
319 | 357 |