view src/com/aurellem/capture/examples/AdvancedAudio.java @ 40:56dc950feaed

added README
author Robert McIntyre <rlm@mit.edu>
date Thu, 03 Nov 2011 16:39:32 -0700
package com.aurellem.capture.examples;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.sound.sampled.AudioFormat;

import org.tritonus.share.sampled.FloatSampleTools;

import com.aurellem.capture.Capture;
import com.aurellem.capture.IsoTimer;
import com.aurellem.capture.audio.CompositeSoundProcessor;
import com.aurellem.capture.audio.MultiListener;
import com.aurellem.capture.audio.SoundProcessor;
import com.aurellem.capture.audio.WaveFileWriter;
import com.jme3.app.SimpleApplication;
import com.jme3.audio.AudioNode;
import com.jme3.audio.Listener;
import com.jme3.audio.ListenerParam;
import com.jme3.cinematic.MotionPath;
import com.jme3.cinematic.events.MotionTrack;
import com.jme3.input.controls.ActionListener;
import com.jme3.input.controls.MouseButtonTrigger;
import com.jme3.light.DirectionalLight;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.FastMath;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.shape.Box;
import com.jme3.scene.shape.Sphere;
import com.jme3.system.AppSettings;
/**
 * Demonstrates advanced use of the audio capture and recording features.
 * Multiple perspectives of the same scene are simultaneously rendered to
 * different sound files.
 *
 * A key limitation of the way multiple listeners are implemented is that
 * only 3D positioning effects are realized for listeners other than the
 * main LWJGL listener. This means that audio effects such as environment
 * settings will *not* be heard on any auxiliary listeners, though sound
 * attenuation will work correctly.
 *
 * Multiple listeners as realized here might be used to make AI entities
 * that can each hear the world from their own perspective.
 *
 * @author Robert McIntyre
 */
public class AdvancedAudio extends SimpleApplication {

    public static void main(String[] args) {
        Logger.getLogger("com.jme3").setLevel(Level.OFF);
        AdvancedAudio app = new AdvancedAudio();
        AppSettings settings = new AppSettings(true);
        settings.setAudioRenderer("Send");
        app.setSettings(settings);
        app.setShowSettings(false);
        app.setPauseOnLostFocus(false);
        try {
            Capture.captureVideo(app, new File("/home/r/tmp/out"));
            Capture.captureAudio(app, new File("/home/r/tmp/main.wav"));
        } catch (IOException e) {
            e.printStackTrace();
        }
        app.start();
    }
    private MotionTrack motionControl;
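    /**
     * Creates a small green cube at the given position to mark an "ear"
     * in the scene, returning its geometry so that it can later be wired
     * up to an auxiliary listener.
     */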
    private Geometry makeEar(Node root, Vector3f position) {
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
        ear.setLocalTranslation(position);
        mat.setColor("Color", ColorRGBA.Green);
        ear.setMaterial(mat);
        root.attachChild(ear);
        return ear;
    }
    private Geometry bell;
    private Geometry ear1;
    private Geometry ear2;
    private Geometry ear3;
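    /**
     * Way-points for the bell's journey: three loops through the scene,
     * an ellipse that leaves the ground plane, and a final approach that
     * passes through the camera's position.
     */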
    private Vector3f[] path = new Vector3f[] {
        // loop 1
        new Vector3f(0, 0, 0),
        new Vector3f(0, 0, -10),
        new Vector3f(-2, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        new Vector3f(0, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        // loop 2
        new Vector3f(5, 0, -5),
        new Vector3f(7, 0, 1.5f),
        new Vector3f(14, 0, 2),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        // loop 3
        new Vector3f(8, 0, 7.5f),
        new Vector3f(7, 0, 10.5f),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        // begin ellipse
        new Vector3f(16, 5, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-16, -10, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(16, 20, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-10, -25, 10),
        new Vector3f(-10, 0, 0),
        // come at me bro!
        new Vector3f(-28.00242f, 48.005623f, -34.648228f),
        new Vector3f(0, 0, -20),
    };
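    /**
     * Builds the scene: a blue sphere (the bell) that carries the sound
     * source, a directional light, three ear cubes, and a MotionTrack
     * that moves the bell along the way-points above.
     */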
    private void createScene() {
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        bell = new Geometry("sound-emitter", new Sphere(15, 15, 1));
        mat.setColor("Color", ColorRGBA.Blue);
        bell.setMaterial(mat);
        rootNode.attachChild(bell);

        DirectionalLight light = new DirectionalLight();
        light.setDirection(new Vector3f(0, -1, 0).normalizeLocal());
        light.setColor(ColorRGBA.White.mult(1.5f));
        rootNode.addLight(light);

        ear1 = makeEar(rootNode, new Vector3f(0, 0, -20));
        ear2 = makeEar(rootNode, new Vector3f(0, 0, 20));
        ear3 = makeEar(rootNode, new Vector3f(20, 0, 0));

        MotionPath track = new MotionPath();
        for (Vector3f v : path) {
            track.addWayPoint(v);
        }
        track.setCurveTension(0.80f);

        motionControl = new MotionTrack(bell, track);
        motionControl.setTimer(new IsoTimer(60));
        motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
        motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
        motionControl.setInitialDuration(20f);
        motionControl.setSpeed(1f);

        track.enableDebugShape(assetManager, rootNode);

        positionCamera();
    }
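    /**
     * Places the camera at a fixed vantage point overlooking the scene.
     */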
    private void positionCamera() {
        this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
        // cam.setLocation(new Vector3f(0, 0, -20));
        this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
    }
    private AudioNode music;
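    /**
     * Creates the positional sound source that the bell will carry: a
     * 1 kHz square wave with distance attenuation, no reverb, and no
     * looping. The source is started and then immediately paused;
     * simpleUpdate later calls play() on it.
     */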
    private void initAudio() {
        org.lwjgl.input.Mouse.setGrabbed(false);
        music = new AudioNode(assetManager, "Sound/Environment/sqr-1kHz.wav", false);

        rootNode.attachChild(music);
        audioRenderer.playSource(music);
        music.setPositional(true);
        music.setVolume(1f);
        music.setReverbEnabled(false);
        music.setDirectional(false);
        music.setMaxDistance(200.0f);
        music.setRefDistance(1f);
        music.setRolloffFactor(1f);
        music.setLooping(false);
        audioRenderer.pauseSource(music);
    }
    private Listener auxListener;
    //public File data1 = new File("/home/r/tmp/data1.wav");
    //public File data2 = new File("/home/r/tmp/data2.wav");
    //public File data3 = new File("/home/r/tmp/data3.wav");
    //public File data4 = new File("/home/r/tmp/data4.wav");
    //public File data5 = new File("/home/r/tmp/data5.wav");
    //public File data6 = new File("/home/r/tmp/data6.wav");
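    /**
     * A simple SoundProcessor that acts as the "brain" of an ear: each
     * time it receives a batch of audio it turns its geometry green if
     * the peak sample exceeds a threshold, and gray otherwise.
     */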
    public class Dancer implements SoundProcessor {
        Geometry entity;
        float scale = 2;

        public Dancer(Geometry entity) {
            this.entity = entity;
        }

        /**
         * No-op: this processor keeps no state that needs cleaning up.
         */
        public void cleanup() {}

        /**
         * Dance to the beat! This is the brain of an AI entity that
         * hears its surroundings and reacts to them.
         */
        public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
            audioSamples.clear();
            byte[] data = new byte[numSamples];
            float[] out = new float[numSamples];
            audioSamples.get(data);
            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
                    numSamples / format.getFrameSize(), format);

            float max = Float.NEGATIVE_INFINITY;
            for (float f : out) { if (f > max) max = f; }
            audioSamples.clear();

            if (max > 0.1) { entity.getMaterial().setColor("Color", ColorRGBA.Green); }
            else { entity.getMaterial().setColor("Color", ColorRGBA.Gray); }

            //entity.scale(this.scale);
            //if (this.scale == 2f) { this.scale = 0.5f; }
            //else { this.scale = 2; }
        }
    }
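    /**
     * Gives an ear its own point of hearing: if the audio renderer
     * supports multiple listeners, a new Listener is placed at the ear's
     * location and registered with a processor chain that both drives a
     * Dancer and writes that listener's audio to /home/r/tmp/ear{n}.wav.
     */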
    private void prepareEar(Geometry ear, int n) {
        if (this.audioRenderer instanceof MultiListener) {
            MultiListener rf = (MultiListener) this.audioRenderer;

            auxListener = new Listener();
            auxListener.setLocation(ear.getLocalTranslation());

            rf.addListener(auxListener);
            WaveFileWriter aux = null;

            try { aux = new WaveFileWriter(new File("/home/r/tmp/ear" + n + ".wav")); }
            catch (FileNotFoundException e) { e.printStackTrace(); }

            rf.registerSoundProcessor(auxListener,
                    new CompositeSoundProcessor(new Dancer(ear), aux));
        }
    }
    public void simpleInitApp() {
        this.setTimer(new IsoTimer(60));
        initAudio();
        initKeys();
        createScene();

        prepareEar(ear1, 1);
        prepareEar(ear2, 2);
        prepareEar(ear3, 3);

        motionControl.play();
    }
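    /** Maps the left mouse button to the "Shoot" action. */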
    private void initKeys() {
        inputManager.addMapping("Shoot", new MouseButtonTrigger(0));
        inputManager.addListener(actionListener, "Shoot");
    }
    /**
     * The "Shoot" action: on mouse-button release, print the distance
     * from the bell to the main listener and the music source's
     * reference distance. (Playing a sound here is currently disabled.)
     */
    private ActionListener actionListener = new ActionListener() {
        @Override
        public void onAction(String name, boolean keyPressed, float tpf) {
            if (name.equals("Shoot") && !keyPressed) {
                System.out.println(bell.getLocalTranslation().subtract(listener.getLocation()).length());
                //bell.getMaterial().setColor("Color", ColorRGBA.randomColor());
                //audioRenderer.playSource(music);
                System.out.println(music.getRefDistance());
            }
        }
    };
    //private Vector3f prevBellPos = Vector3f.ZERO;
    private int countdown = 0;

    /**
     * Moves the main listener with the camera (for 3D audio), keeps the
     * music source attached to the bell, and triggers playback of the
     * music.
     */
    public void simpleUpdate(float tpf) {
        if (countdown == 0) {
            music.play();
        }
        Vector3f loc = cam.getLocation();
        Quaternion rot = cam.getRotation();
        listener.setLocation(loc);
        listener.setRotation(rot);
        audioRenderer.updateListenerParam(listener, ListenerParam.Rotation);

        //System.out.println(countdown);
        //if (countdown++ == 300) { this.requestClose(false); }
        //System.out.println("channel " + music.getChannel());
        //listener.setLocation(cam.getLocation());
        //listener.setRotation(cam.getRotation());
        //auxListener.setLocation(loc);
        //auxListener.setRotation(rot);
        //if (music.getStatus() != AudioNode.Status.Playing){
        //    audioRenderer.playSource(music);
        //    music.play();
        //    bell.getMaterial().setColor("Color", ColorRGBA.randomColor());
        //    System.out.println("I'm playing! <3");
        //}
        //audioRenderer.updateSourceParam(music, AudioParam.Direction);

        //Vector3f bellVelocity = bell.getLocalTranslation().subtract(prevBellPos).mult(1.0f/tpf);
        //prevBellPos = bell.getLocalTranslation();

        music.setLocalTranslation(bell.getLocalTranslation());

        //System.out.println("distance: " +
        //    music.getLocalTranslation().subtract(listener.getLocation()).length());

        //music.setVelocity(bellVelocity);

        //audioRenderer.updateSourceParam(music, AudioParam.Position);
        //audioRenderer.updateSourceParam(music, AudioParam.Velocity);

        //System.out.println("main:" + listener.getVolume());
        //System.out.println("aux:" + auxListener.getVolume());
        //org.lwjgl.openal.AL10.alSourcef(1, org.lwjgl.openal.AL10.AL_MIN_GAIN, 0f);
        //org.lwjgl.openal.AL10.alSourcef(1, org.lwjgl.openal.AL10.AL_ROLLOFF_FACTOR, 5f);
    }
}