Mercurial > jmeCapture
comparison src/com/aurellem/capture/examples/Advanced.java @ 65:23e3df41db3c
reformatting for web
author | Robert McIntyre <rlm@mit.edu> |
---|---|
date | Sat, 11 Feb 2012 12:25:26 -0700 |
parents | f5e52169f056 |
children |
comparison
equal
deleted
inserted
replaced
64:155c70b7e6de | 65:23e3df41db3c |
---|---|
52 * @author Robert McIntyre | 52 * @author Robert McIntyre |
53 */ | 53 */ |
54 | 54 |
55 public class Advanced extends SimpleApplication { | 55 public class Advanced extends SimpleApplication { |
56 | 56 |
57 /** | |
58 * You will see three grey cubes, a blue sphere, and a path which | |
59 * circles each cube. The blue sphere is generating a constant | |
60 * monotone sound as it moves along the track. Each cube is | |
61 * listening for sound; when a cube hears sound whose intensity is | |
62 * greater than a certain threshold, it changes its color from | |
63 * grey to green. | |
64 * | |
65 * Each cube is also saving whatever it hears to a file. The | |
66 * scene from the perspective of the viewer is also saved to a | |
67 * video file. When you listen to each of the sound files | |
68 * alongside the video, the sound will get louder when the sphere | |
69 * approaches the cube that generated that sound file. This | |
70 * shows that each listener is hearing the world from its own | |
71 * perspective. | |
72 * | |
73 */ | |
74 public static void main(String[] args) { | |
75 Advanced app = new Advanced(); | |
76 AppSettings settings = new AppSettings(true); | |
77 settings.setAudioRenderer(AurellemSystemDelegate.SEND); | |
78 JmeSystem.setSystemDelegate(new AurellemSystemDelegate()); | |
79 app.setSettings(settings); | |
80 app.setShowSettings(false); | |
81 app.setPauseOnLostFocus(false); | |
82 | |
83 try { | |
84 //Capture.captureVideo(app, File.createTempFile("advanced",".avi")); | |
85 Capture.captureAudio(app, File.createTempFile("advanced",".wav")); | |
86 } | |
87 catch (IOException e) {e.printStackTrace();} | |
88 | |
89 app.start(); | |
90 } | |
91 | |
92 private Geometry bell; | |
93 private Geometry ear1; | |
94 private Geometry ear2; | |
95 private Geometry ear3; | |
96 private AudioNode music; | |
97 private MotionTrack motionControl; | |
98 private IsoTimer motionTimer = new IsoTimer(60); | |
99 | |
100 private Geometry makeEar(Node root, Vector3f position){ | |
101 Material mat = new Material(assetManager, | |
102 "Common/MatDefs/Misc/Unshaded.j3md"); | |
103 Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f)); | |
104 ear.setLocalTranslation(position); | |
105 mat.setColor("Color", ColorRGBA.Green); | |
106 ear.setMaterial(mat); | |
107 root.attachChild(ear); | |
108 return ear; | |
109 } | |
110 | |
111 private Vector3f[] path = new Vector3f[]{ | |
112 // loop 1 | |
113 new Vector3f(0, 0, 0), | |
114 new Vector3f(0, 0, -10), | |
115 new Vector3f(-2, 0, -14), | |
116 new Vector3f(-6, 0, -20), | |
117 new Vector3f(0, 0, -26), | |
118 new Vector3f(6, 0, -20), | |
119 new Vector3f(0, 0, -14), | |
120 new Vector3f(-6, 0, -20), | |
121 new Vector3f(0, 0, -26), | |
122 new Vector3f(6, 0, -20), | |
123 // loop 2 | |
124 new Vector3f(5, 0, -5), | |
125 new Vector3f(7, 0, 1.5f), | |
126 new Vector3f(14, 0, 2), | |
127 new Vector3f(20, 0, 6), | |
128 new Vector3f(26, 0, 0), | |
129 new Vector3f(20, 0, -6), | |
130 new Vector3f(14, 0, 0), | |
131 new Vector3f(20, 0, 6), | |
132 new Vector3f(26, 0, 0), | |
133 new Vector3f(20, 0, -6), | |
134 new Vector3f(14, 0, 0), | |
135 // loop 3 | |
136 new Vector3f(8, 0, 7.5f), | |
137 new Vector3f(7, 0, 10.5f), | |
138 new Vector3f(6, 0, 20), | |
139 new Vector3f(0, 0, 26), | |
140 new Vector3f(-6, 0, 20), | |
141 new Vector3f(0, 0, 14), | |
142 new Vector3f(6, 0, 20), | |
143 new Vector3f(0, 0, 26), | |
144 new Vector3f(-6, 0, 20), | |
145 new Vector3f(0, 0, 14), | |
146 // begin ellipse | |
147 new Vector3f(16, 5, 20), | |
148 new Vector3f(0, 0, 26), | |
149 new Vector3f(-16, -10, 20), | |
150 new Vector3f(0, 0, 14), | |
151 new Vector3f(16, 20, 20), | |
152 new Vector3f(0, 0, 26), | |
153 new Vector3f(-10, -25, 10), | |
154 new Vector3f(-10, 0, 0), | |
155 // come at me! | |
156 new Vector3f(-28.00242f, 48.005623f, -34.648228f), | |
157 new Vector3f(0, 0 , -20), | |
158 }; | |
159 | |
160 private void createScene() { | |
161 Material mat = new Material(assetManager, | |
162 "Common/MatDefs/Misc/Unshaded.j3md"); | |
163 bell = new Geometry( "sound-emitter" , new Sphere(15,15,1)); | |
164 mat.setColor("Color", ColorRGBA.Blue); | |
165 bell.setMaterial(mat); | |
166 rootNode.attachChild(bell); | |
167 | |
168 ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20)); | |
169 ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20)); | |
170 ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0)); | |
171 | |
172 MotionPath track = new MotionPath(); | |
173 | |
174 for (Vector3f v : path){ | |
175 track.addWayPoint(v); | |
176 } | |
177 track.setCurveTension(0.80f); | |
178 | |
179 motionControl = new MotionTrack(bell,track); | |
180 // for now, use reflection to change the timer... | |
181 // motionControl.setTimer(new IsoTimer(60)); | |
182 | |
183 try { | |
184 Field timerField; | |
185 timerField = | |
186 AbstractCinematicEvent.class.getDeclaredField("timer"); | |
187 timerField.setAccessible(true); | |
188 try {timerField.set(motionControl, motionTimer);} | |
189 catch (IllegalArgumentException e) {e.printStackTrace();} | |
190 catch (IllegalAccessException e) {e.printStackTrace();} | |
191 } | |
192 catch (SecurityException e) {e.printStackTrace();} | |
193 catch (NoSuchFieldException e) {e.printStackTrace();} | |
194 | |
195 | |
196 motionControl.setDirectionType | |
197 (MotionTrack.Direction.PathAndRotation); | |
198 motionControl.setRotation | |
199 (new Quaternion().fromAngleNormalAxis | |
200 (-FastMath.HALF_PI, Vector3f.UNIT_Y)); | |
201 motionControl.setInitialDuration(20f); | |
202 motionControl.setSpeed(1f); | |
203 | |
204 track.enableDebugShape(assetManager, rootNode); | |
205 positionCamera(); | |
206 } | |
207 | |
208 private void positionCamera(){ | |
209 this.cam.setLocation | |
210 (new Vector3f(-28.00242f, 48.005623f, -34.648228f)); | |
211 this.cam.setRotation | |
212 (new Quaternion | |
213 (0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f)); | |
214 } | |
215 | |
216 private void initAudio() { | |
217 org.lwjgl.input.Mouse.setGrabbed(false); | |
218 music = new AudioNode(assetManager, | |
219 "Sound/Effects/Beep.ogg", false); | |
220 rootNode.attachChild(music); | |
221 audioRenderer.playSource(music); | |
222 music.setPositional(true); | |
223 music.setVolume(1f); | |
224 music.setReverbEnabled(false); | |
225 music.setDirectional(false); | |
226 music.setMaxDistance(200.0f); | |
227 music.setRefDistance(1f); | |
228 //music.setRolloffFactor(1f); | |
229 music.setLooping(false); | |
230 audioRenderer.pauseSource(music); | |
231 } | |
232 | |
233 public class Dancer implements SoundProcessor { | |
234 Geometry entity; | |
235 float scale = 2; | |
236 public Dancer(Geometry entity){ | |
237 this.entity = entity; | |
238 } | |
239 | |
57 /** | 240 /** |
58 * You will see three grey cubes, a blue sphere, and a path which | 241 * This method is irrelevant since there is no state to clean up. |
59 * circles each cube. The blue sphere is generating a constant | |
60 * monotone sound as it moves along the track. Each cube is | |
61 * listening for sound; when a cube hears sound whose intensity is | |
62 * greater than a certain threshold, it changes its color from | |
63 * grey to green. | |
64 * | |
65 * Each cube is also saving whatever it hears to a file. The | |
66 * scene from the perspective of the viewer is also saved to a | |
67 * video file. When you listen to each of the sound files | |
68 * alongside the video, the sound will get louder when the sphere | |
69 * approaches the cube that generated that sound file. This | |
70 * shows that each listener is hearing the world from its own | |
71 * perspective. | |
72 * | |
73 */ | 242 */ |
74 public static void main(String[] args) { | 243 public void cleanup() {} |
75 Advanced app = new Advanced(); | 244 |
76 AppSettings settings = new AppSettings(true); | 245 |
77 settings.setAudioRenderer(AurellemSystemDelegate.SEND); | 246 /** |
78 JmeSystem.setSystemDelegate(new AurellemSystemDelegate()); | 247 * Respond to sound! This is the brain of an AI entity that |
79 app.setSettings(settings); | 248 * hears its surroundings and reacts to them. |
80 app.setShowSettings(false); | 249 */ |
81 app.setPauseOnLostFocus(false); | 250 public void process(ByteBuffer audioSamples, |
82 | 251 int numSamples, AudioFormat format) { |
83 try { | 252 audioSamples.clear(); |
84 //Capture.captureVideo(app, File.createTempFile("advanced",".avi")); | 253 byte[] data = new byte[numSamples]; |
85 Capture.captureAudio(app, File.createTempFile("advanced", ".wav")); | 254 float[] out = new float[numSamples]; |
86 } | 255 audioSamples.get(data); |
87 catch (IOException e) {e.printStackTrace();} | 256 FloatSampleTools. |
88 | 257 byte2floatInterleaved |
89 app.start(); | 258 (data, 0, out, 0, numSamples/format.getFrameSize(), format); |
90 } | 259 |
91 | 260 float max = Float.NEGATIVE_INFINITY; |
92 | 261 for (float f : out){if (f > max) max = f;} |
93 private Geometry bell; | 262 audioSamples.clear(); |
94 private Geometry ear1; | 263 |
95 private Geometry ear2; | 264 if (max > 0.1){ |
96 private Geometry ear3; | 265 entity.getMaterial().setColor("Color", ColorRGBA.Green); |
97 private AudioNode music; | 266 } |
98 private MotionTrack motionControl; | 267 else { |
99 private IsoTimer motionTimer = new IsoTimer(60); | 268 entity.getMaterial().setColor("Color", ColorRGBA.Gray); |
100 | 269 } |
101 private Geometry makeEar(Node root, Vector3f position){ | 270 } |
102 Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md"); | 271 } |
103 Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f)); | 272 |
104 ear.setLocalTranslation(position); | 273 private void prepareEar(Geometry ear, int n){ |
105 mat.setColor("Color", ColorRGBA.Green); | 274 if (this.audioRenderer instanceof MultiListener){ |
106 ear.setMaterial(mat); | 275 MultiListener rf = (MultiListener)this.audioRenderer; |
107 root.attachChild(ear); | 276 |
108 return ear; | 277 Listener auxListener = new Listener(); |
109 } | 278 auxListener.setLocation(ear.getLocalTranslation()); |
110 | 279 |
111 private Vector3f[] path = new Vector3f[]{ | 280 rf.addListener(auxListener); |
112 // loop 1 | 281 WaveFileWriter aux = null; |
113 new Vector3f(0, 0, 0), | 282 |
114 new Vector3f(0, 0, -10), | 283 try { |
115 new Vector3f(-2, 0, -14), | 284 aux = new WaveFileWriter |
116 new Vector3f(-6, 0, -20), | 285 (File.createTempFile("advanced-audio-" + n, ".wav"));} |
117 new Vector3f(0, 0, -26), | 286 catch (IOException e) {e.printStackTrace();} |
118 new Vector3f(6, 0, -20), | 287 |
119 new Vector3f(0, 0, -14), | 288 rf.registerSoundProcessor |
120 new Vector3f(-6, 0, -20), | 289 (auxListener, |
121 new Vector3f(0, 0, -26), | 290 new CompositeSoundProcessor(new Dancer(ear), aux)); |
122 new Vector3f(6, 0, -20), | 291 } |
123 // loop 2 | 292 } |
124 new Vector3f(5, 0, -5), | 293 |
125 new Vector3f(7, 0, 1.5f), | 294 public void simpleInitApp() { |
126 new Vector3f(14, 0, 2), | 295 this.setTimer(new IsoTimer(60)); |
127 new Vector3f(20, 0, 6), | 296 initAudio(); |
128 new Vector3f(26, 0, 0), | 297 |
129 new Vector3f(20, 0, -6), | 298 createScene(); |
130 new Vector3f(14, 0, 0), | 299 |
131 new Vector3f(20, 0, 6), | 300 prepareEar(ear1, 1); |
132 new Vector3f(26, 0, 0), | 301 prepareEar(ear2, 1); |
133 new Vector3f(20, 0, -6), | 302 prepareEar(ear3, 1); |
134 new Vector3f(14, 0, 0), | 303 |
135 // loop 3 | 304 motionControl.play(); |
136 new Vector3f(8, 0, 7.5f), | 305 } |
137 new Vector3f(7, 0, 10.5f), | 306 |
138 new Vector3f(6, 0, 20), | 307 public void simpleUpdate(float tpf) { |
139 new Vector3f(0, 0, 26), | 308 motionTimer.update(); |
140 new Vector3f(-6, 0, 20), | 309 if (music.getStatus() != AudioNode.Status.Playing){ |
141 new Vector3f(0, 0, 14), | 310 music.play(); |
142 new Vector3f(6, 0, 20), | 311 } |
143 new Vector3f(0, 0, 26), | 312 Vector3f loc = cam.getLocation(); |
144 new Vector3f(-6, 0, 20), | 313 Quaternion rot = cam.getRotation(); |
145 new Vector3f(0, 0, 14), | 314 listener.setLocation(loc); |
146 // begin ellipse | 315 listener.setRotation(rot); |
147 new Vector3f(16, 5, 20), | 316 music.setLocalTranslation(bell.getLocalTranslation()); |
148 new Vector3f(0, 0, 26), | 317 } |
149 new Vector3f(-16, -10, 20), | |
150 new Vector3f(0, 0, 14), | |
151 new Vector3f(16, 20, 20), | |
152 new Vector3f(0, 0, 26), | |
153 new Vector3f(-10, -25, 10), | |
154 new Vector3f(-10, 0, 0), | |
155 // come at me! | |
156 new Vector3f(-28.00242f, 48.005623f, -34.648228f), | |
157 new Vector3f(0, 0 , -20), | |
158 }; | |
159 | |
160 private void createScene() { | |
161 Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md"); | |
162 bell = new Geometry( "sound-emitter" , new Sphere(15,15,1)); | |
163 mat.setColor("Color", ColorRGBA.Blue); | |
164 bell.setMaterial(mat); | |
165 rootNode.attachChild(bell); | |
166 | |
167 ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20)); | |
168 ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20)); | |
169 ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0)); | |
170 | |
171 MotionPath track = new MotionPath(); | |
172 | |
173 for (Vector3f v : path){ | |
174 track.addWayPoint(v); | |
175 } | |
176 track.setCurveTension(0.80f); | |
177 | |
178 motionControl = new MotionTrack(bell,track); | |
179 // for now, use reflection to change the timer... | |
180 // motionControl.setTimer(new IsoTimer(60)); | |
181 | |
182 try { | |
183 Field timerField; | |
184 timerField = AbstractCinematicEvent.class.getDeclaredField("timer"); | |
185 timerField.setAccessible(true); | |
186 try {timerField.set(motionControl, motionTimer);} | |
187 catch (IllegalArgumentException e) {e.printStackTrace();} | |
188 catch (IllegalAccessException e) {e.printStackTrace();} | |
189 } | |
190 catch (SecurityException e) {e.printStackTrace();} | |
191 catch (NoSuchFieldException e) {e.printStackTrace();} | |
192 | |
193 | |
194 motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation); | |
195 motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y)); | |
196 motionControl.setInitialDuration(20f); | |
197 motionControl.setSpeed(1f); | |
198 | |
199 track.enableDebugShape(assetManager, rootNode); | |
200 positionCamera(); | |
201 } | |
202 | |
203 | |
204 private void positionCamera(){ | |
205 this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f)); | |
206 this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f)); | |
207 } | |
208 | |
209 private void initAudio() { | |
210 org.lwjgl.input.Mouse.setGrabbed(false); | |
211 music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false); | |
212 | |
213 rootNode.attachChild(music); | |
214 audioRenderer.playSource(music); | |
215 music.setPositional(true); | |
216 music.setVolume(1f); | |
217 music.setReverbEnabled(false); | |
218 music.setDirectional(false); | |
219 music.setMaxDistance(200.0f); | |
220 music.setRefDistance(1f); | |
221 //music.setRolloffFactor(1f); | |
222 music.setLooping(false); | |
223 audioRenderer.pauseSource(music); | |
224 } | |
225 | |
226 public class Dancer implements SoundProcessor { | |
227 Geometry entity; | |
228 float scale = 2; | |
229 public Dancer(Geometry entity){ | |
230 this.entity = entity; | |
231 } | |
232 | |
233 /** | |
234 * This method is irrelevant since there is no state to clean up. | |
235 */ | |
236 public void cleanup() {} | |
237 | |
238 | |
239 /** | |
240 * Respond to sound! This is the brain of an AI entity that | |
241 * hears its surroundings and reacts to them. | |
242 */ | |
243 public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) { | |
244 audioSamples.clear(); | |
245 byte[] data = new byte[numSamples]; | |
246 float[] out = new float[numSamples]; | |
247 audioSamples.get(data); | |
248 FloatSampleTools.byte2floatInterleaved(data, 0, out, 0, | |
249 numSamples/format.getFrameSize(), format); | |
250 | |
251 float max = Float.NEGATIVE_INFINITY; | |
252 for (float f : out){if (f > max) max = f;} | |
253 audioSamples.clear(); | |
254 | |
255 if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);} | |
256 else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);} | |
257 } | |
258 } | |
259 | |
260 private void prepareEar(Geometry ear, int n){ | |
261 if (this.audioRenderer instanceof MultiListener){ | |
262 MultiListener rf = (MultiListener)this.audioRenderer; | |
263 | |
264 Listener auxListener = new Listener(); | |
265 auxListener.setLocation(ear.getLocalTranslation()); | |
266 | |
267 rf.addListener(auxListener); | |
268 WaveFileWriter aux = null; | |
269 | |
270 try {aux = new WaveFileWriter(File.createTempFile("advanced-audio-" + n, ".wav"));} | |
271 catch (IOException e) {e.printStackTrace();} | |
272 | |
273 rf.registerSoundProcessor(auxListener, | |
274 new CompositeSoundProcessor(new Dancer(ear), aux)); | |
275 | |
276 } | |
277 } | |
278 | |
279 | |
280 public void simpleInitApp() { | |
281 this.setTimer(new IsoTimer(60)); | |
282 initAudio(); | |
283 | |
284 createScene(); | |
285 | |
286 prepareEar(ear1, 1); | |
287 prepareEar(ear2, 1); | |
288 prepareEar(ear3, 1); | |
289 | |
290 motionControl.play(); | |
291 | |
292 } | |
293 | |
294 public void simpleUpdate(float tpf) { | |
295 motionTimer.update(); | |
296 if (music.getStatus() != AudioNode.Status.Playing){ | |
297 music.play(); | |
298 } | |
299 Vector3f loc = cam.getLocation(); | |
300 Quaternion rot = cam.getRotation(); | |
301 listener.setLocation(loc); | |
302 listener.setRotation(rot); | |
303 music.setLocalTranslation(bell.getLocalTranslation()); | |
304 } | |
305 | |
306 } | 318 } |