Mercurial > jmeCapture

comparison: src/com/aurellem/capture/examples/Advanced.java @ 56:afc437f637bd
"improved formating"

author:    Robert McIntyre <rlm@mit.edu>
date:      Sat, 03 Dec 2011 19:25:27 -0600
parents:   d799a0278cc9
children:  f5e52169f056
comparing: 55:b05f629fc296 -> 56:afc437f637bd
  import com.jme3.system.AppSettings;
  import com.jme3.system.JmeSystem;

  /**
   *
-  * Demonstrates advanced use of the audio capture and recording features.
-  * Multiple perspectives of the same scene are simultaneously rendered to
-  * different sound files.
+  * Demonstrates advanced use of the audio capture and recording
+  * features. Multiple perspectives of the same scene are
+  * simultaneously rendered to different sound files.
   *
-  * A key limitation of the way multiple listeners are implemented is that
-  * only 3D positioning effects are realized for listeners other than the
-  * main LWJGL listener. This means that audio effects such as environment
-  * settings will *not* be heard on any auxiliary listeners, though sound
-  * attenuation will work correctly.
+  * A key limitation of the way multiple listeners are implemented is
+  * that only 3D positioning effects are realized for listeners other
+  * than the main LWJGL listener. This means that audio effects such
+  * as environment settings will *not* be heard on any auxiliary
+  * listeners, though sound attenuation will work correctly.
   *
-  * Multiple listeners as realized here might be used to make AI entities
-  * that can each hear the world from their own perspective.
+  * Multiple listeners as realized here might be used to make AI
+  * entities that can each hear the world from their own perspective.
   *
   * @author Robert McIntyre
   */

  public class Advanced extends SimpleApplication {
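The limitation called out in the comment above matters when adding listeners
of your own: auxiliary listeners get correct distance attenuation, but
environment effects (reverb and the like) apply only to the main LWJGL
listener. A minimal sketch of attaching one auxiliary listener, using the
MultiListener interface that prepareEar() below exercises (the variable
names and location here are illustrative only):

    // Give an AI entity its own ear. This listener hears 3D
    // attenuation, but not environment effects.
    if (audioRenderer instanceof MultiListener) {
        MultiListener ml = (MultiListener) audioRenderer;
        Listener aiEar = new Listener();
        aiEar.setLocation(new Vector3f(0, 0, -20));
        ml.addListener(aiEar);
    }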

  /**
-  * You will see three grey cubes, a blue sphere, and a path which
-  * circles each cube. The blue sphere is generating a constant
-  * monotone sound as it moves along the track. Each cube is
-  * listening for sound; when a cube hears sound whose intensity is
-  * greater than a certain threshold, it changes its color from
-  * grey to green.
+  * You will see three grey cubes, a blue sphere, and a path
+  * which circles each cube. The blue sphere is generating a
+  * constant monotone sound as it moves along the track. Each
+  * cube is listening for sound; when a cube hears sound whose
+  * intensity is greater than a certain threshold, it changes
+  * its color from grey to green.
   *
-  * Each cube is also saving whatever it hears to a file. The
-  * scene from the perspective of the viewer is also saved to a
-  * video file. When you listen to each of the sound files
-  * alongside the video, the sound will get louder when the sphere
-  * approaches the cube that generated that sound file. This
-  * shows that each listener is hearing the world from its own
-  * perspective.
+  * Each cube is also saving whatever it hears to a file. The
+  * scene from the perspective of the viewer is also saved to
+  * a video file. When you listen to each of the sound files
+  * alongside the video, the sound will get louder when the
+  * sphere approaches the cube that generated that sound file.
+  * This shows that each listener is hearing the world from
+  * its own perspective.
   *
   */
    public static void main(String[] args) {
        Advanced app = new Advanced();
        AppSettings settings = new AppSettings(true);
        settings.setAudioRenderer(AurellemSystemDelegate.SEND);
        JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
        app.setSettings(settings);
        app.setShowSettings(false);
        app.setPauseOnLostFocus(false);

        try {
            Capture.captureVideo(app, File.createTempFile("advanced", ".avi"));
            Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
        }
        catch (IOException e) {e.printStackTrace();}

        app.start();
    }
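In main() above, both capture calls are made before app.start(). A variation
on the same setup (same Capture API, same try/catch) writes to fixed paths
instead of temp files so the output is easy to find after a run; the
"recordings/" directory is hypothetical and must already exist:

    // Same Capture API as above, inside the same try/catch:
    Capture.captureVideo(app, new File("recordings/advanced.avi"));
    Capture.captureAudio(app, new File("recordings/advanced.wav"));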

    private Geometry bell;
    private Geometry ear1;
    private Geometry ear2;
    private Geometry ear3;
    private AudioNode music;
    private MotionTrack motionControl;

    private Geometry makeEar(Node root, Vector3f position){
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
        ear.setLocalTranslation(position);
        mat.setColor("Color", ColorRGBA.Green);
        ear.setMaterial(mat);
        root.attachChild(ear);
        return ear;
    }

    private Vector3f[] path = new Vector3f[]{
        // loop 1
        new Vector3f(0, 0, 0),
        new Vector3f(0, 0, -10),
        new Vector3f(-2, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        new Vector3f(0, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        // loop 2
        new Vector3f(5, 0, -5),
        new Vector3f(7, 0, 1.5f),
        new Vector3f(14, 0, 2),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        // loop 3
        new Vector3f(8, 0, 7.5f),
        new Vector3f(7, 0, 10.5f),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        // begin ellipse
        new Vector3f(16, 5, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-16, -10, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(16, 20, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-10, -25, 10),
        new Vector3f(-10, 0, 0),
        // come at me!
        new Vector3f(-28.00242f, 48.005623f, -34.648228f),
        new Vector3f(0, 0, -20),
    };

    private void createScene() {
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        bell = new Geometry("sound-emitter", new Sphere(15, 15, 1));
        mat.setColor("Color", ColorRGBA.Blue);
        bell.setMaterial(mat);
        rootNode.attachChild(bell);

        ear1 = makeEar(rootNode, new Vector3f(0, 0, -20));
        ear2 = makeEar(rootNode, new Vector3f(0, 0, 20));
        ear3 = makeEar(rootNode, new Vector3f(20, 0, 0));

        MotionPath track = new MotionPath();

        for (Vector3f v : path){
            track.addWayPoint(v);
        }
        track.setCurveTension(0.80f);

        motionControl = new MotionTrack(bell, track);

        // MotionTrack exposes no setTimer(), so for now use reflection
        // to swap in a fixed-rate IsoTimer:
        // motionControl.setTimer(new IsoTimer(60));
        try {
            Field timerField;
            timerField = AbstractCinematicEvent.class.getDeclaredField("timer");
            timerField.setAccessible(true);
            try {timerField.set(motionControl, new IsoTimer(60));}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
        }
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
        motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
        motionControl.setInitialDuration(20f);
        motionControl.setSpeed(1f);

        track.enableDebugShape(assetManager, rootNode);
        positionCamera();
    }
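The reflection block above is the one non-obvious step in createScene(): it
swaps the cinematic event's internal timer for a fixed-rate IsoTimer so the
sphere's motion advances in lock-step with the IsoTimer-driven application
set up in simpleInitApp() below. A hypothetical helper wrapping the same
trick, assuming only the "timer" field name used above:

    // Force any AbstractCinematicEvent onto a fixed-rate timer.
    private static void useIsoTimer(AbstractCinematicEvent event) {
        try {
            Field timerField =
                AbstractCinematicEvent.class.getDeclaredField("timer");
            timerField.setAccessible(true);
            timerField.set(event, new IsoTimer(60));
        }
        catch (Exception e) {e.printStackTrace();}
    }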

    private void positionCamera(){
        this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
        this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
    }

    private void initAudio() {
        org.lwjgl.input.Mouse.setGrabbed(false);
        music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);

        rootNode.attachChild(music);
        audioRenderer.playSource(music);
        music.setPositional(true);
        music.setVolume(1f);
        music.setReverbEnabled(false);
        music.setDirectional(false);
        music.setMaxDistance(200.0f);
        music.setRefDistance(1f);
        //music.setRolloffFactor(1f);
        music.setLooping(false);
        // start paused; simpleUpdate() restarts the beep whenever it ends
        audioRenderer.pauseSource(music);
    }

    public class Dancer implements SoundProcessor {
        Geometry entity;
        float scale = 2;
        public Dancer(Geometry entity){
            this.entity = entity;
        }

        /**
         * This method is irrelevant since there is no state to clean up.
         */
        public void cleanup() {}

        /**
         * Respond to sound! This is the brain of an AI entity that
         * hears its surroundings and reacts to them.
         */
        public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
            audioSamples.clear();
            byte[] data = new byte[numSamples];
            float[] out = new float[numSamples];
            audioSamples.get(data);
            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
                numSamples/format.getFrameSize(), format);

            // turn green when the loudest sample crosses the threshold,
            // grey otherwise
            float max = Float.NEGATIVE_INFINITY;
            for (float f : out){if (f > max) max = f;}
            audioSamples.clear();

            if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
            else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
        }
    }
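Dancer reacts to the loudest single sample in each buffer. A hypothetical
variant on the same assumed SoundProcessor interface keys off RMS loudness
instead, which is steadier on spiky signals (the 0.05 threshold is
illustrative):

    public class RmsDancer implements SoundProcessor {
        Geometry entity;
        public RmsDancer(Geometry entity){this.entity = entity;}

        public void cleanup() {}

        public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
            audioSamples.clear();
            byte[] data = new byte[numSamples];
            float[] out = new float[numSamples];
            audioSamples.get(data);
            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
                numSamples/format.getFrameSize(), format);

            // root-mean-square amplitude over the whole buffer
            float sumSquares = 0f;
            for (float f : out){sumSquares += f * f;}
            float rms = (float) Math.sqrt(sumSquares / out.length);

            if (rms > 0.05f){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
            else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
        }
    }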

    private void prepareEar(Geometry ear, int n){
        if (this.audioRenderer instanceof MultiListener){
            MultiListener rf = (MultiListener)this.audioRenderer;

            Listener auxListener = new Listener();
            auxListener.setLocation(ear.getLocalTranslation());

            rf.addListener(auxListener);
            WaveFileWriter aux = null;

            // note: hard-coded output directory
            try {aux = new WaveFileWriter(new File("/home/r/tmp/ear"+n+".wav"));}
            catch (FileNotFoundException e) {e.printStackTrace();}

            rf.registerSoundProcessor(auxListener,
                new CompositeSoundProcessor(new Dancer(ear), aux));
        }
    }
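CompositeSoundProcessor chains processors, so each ear both reacts (Dancer)
and records (WaveFileWriter). To record a listener with no AI reaction,
registering a WaveFileWriter by itself should suffice, since it is composed
as a SoundProcessor above. A sketch, assuming rf is the MultiListener
obtained as in prepareEar() and using a hypothetical output path:

    Listener recorder = new Listener();
    recorder.setLocation(new Vector3f(0, 0, 0));
    rf.addListener(recorder);
    try {
        rf.registerSoundProcessor(recorder,
            new WaveFileWriter(new File("/home/r/tmp/room.wav")));
    }
    catch (FileNotFoundException e) {e.printStackTrace();}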

    public void simpleInitApp() {
        this.setTimer(new IsoTimer(60));
        initAudio();

        createScene();

        // give each ear its own output file (ear1.wav, ear2.wav, ear3.wav)
        prepareEar(ear1, 1);
        prepareEar(ear2, 2);
        prepareEar(ear3, 3);

        motionControl.play();
    }

    public void simpleUpdate(float tpf) {
        // restart the beep whenever it finishes
        if (music.getStatus() != AudioNode.Status.Playing){
            music.play();
        }
        // keep the main listener glued to the camera, and the sound
        // source glued to the moving sphere
        Vector3f loc = cam.getLocation();
        Quaternion rot = cam.getRotation();
        listener.setLocation(loc);
        listener.setRotation(rot);
        music.setLocalTranslation(bell.getLocalTranslation());
    }

}