jmeCapture: README @ 60:42bbb176b90f
"first pass at wiki page"

author:   Robert McIntyre <rlm@mit.edu>
date:     Sat, 03 Dec 2011 23:06:01 -0600
parents:  5afa49c5a7d3
children: 76581e11fb72

game itself. Screen capturing is the most straightforward way to do
this, but it can slow down your game and produce low-quality video and
audio as a result. A better way is to record video and audio directly
from the game while it is running.

===== Simple Way =====

If all you want is to record video at 30fps with no sound, then look
no further than jMonkeyEngine3's built-in ''VideoRecorderAppState''
class.

Add the following code to your simpleInitApp() method.

<code java>
stateManager.attach(new VideoRecorderAppState()); //start recording
</code>

The game will run slowly, but the recording will be high quality and
play back at normal speed. The video files will be stored in your user
home directory; if you want to save to another file, specify it in the
VideoRecorderAppState constructor. Recording starts when the state is
attached and ends when the application quits or the state is detached.
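
If you want the recording somewhere specific, here is a minimal
sketch, assuming the ''VideoRecorderAppState'' constructor that takes
a target ''File'' (the path below is hypothetical):

<code java>
import java.io.File;

// Save the recording to an explicit file instead of the home directory.
stateManager.attach(new VideoRecorderAppState(new File("my-video.avi")));
</code>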

That's all!

===== Advanced Way =====

If you want to record audio as well, record at different framerates,
or record from multiple viewpoints at once, then there's a complete,
ready-made solution here:

http://www.aurellem.com/releases/jmeCapture-latest.zip
http://www.aurellem.com/releases/jmeCapture-latest.tar.bz2

Download the archive in your preferred format, extract,
add the jars to your project, and you are ready to go.

The javadoc is here:
http://www.aurellem.com/jmeCapture/docs/

To capture video and audio you use the
''com.aurellem.capture.Capture'' class, which has two methods,
''captureAudio'' and ''captureVideo'', and the
''com.aurellem.capture.IsoTimer'' class, which sets the audio and
video framerate.

The steps are as simple as:

<code java>
yourApp.setTimer(new IsoTimer(desiredFramesPerSecond));
</code>

your preferred container/codec format. Be advised that some video
players cannot process AVI with a RAW stream, and that AVI 1.0 files
generated by this method that exceed 2.0GB are invalid according to
the AVI 1.0 spec (but many programs can still deal with them). Thanks
to Werner Randelshofer for his excellent work which made the AVI file
writer option possible.

3.) Any non-directory file ending in anything other than ".avi" will
be processed through Xuggle. Xuggle provides the option to use many
codecs/containers, but you will have to install it on your system
yourself in order to use this option. Please visit
http://www.xuggle.com/ to learn how to do this.

Note that you will not hear any sound if you choose to record sound to
a file.
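
Putting these rules together, the target file's extension selects the
writer. A short sketch (''app'' stands for your ''SimpleApplication'';
the file names are hypothetical):

<code java>
Capture.captureAudio(app, new File("out.wav"));  // WAV audio writer
Capture.captureVideo(app, new File("out.avi"));  // built-in AVI writer (RAW stream)
Capture.captureVideo(app, new File("out.mp4"));  // handed off to Xuggle, if installed
</code>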

==== Basic Example ====

Here is a complete example showing how to capture both audio and video
from one of jMonkeyEngine3's advanced demo applications.

<code java>
import java.io.File;
import java.io.IOException;

import jme3test.water.TestPostWater;

import com.aurellem.capture.Capture;
import com.aurellem.capture.IsoTimer;
import com.jme3.app.SimpleApplication;

/**
 * Demonstrates how to use basic Audio/Video capture with a
 * jMonkeyEngine application. You can use these techniques to make
 * high quality cutscenes or demo videos, even on very slow laptops.
 *
 * @author Robert McIntyre
 */
public class Basic {

    public static void main(String[] ignore) throws IOException {
        File video = File.createTempFile("JME-water-video", ".avi");
        File audio = File.createTempFile("JME-water-audio", ".wav");

        SimpleApplication app = new TestPostWater();
        // lock game-time to 60 frames per second, independent of real time
        app.setTimer(new IsoTimer(60));
        app.setShowSettings(false);

        Capture.captureVideo(app, video);
        Capture.captureAudio(app, audio);

        app.start();

        // print where the recordings ended up
        System.out.println(video.getCanonicalPath());
        System.out.println(audio.getCanonicalPath());
    }
}
</code>

==== How it works ====

A standard JME3 application that extends ''SimpleApplication'' or
''Application'' tries as hard as it can to keep in sync with
//user-time//. If a ball is rolling at 1 game-mile per game-hour in
the game, and you wait for one user-hour as measured by the clock on
your wall, then the ball should have traveled exactly one game-mile.
In order to keep in sync with the real world, the game throttles its
physics engine and graphics display. If the computations involved in
running the game are too intense, then the game will first skip
frames, then sacrifice physics accuracy. If there are particularly
demanding computations, then you may only get 1 fps, and the ball may
tunnel through the floor or obstacles due to inaccurate physics
simulation, but after the end of one user-hour, that ball will have
traveled one game-mile.

When we're recording video, we don't care if the game-time syncs with
user-time, but instead whether the time in the recorded video
(video-time) syncs with user-time. To continue the analogy, if we
recorded the ball rolling at 1 game-mile per game-hour and watched the
video later, we would want to see 30 fps video of the ball rolling at
1 video-mile per //user-hour//. It doesn't matter how much user-time
it took to simulate that hour of game-time to make the high-quality
recording.

The ''IsoTimer'' ignores real time and always reports that the same
amount of time has passed every time it is called. That way, one can
put code to write each video/audio frame to a file without worrying
about that code itself slowing down the game to the point where the
recording would be useless.
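
To make this concrete, here is a minimal sketch of a fixed-step timer
in the spirit of ''IsoTimer'' (illustrative only; the real
''com.aurellem.capture.IsoTimer'' may differ in detail):

<code java>
import com.jme3.system.Timer;

// A fixed-step timer: every frame advances game-time by exactly
// 1/framerate seconds, no matter how long the frame really took.
public class FixedStepTimer extends Timer {
    private final float framerate;
    private long ticks = 0;

    public FixedStepTimer(float framerate) { this.framerate = framerate; }

    // Called once per frame by the engine; we just count frames.
    public void update() { ticks++; }

    // Always report the same step, regardless of wall-clock time.
    public float getTimePerFrame() { return 1.0f / framerate; }
    public float getFrameRate()    { return framerate; }

    // "Time" is measured in ticks, so the resolution is the framerate.
    public long getTime()       { return ticks; }
    public long getResolution() { return (long) framerate; }

    public void reset() { ticks = 0; }
}
</code>

Installing such a timer with ''yourApp.setTimer(...)'' means each
rendered frame corresponds to exactly 1/framerate seconds of
game-time, however long it took to compute and record.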

==== Advanced Example ====

The package from aurellem.com was made for AI research and can do more
than just record a single stream of audio and video. You can use it
to:

1.) Create multiple independent listeners that each hear the world
from their own perspective.

2.) Process the sound data in any way you wish.

3.) Do the same for visual data.

Here is a more advanced example, which can also be found along with
other examples in the jmeCapture.jar file included in the
distribution.

<code java>
package com.aurellem.capture.examples;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;

import javax.sound.sampled.AudioFormat;

import org.tritonus.share.sampled.FloatSampleTools;

import com.aurellem.capture.AurellemSystemDelegate;
import com.aurellem.capture.Capture;
import com.aurellem.capture.IsoTimer;
import com.aurellem.capture.audio.CompositeSoundProcessor;
import com.aurellem.capture.audio.MultiListener;
import com.aurellem.capture.audio.SoundProcessor;
import com.aurellem.capture.audio.WaveFileWriter;
import com.jme3.app.SimpleApplication;
import com.jme3.audio.AudioNode;
import com.jme3.audio.Listener;
import com.jme3.cinematic.MotionPath;
import com.jme3.cinematic.events.AbstractCinematicEvent;
import com.jme3.cinematic.events.MotionTrack;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.FastMath;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.shape.Box;
import com.jme3.scene.shape.Sphere;
import com.jme3.system.AppSettings;
import com.jme3.system.JmeSystem;

/**
 * Demonstrates advanced use of the audio capture and recording
 * features. Multiple perspectives of the same scene are
 * simultaneously rendered to different sound files.
 *
 * A key limitation of the way multiple listeners are implemented is
 * that only 3D positioning effects are realized for listeners other
 * than the main LWJGL listener. This means that audio effects such
 * as environment settings will *not* be heard on any auxiliary
 * listeners, though sound attenuation will work correctly.
 *
 * Multiple listeners as realized here might be used to make AI
 * entities that can each hear the world from their own perspective.
 *
 * @author Robert McIntyre
 */

public class Advanced extends SimpleApplication {

    /**
     * You will see three grey cubes, a blue sphere, and a path which
     * circles each cube. The blue sphere is generating a constant
     * monotone sound as it moves along the track. Each cube is
     * listening for sound; when a cube hears sound whose intensity is
     * greater than a certain threshold, it changes its color from
     * grey to green.
     *
     * Each cube is also saving whatever it hears to a file. The
     * scene from the perspective of the viewer is also saved to a
     * video file. When you listen to each of the sound files
     * alongside the video, the sound will get louder when the sphere
     * approaches the cube that generated that sound file. This
     * shows that each listener is hearing the world from its own
     * perspective.
     */
    public static void main(String[] args) {
        Advanced app = new Advanced();
        AppSettings settings = new AppSettings(true);
        settings.setAudioRenderer(AurellemSystemDelegate.SEND);
        JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
        app.setSettings(settings);
        app.setShowSettings(false);
        app.setPauseOnLostFocus(false);

        try {
            Capture.captureVideo(app, File.createTempFile("advanced", ".avi"));
            Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
        }
        catch (IOException e) {e.printStackTrace();}

        app.start();
    }

    private Geometry bell;
    private Geometry ear1;
    private Geometry ear2;
    private Geometry ear3;
    private AudioNode music;
    private MotionTrack motionControl;

    private Geometry makeEar(Node root, Vector3f position){
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
        ear.setLocalTranslation(position);
        mat.setColor("Color", ColorRGBA.Green);
        ear.setMaterial(mat);
        root.attachChild(ear);
        return ear;
    }

    private Vector3f[] path = new Vector3f[]{
        // loop 1
        new Vector3f(0, 0, 0),
        new Vector3f(0, 0, -10),
        new Vector3f(-2, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        new Vector3f(0, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        // loop 2
        new Vector3f(5, 0, -5),
        new Vector3f(7, 0, 1.5f),
        new Vector3f(14, 0, 2),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        // loop 3
        new Vector3f(8, 0, 7.5f),
        new Vector3f(7, 0, 10.5f),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        // begin ellipse
        new Vector3f(16, 5, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-16, -10, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(16, 20, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-10, -25, 10),
        new Vector3f(-10, 0, 0),
        // come at me!
        new Vector3f(-28.00242f, 48.005623f, -34.648228f),
        new Vector3f(0, 0, -20),
    };

    private void createScene() {
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        bell = new Geometry("sound-emitter", new Sphere(15, 15, 1));
        mat.setColor("Color", ColorRGBA.Blue);
        bell.setMaterial(mat);
        rootNode.attachChild(bell);

        ear1 = makeEar(rootNode, new Vector3f(0, 0, -20));
        ear2 = makeEar(rootNode, new Vector3f(0, 0, 20));
        ear3 = makeEar(rootNode, new Vector3f(20, 0, 0));

        MotionPath track = new MotionPath();

        for (Vector3f v : path){
            track.addWayPoint(v);
        }
        track.setCurveTension(0.80f);

        motionControl = new MotionTrack(bell, track);

        // for now, use reflection to change the timer...
        // motionControl.setTimer(new IsoTimer(60));
        try {
            Field timerField;
            timerField = AbstractCinematicEvent.class.getDeclaredField("timer");
            timerField.setAccessible(true);
            try {timerField.set(motionControl, new IsoTimer(60));}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
        }
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
        motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
        motionControl.setInitialDuration(20f);
        motionControl.setSpeed(1f);

        track.enableDebugShape(assetManager, rootNode);
        positionCamera();
    }

    private void positionCamera(){
        this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
        this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
    }

    private void initAudio() {
        org.lwjgl.input.Mouse.setGrabbed(false);
        music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);

        rootNode.attachChild(music);
        audioRenderer.playSource(music);
        music.setPositional(true);
        music.setVolume(1f);
        music.setReverbEnabled(false);
        music.setDirectional(false);
        music.setMaxDistance(200.0f);
        music.setRefDistance(1f);
        //music.setRolloffFactor(1f);
        music.setLooping(false);
        audioRenderer.pauseSource(music);
    }

    public class Dancer implements SoundProcessor {
        Geometry entity;
        float scale = 2;
        public Dancer(Geometry entity){
            this.entity = entity;
        }

        /**
         * This method does nothing, since there is no state to clean up.
         */
        public void cleanup() {}

        /**
         * Respond to sound! This is the brain of an AI entity that
         * hears its surroundings and reacts to them.
         */
        public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
            audioSamples.clear();
            byte[] data = new byte[numSamples];
            float[] out = new float[numSamples];
            audioSamples.get(data);
            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
                                                   numSamples/format.getFrameSize(), format);

            float max = Float.NEGATIVE_INFINITY;
            for (float f : out){if (f > max) max = f;}
            audioSamples.clear();

            if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
            else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
        }
    }

    private void prepareEar(Geometry ear, int n){
        if (this.audioRenderer instanceof MultiListener){
            MultiListener rf = (MultiListener)this.audioRenderer;

            Listener auxListener = new Listener();
            auxListener.setLocation(ear.getLocalTranslation());

            rf.addListener(auxListener);
            WaveFileWriter aux = null;

            try {aux = new WaveFileWriter(new File("/home/r/tmp/ear"+n+".wav"));}
            catch (FileNotFoundException e) {e.printStackTrace();}

            rf.registerSoundProcessor(auxListener,
                                      new CompositeSoundProcessor(new Dancer(ear), aux));
        }
    }

    public void simpleInitApp() {
        this.setTimer(new IsoTimer(60));
        initAudio();

        createScene();

        // give each ear its own output file (ear1.wav, ear2.wav, ear3.wav)
        prepareEar(ear1, 1);
        prepareEar(ear2, 2);
        prepareEar(ear3, 3);

        motionControl.play();
    }

    public void simpleUpdate(float tpf) {
        if (music.getStatus() != AudioNode.Status.Playing){
            music.play();
        }
        Vector3f loc = cam.getLocation();
        Quaternion rot = cam.getRotation();
        listener.setLocation(loc);
        listener.setRotation(rot);
        music.setLocalTranslation(bell.getLocalTranslation());
    }

}
</code>

<iframe width="420" height="315"
src="http://www.youtube.com/embed/oCEfK0yhDrY"
frameborder="0" allowfullscreen>
</iframe>

===== More Information =====

The old page showing the first version of this idea is here:
http://aurellem.org/cortex/html/capture-video.html

All source code can be found here:

http://hg.bortreb.com/audio-send
http://hg.bortreb.com/jmeCapture

More information on the modifications to OpenAL to support multiple
listeners can be found here:

http://aurellem.org/audio-send/html/ear.html