# HG changeset patch
# User Robert McIntyre
# Date 1322975161 21600
# Node ID 42bbb176b90fb5e860ffff41912127eeecbe7a13
# Parent  5afa49c5a7d3b45879036fa366eb62ec4701a5bc
first pass at wiki page

diff -r 5afa49c5a7d3 -r 42bbb176b90f README

[...] audio as a result. A better way is to record video and audio
directly from the game while it is running.

===== Simple Way =====

If all you want is to record video at 30fps with no sound, then look
no further than jMonkeyEngine3's built-in ''VideoRecorderAppState''
class.

Add the following code to your simpleInitApp() method.

stateManager.attach(new VideoRecorderAppState()); //start recording

The game will run slowly, but the recording will be high-quality and
play back at normal speed. The video files will be stored in your
user home directory; if you want to save to another file, specify it
in the VideoRecorderAppState constructor. Recording starts when the
state is attached and ends when the application quits or the state is
detached.
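For example, to send the recording to a file of your choosing instead
(a sketch; the path here is only an illustration, so check the
constructor overloads in your jME3 version):

stateManager.attach(new VideoRecorderAppState(new File("my-video.avi")));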
That's all!

===== Advanced Way =====

If you want to record audio as well, record at different framerates,
or record from multiple viewpoints at once, then there's a full
solution for doing this already made for you here:

http://www.aurellem.com/releases/jmeCapture-latest.zip
http://www.aurellem.com/releases/jmeCapture-latest.tar.bz2

The javadoc is here:
http://www.aurellem.com/jmeCapture/docs/

To capture video and audio you use the
''com.aurellem.capture.Capture'' class, which has two methods,
''captureAudio'' and ''captureVideo'', and the
''com.aurellem.capture.IsoTimer'' class, which sets the audio and
video framerate.

The steps are as simple as:

[...]

[...] AVI 1.0 files generated by this method that exceed 2.0GB are
invalid according to the AVI 1.0 spec (but many programs can still
deal with them). Thanks to Werner Randelshofer for his excellent work
which made the AVI file writer option possible.

3.) Any non-directory file ending in anything other than ".avi" will
be processed through Xuggle. Xuggle provides the option to use [...]
system yourself in order to use this option. Please visit
http://www.xuggle.com/ to learn how to do this.

Note that you will not hear any sound if you choose to record sound
to a file.
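To make the output choices above concrete, here is a sketch (the file
names are hypothetical, and the behavior for a directory is inferred
from the "non-directory file" wording above rather than quoted from
the docs):

Capture.captureVideo(app, new File("frames/"));  // a directory is handled specially (see above)
Capture.captureVideo(app, new File("out.avi"));  // ".avi" selects the AVI 1.0 writer
Capture.captureVideo(app, new File("out.mp4"));  // any other name is processed through Xuggle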
==== Basic Example ====

Here is a complete example showing how to capture both audio and
video from one of jMonkeyEngine3's advanced demo applications.

import java.io.File;
import java.io.IOException;

import jme3test.water.TestPostWater;

import com.aurellem.capture.Capture;
import com.aurellem.capture.IsoTimer;
import com.jme3.app.SimpleApplication;

/**
 * Demonstrates how to use basic Audio/Video capture with a
 * jMonkeyEngine application. You can use these techniques to make
 * high quality cutscenes or demo videos, even on very slow laptops.
 *
 * @author Robert McIntyre
 */
public class Basic {

    public static void main(String[] ignore) throws IOException {
        File video = File.createTempFile("JME-water-video", ".avi");
        File audio = File.createTempFile("JME-water-audio", ".wav");

        SimpleApplication app = new TestPostWater();
        app.setTimer(new IsoTimer(60));
        app.setShowSettings(false);

        Capture.captureVideo(app, video);
        Capture.captureAudio(app, audio);

        app.start();

        System.out.println(video.getCanonicalPath());
        System.out.println(audio.getCanonicalPath());
    }
}

==== How it works ====

A standard JME3 application that extends ''SimpleApplication'' or
''Application'' tries as hard as it can to keep in sync with
/user-time/. If a ball is rolling at 1 game-mile per game-hour in the
game, and you wait for one user-hour as measured by the clock on your
wall, then the ball should have traveled exactly one game-mile. In
order to keep sync with the real world, the game throttles its
physics engine and graphics display. If the computations involved in
running the game are too intense, then the game will first skip
frames, then sacrifice physics accuracy. If there are particularly
demanding computations, then you may only get 1 fps, and the ball may
tunnel through the floor or obstacles due to inaccurate physics
simulation, but after the end of one user-hour, that ball will have
traveled one game-mile.

When we're recording video, we don't care if the game-time syncs with
user-time, but instead whether the time in the recorded video
(video-time) syncs with user-time. To continue the analogy, if we
recorded the ball rolling at 1 game-mile per game-hour and watched
the video later, we would want to see 30 fps video of the ball
rolling at 1 video-mile per /user-hour/. It doesn't matter how much
user-time it took to simulate that hour of game-time to make the
high-quality recording.

The IsoTimer ignores real-time and always reports that the same
amount of time has passed every time it is called. That way, one can
put code to write each video/audio frame to a file without worrying
about that code itself slowing down the game to the point where the
recording would be useless.
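The idea behind IsoTimer can be sketched as a jME3 Timer whose
reported time advances by a fixed step on every update, no matter how
long the frame actually took. This is a minimal sketch of the
concept, not the actual com.aurellem.capture.IsoTimer source:

import com.jme3.system.Timer;

// Every update() advances reported time by exactly 1/framerate,
// regardless of how much real time the frame consumed.
public class FixedStepTimer extends Timer {
    private final float framerate;
    private int ticks = 0;

    public FixedStepTimer(float framerate){ this.framerate = framerate; }

    public void update(){ ticks++; }  // one fixed step per frame
    public float getTimePerFrame(){ return 1.0f / framerate; }
    public float getFrameRate(){ return framerate; }
    public long getResolution(){ return 1000L; }
    public long getTime(){ return (long) (ticks * (getResolution() / framerate)); }
    public void reset(){ ticks = 0; }
}

Because the game only ever sees this timer, a frame that took ten
seconds to render and write to disk still counts as exactly 1/60th of
a second of game-time and video-time.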
==== Advanced Example ====

The package from aurellem.com was made for AI research and can do
more than just record a single stream of audio and video. You can use
it to:

1.) Create multiple independent listeners that each hear the world
from their own perspective.

2.) Process the sound data in any way you wish.

3.) Do the same for visual data.

Here is a more advanced example, which can also be found along with
other examples in the jmeCapture.jar file included in the
distribution.

package com.aurellem.capture.examples;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;

import javax.sound.sampled.AudioFormat;

import org.tritonus.share.sampled.FloatSampleTools;

import com.aurellem.capture.AurellemSystemDelegate;
import com.aurellem.capture.Capture;
import com.aurellem.capture.IsoTimer;
import com.aurellem.capture.audio.CompositeSoundProcessor;
import com.aurellem.capture.audio.MultiListener;
import com.aurellem.capture.audio.SoundProcessor;
import com.aurellem.capture.audio.WaveFileWriter;
import com.jme3.app.SimpleApplication;
import com.jme3.audio.AudioNode;
import com.jme3.audio.Listener;
import com.jme3.cinematic.MotionPath;
import com.jme3.cinematic.events.AbstractCinematicEvent;
import com.jme3.cinematic.events.MotionTrack;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.FastMath;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.shape.Box;
import com.jme3.scene.shape.Sphere;
import com.jme3.system.AppSettings;
import com.jme3.system.JmeSystem;

/**
 * Demonstrates advanced use of the audio capture and recording
 * features. Multiple perspectives of the same scene are
 * simultaneously rendered to different sound files.
 *
 * A key limitation of the way multiple listeners are implemented is
 * that only 3D positioning effects are realized for listeners other
 * than the main LWJGL listener. This means that audio effects such
 * as environment settings will *not* be heard on any auxiliary
 * listeners, though sound attenuation will work correctly.
 *
 * Multiple listeners as realized here might be used to make AI
 * entities that can each hear the world from their own perspective.
 *
 * @author Robert McIntyre
 */
public class Advanced extends SimpleApplication {

    /**
     * You will see three grey cubes, a blue sphere, and a path which
     * circles each cube. The blue sphere is generating a constant
     * monotone sound as it moves along the track. Each cube is
     * listening for sound; when a cube hears sound whose intensity
     * is greater than a certain threshold, it changes its color from
     * grey to green.
     *
     * Each cube is also saving whatever it hears to a file. The
     * scene from the perspective of the viewer is also saved to a
     * video file. When you listen to each of the sound files
     * alongside the video, the sound will get louder when the sphere
     * approaches the cube that generated that sound file. This
     * shows that each listener is hearing the world from its own
     * perspective.
     */
    public static void main(String[] args) {
        Advanced app = new Advanced();
        AppSettings settings = new AppSettings(true);
        settings.setAudioRenderer(AurellemSystemDelegate.SEND);
        JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
        app.setSettings(settings);
        app.setShowSettings(false);
        app.setPauseOnLostFocus(false);

        try {
            Capture.captureVideo(app, File.createTempFile("advanced", ".avi"));
            Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
        }
        catch (IOException e) {e.printStackTrace();}

        app.start();
    }

    private Geometry bell;
    private Geometry ear1;
    private Geometry ear2;
    private Geometry ear3;
    private AudioNode music;
    private MotionTrack motionControl;

    private Geometry makeEar(Node root, Vector3f position){
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
        ear.setLocalTranslation(position);
        mat.setColor("Color", ColorRGBA.Green);
        ear.setMaterial(mat);
        root.attachChild(ear);
        return ear;
    }

    private Vector3f[] path = new Vector3f[]{
        // loop 1
        new Vector3f(0, 0, 0),
        new Vector3f(0, 0, -10),
        new Vector3f(-2, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        new Vector3f(0, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        // loop 2
        new Vector3f(5, 0, -5),
        new Vector3f(7, 0, 1.5f),
        new Vector3f(14, 0, 2),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        // loop 3
        new Vector3f(8, 0, 7.5f),
        new Vector3f(7, 0, 10.5f),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        // begin ellipse
        new Vector3f(16, 5, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-16, -10, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(16, 20, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-10, -25, 10),
        new Vector3f(-10, 0, 0),
        // come at me!
        new Vector3f(-28.00242f, 48.005623f, -34.648228f),
        new Vector3f(0, 0, -20),
    };

    private void createScene() {
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        bell = new Geometry("sound-emitter", new Sphere(15, 15, 1));
        mat.setColor("Color", ColorRGBA.Blue);
        bell.setMaterial(mat);
        rootNode.attachChild(bell);

        ear1 = makeEar(rootNode, new Vector3f(0, 0, -20));
        ear2 = makeEar(rootNode, new Vector3f(0, 0, 20));
        ear3 = makeEar(rootNode, new Vector3f(20, 0, 0));

        MotionPath track = new MotionPath();

        for (Vector3f v : path){
            track.addWayPoint(v);
        }
        track.setCurveTension(0.80f);

        motionControl = new MotionTrack(bell, track);

        // for now, use reflection to change the timer...
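        // (AbstractCinematicEvent keeps its timer in a private field
        // and, in this version of jME3, exposes no public setter, so
        // reflection is the only way to make the cinematic advance in
        // video-time rather than real time.)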
        // motionControl.setTimer(new IsoTimer(60));
        try {
            Field timerField;
            timerField = AbstractCinematicEvent.class.getDeclaredField("timer");
            timerField.setAccessible(true);
            try {timerField.set(motionControl, new IsoTimer(60));}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
        }
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
        motionControl.setRotation(
            new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
        motionControl.setInitialDuration(20f);
        motionControl.setSpeed(1f);

        track.enableDebugShape(assetManager, rootNode);
        positionCamera();
    }

    private void positionCamera(){
        this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
        this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
    }

    private void initAudio() {
        org.lwjgl.input.Mouse.setGrabbed(false);
        music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);

        rootNode.attachChild(music);
        audioRenderer.playSource(music);
        music.setPositional(true);
        music.setVolume(1f);
        music.setReverbEnabled(false);
        music.setDirectional(false);
        music.setMaxDistance(200.0f);
        music.setRefDistance(1f);
        //music.setRolloffFactor(1f);
        music.setLooping(false);
        audioRenderer.pauseSource(music);
    }

    public class Dancer implements SoundProcessor {
        Geometry entity;
        float scale = 2;

        public Dancer(Geometry entity){
            this.entity = entity;
        }

        /**
         * This method is irrelevant since there is no state to clean up.
         */
        public void cleanup() {}

        /**
         * Respond to sound! This is the brain of an AI entity that
         * hears its surroundings and reacts to them.
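         *
         * (It converts the interleaved sample bytes to floats, scans
         * for the loudest sample in the buffer, and turns the cube
         * green whenever that peak exceeds a fixed threshold.)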
         */
        public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
            audioSamples.clear();
            byte[] data = new byte[numSamples];
            float[] out = new float[numSamples];
            audioSamples.get(data);
            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
                numSamples/format.getFrameSize(), format);

            float max = Float.NEGATIVE_INFINITY;
            for (float f : out){if (f > max) max = f;}
            audioSamples.clear();

            if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
            else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
        }
    }

    private void prepareEar(Geometry ear, int n){
        if (this.audioRenderer instanceof MultiListener){
            MultiListener rf = (MultiListener)this.audioRenderer;

            Listener auxListener = new Listener();
            auxListener.setLocation(ear.getLocalTranslation());

            rf.addListener(auxListener);
            WaveFileWriter aux = null;

            try {aux = new WaveFileWriter(new File("/home/r/tmp/ear"+n+".wav"));}
            catch (FileNotFoundException e) {e.printStackTrace();}

            rf.registerSoundProcessor(auxListener,
                new CompositeSoundProcessor(new Dancer(ear), aux));
        }
    }

    public void simpleInitApp() {
        this.setTimer(new IsoTimer(60));
        initAudio();

        createScene();

        // give each ear its own output file (ear1.wav, ear2.wav, ear3.wav)
        prepareEar(ear1, 1);
        prepareEar(ear2, 2);
        prepareEar(ear3, 3);

        motionControl.play();
    }

    public void simpleUpdate(float tpf) {
        if (music.getStatus() != AudioNode.Status.Playing){
            music.play();
        }
        Vector3f loc = cam.getLocation();
        Quaternion rot = cam.getRotation();
        listener.setLocation(loc);
        listener.setRotation(rot);
        music.setLocalTranslation(bell.getLocalTranslation());
    }
}

===== More Information =====

This is the old page showing the first version of this idea:

http://aurellem.org/cortex/html/capture-video.html

All source code can be found here:

http://hg.bortreb.com/audio-send
http://hg.bortreb.com/jmeCapture

More information on the modifications to OpenAL to support multiple
listeners can be found here:

http://aurellem.org/audio-send/html/ear.html