changeset 60:42bbb176b90f

first pass at wiki page
author Robert McIntyre <rlm@mit.edu>
date Sat, 03 Dec 2011 23:06:01 -0600
parents 5afa49c5a7d3
children 76581e11fb72
files README
diffstat 1 files changed, 450 insertions(+), 48 deletions(-) [+]
     1.1 --- a/README	Sat Dec 03 22:14:21 2011 -0600
     1.2 +++ b/README	Sat Dec 03 23:06:01 2011 -0600
     1.3 @@ -8,7 +8,32 @@
     1.4  audio as a result. A better way is to record video and audio directly
     1.5  from the game while it is running.
     1.6  
     1.7 -There's a full solution for doing this already made for you here:
     1.8 +
     1.9 +===== Simple Way =====
    1.10 +
     1.11 +If you just want to record video at 30fps with no sound, then look
     1.12 +no further than jMonkeyEngine3's built-in ''VideoRecorderAppState''
     1.13 +class.
    1.14 +
     1.15 +Add the following code to your ''simpleInitApp()'' method.
    1.16 +
    1.17 +<code java>
    1.18 +stateManager.attach(new VideoRecorderAppState()); //start recording
    1.19 +</code>
    1.20 +
     1.21 +The game will run slowly, but the recording will be high-quality
     1.22 +and play back at normal speed. The video files are stored in your
     1.23 +user home directory; to save to a different file, specify it in the
     1.24 +''VideoRecorderAppState'' constructor. Recording starts when the state is
     1.25 +attached and ends when the application quits or the state is detached.
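          +
          +For example, to save the recording to a specific file (a minimal
          +sketch; the file name here is arbitrary):
          +
          +<code java>
          +// requires: import java.io.File;
          +stateManager.attach(new VideoRecorderAppState(new File("my-video.avi")));
          +</code>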
    1.26 +
    1.27 +That's all!
    1.28 +
    1.29 +===== Advanced Way =====
    1.30 +
     1.31 +If you want to record audio as well, record at different framerates,
     1.32 +or record from multiple viewpoints at once, then a full solution is
     1.33 +already made for you here:
    1.34  
    1.35  http://www.aurellem.com/releases/jmeCapture-latest.zip
    1.36  http://www.aurellem.com/releases/jmeCapture-latest.tar.bz2
    1.37 @@ -19,55 +44,11 @@
    1.38  The javadoc is here:
    1.39  http://www.aurellem.com/jmeCapture/docs/
    1.40  
    1.41 -Here is a complete example showing how to capture both audio and video
    1.42 -from one of jMonkeyEngine3's advenced demo applications.
    1.43 -
    1.44 -<code java>
    1.45 -import java.io.File;
    1.46 -import java.io.IOException;
    1.47 -
    1.48 -import jme3test.water.TestPostWater;
    1.49 -
    1.50 -import com.aurellem.capture.Capture;
    1.51 -import com.aurellem.capture.IsoTimer;
    1.52 -import com.jme3.app.SimpleApplication;
    1.53 -
    1.54 -
    1.55 -/**
    1.56 - * Demonstrates how to use basic Audio/Video capture with a
    1.57 - * jMonkeyEngine application. You can use these techniques to make
    1.58 - * high quality cutscenes or demo videos, even on very slow laptops.
    1.59 - * 
    1.60 - * @author Robert McIntyre
    1.61 - */
    1.62 -
    1.63 -public class Basic {
    1.64 -	
    1.65 -    public static void main(String[] ignore) throws IOException{
    1.66 -	File video = File.createTempFile("JME-water-video", ".avi");
    1.67 -	File audio = File.createTempFile("JME-water-audio", ".wav");
    1.68 -		
    1.69 -	SimpleApplication app = new TestPostWater();
    1.70 -	app.setTimer(new IsoTimer(60));
    1.71 -	app.setShowSettings(false);
    1.72 -		
    1.73 -	Capture.captureVideo(app, video);
    1.74 -	Capture.captureAudio(app, audio);
    1.75 -		
    1.76 -	app.start();
    1.77 -		
    1.78 -	System.out.println(video.getCanonicalPath());
    1.79 -	System.out.println(audio.getCanonicalPath());
    1.80 -    }
    1.81 -}
    1.82 -</java>
    1.83 -
    1.84 -
    1.85 -As you can see, to capture video and audio you use the
    1.86 +To capture video and audio you use the
    1.87  ''com.aurellem.capture.Capture'' class, which has two methods,
    1.88  ''captureAudio'' and ''captureVideo'', and the
    1.89  ''com.aurellem.capture.IsoTimer'' class, which sets the audio and
    1.90 -video framerate. 
    1.91 +video framerate.
    1.92  
    1.93  The steps are as simple as:
    1.94  
    1.95 @@ -106,7 +87,7 @@
    1.96      1.0 files generated by this method that exceed 2.0GB are invalid
    1.97      according to the AVI 1.0 spec (but many programs can still deal
    1.98      with them).  Thanks to Werner Randelshofer for his excellent work
    1.99 -    which made AVI file writer option possible.
   1.100 +    which made the AVI file writer option possible.
   1.101   
   1.102  3.) Any non-directory file ending in anything other than ".avi" will
   1.103      be processed through Xuggle.  Xuggle provides the option to use
   1.104 @@ -114,3 +95,424 @@
   1.105      system yourself in order to use this option. Please visit
   1.106      http://www.xuggle.com/ to learn how to do this.
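          +
          +In other words, the output format is chosen purely by the file
          +extension. A sketch (the file names are arbitrary, and ''app'' is
          +your application, as in the example below):
          +
          +<code java>
          +// ends in ".avi": the built-in AVI 1.0 writer (2.0GB limit per file)
          +Capture.captureVideo(app, new File("demo.avi"));
          +// any other extension is handed to Xuggle (which must be installed)
          +Capture.captureVideo(app, new File("demo.flv"));
          +</code>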
   1.107  
   1.108 +Note that you will not hear any sound if you choose to record sound to
   1.109 +a file.
   1.110 +
   1.111 +==== Basic Example ====
   1.112 +
   1.113 +Here is a complete example showing how to capture both audio and video
   1.114 +from one of jMonkeyEngine3's advanced demo applications.
   1.115 +
   1.116 +<code java>
   1.117 +import java.io.File;
   1.118 +import java.io.IOException;
   1.119 +
   1.120 +import jme3test.water.TestPostWater;
   1.121 +
   1.122 +import com.aurellem.capture.Capture;
   1.123 +import com.aurellem.capture.IsoTimer;
   1.124 +import com.jme3.app.SimpleApplication;
   1.125 +
   1.126 +
   1.127 +/**
   1.128 + * Demonstrates how to use basic Audio/Video capture with a
   1.129 + * jMonkeyEngine application. You can use these techniques to make
   1.130 + * high quality cutscenes or demo videos, even on very slow laptops.
   1.131 + * 
   1.132 + * @author Robert McIntyre
   1.133 + */
   1.134 +
   1.135 +public class Basic {
   1.136 +	
   1.137 +    public static void main(String[] ignore) throws IOException{
   1.138 +	File video = File.createTempFile("JME-water-video", ".avi");
   1.139 +	File audio = File.createTempFile("JME-water-audio", ".wav");
   1.140 +		
   1.141 +	SimpleApplication app = new TestPostWater();
   1.142 +	app.setTimer(new IsoTimer(60));
   1.143 +	app.setShowSettings(false);
   1.144 +		
   1.145 +	Capture.captureVideo(app, video);
   1.146 +	Capture.captureAudio(app, audio);
   1.147 +		
   1.148 +	app.start();
   1.149 +		
   1.150 +	System.out.println(video.getCanonicalPath());
   1.151 +	System.out.println(audio.getCanonicalPath());
   1.152 +    }
   1.153 +}
   1.154 +</code>
   1.155 +
   1.156 +==== How it works ====
   1.157 +
    1.158 +A standard JME3 application that extends ''SimpleApplication'' or
    1.159 +''Application'' tries as hard as it can to keep in sync with
    1.160 +//user-time//.  If a ball is rolling at 1 game-mile per game-hour in the
   1.161 +game, and you wait for one user-hour as measured by the clock on your
   1.162 +wall, then the ball should have traveled exactly one game-mile. In
   1.163 +order to keep sync with the real world, the game throttles its physics
   1.164 +engine and graphics display.  If the computations involved in running
   1.165 +the game are too intense, then the game will first skip frames, then
    1.166 +sacrifice physics accuracy.  If there are particularly demanding
   1.167 +computations, then you may only get 1 fps, and the ball may tunnel
   1.168 +through the floor or obstacles due to inaccurate physics simulation,
   1.169 +but after the end of one user-hour, that ball will have traveled one
   1.170 +game-mile.
   1.171 +
    1.172 +When we're recording video, we don't care whether game-time syncs
    1.173 +with user-time; we care whether the time in the recorded video
    1.174 +(video-time) syncs with user-time. To continue the analogy, if we
   1.175 +recorded the ball rolling at 1 game-mile per game-hour and watched the
   1.176 +video later, we would want to see 30 fps video of the ball rolling at
    1.177 +1 video-mile per //user-hour//. It doesn't matter how much user-time it
   1.178 +took to simulate that hour of game-time to make the high-quality
   1.179 +recording.
   1.180 +
    1.181 +The ''IsoTimer'' ignores real-time and always reports that the same amount
   1.182 +of time has passed every time it is called. That way, one can put code
   1.183 +to write each video/audio frame to a file without worrying about that
   1.184 +code itself slowing down the game to the point where the recording
   1.185 +would be useless.
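          +
          +To make this concrete, here is a minimal sketch of the idea (this
          +is not the actual ''IsoTimer'' source; it just implements the
          +abstract methods of jME3's ''com.jme3.system.Timer''):
          +
          +<code java>
          +import com.jme3.system.Timer;
          +
          +// Every update() pretends that exactly 1/framerate seconds have
          +// passed, no matter how much real time the frame actually took.
          +public class FixedRateTimer extends Timer {
          +    private final float framerate;
          +    private long ticks = 0;
          +
          +    public FixedRateTimer(float framerate){this.framerate = framerate;}
          +
          +    public void update()           {ticks++;} // one call per frame
          +    public float getTimePerFrame() {return 1.0f / framerate;}
          +    public float getFrameRate()    {return framerate;}
          +    public long getTime()          {return ticks;}
          +    public long getResolution()    {return (long) framerate;} // ticks per second
          +    public void reset()            {ticks = 0;}
          +}
          +</code>
          +
          +Attach such a timer with ''setTimer'' and game-time advances by the
          +same fixed amount every frame, however long each frame actually
          +takes to render and save.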
   1.186 +
   1.187 +
   1.188 +==== Advanced Example ==== 
   1.189 +
   1.190 +The package from aurellem.com was made for AI research and can do more
   1.191 +than just record a single stream of audio and video. You can use it
   1.192 +to:
   1.193 +
   1.194 +1.) Create multiple independent listeners that each hear the world
   1.195 +from their own perspective.
   1.196 +
   1.197 +2.) Process the sound data in any way you wish.
   1.198 +
   1.199 +3.) Do the same for visual data.
   1.200 +
   1.201 +Here is a more advanced example, which can also be found along with
   1.202 +other examples in the jmeCapture.jar file included in the
   1.203 +distribution.
   1.204 +
   1.205 +<code java>
   1.206 +package com.aurellem.capture.examples;
   1.207 +
   1.208 +import java.io.File;
   1.209 +import java.io.FileNotFoundException;
   1.210 +import java.io.IOException;
   1.211 +import java.lang.reflect.Field;
   1.212 +import java.nio.ByteBuffer;
   1.213 +
   1.214 +import javax.sound.sampled.AudioFormat;
   1.215 +
   1.216 +import org.tritonus.share.sampled.FloatSampleTools;
   1.217 +
   1.218 +import com.aurellem.capture.AurellemSystemDelegate;
   1.219 +import com.aurellem.capture.Capture;
   1.220 +import com.aurellem.capture.IsoTimer;
   1.221 +import com.aurellem.capture.audio.CompositeSoundProcessor;
   1.222 +import com.aurellem.capture.audio.MultiListener;
   1.223 +import com.aurellem.capture.audio.SoundProcessor;
   1.224 +import com.aurellem.capture.audio.WaveFileWriter;
   1.225 +import com.jme3.app.SimpleApplication;
   1.226 +import com.jme3.audio.AudioNode;
   1.227 +import com.jme3.audio.Listener;
   1.228 +import com.jme3.cinematic.MotionPath;
   1.229 +import com.jme3.cinematic.events.AbstractCinematicEvent;
   1.230 +import com.jme3.cinematic.events.MotionTrack;
   1.231 +import com.jme3.material.Material;
   1.232 +import com.jme3.math.ColorRGBA;
   1.233 +import com.jme3.math.FastMath;
   1.234 +import com.jme3.math.Quaternion;
   1.235 +import com.jme3.math.Vector3f;
   1.236 +import com.jme3.scene.Geometry;
   1.237 +import com.jme3.scene.Node;
   1.238 +import com.jme3.scene.shape.Box;
   1.239 +import com.jme3.scene.shape.Sphere;
   1.240 +import com.jme3.system.AppSettings;
   1.241 +import com.jme3.system.JmeSystem;
   1.242 +
   1.243 +/**
   1.244 + * 
   1.245 + * Demonstrates advanced use of the audio capture and recording
   1.246 + * features.  Multiple perspectives of the same scene are
   1.247 + * simultaneously rendered to different sound files.
   1.248 + * 
   1.249 + * A key limitation of the way multiple listeners are implemented is
   1.250 + * that only 3D positioning effects are realized for listeners other
   1.251 + * than the main LWJGL listener.  This means that audio effects such
   1.252 + * as environment settings will *not* be heard on any auxiliary
   1.253 + * listeners, though sound attenuation will work correctly.
   1.254 + * 
   1.255 + * Multiple listeners as realized here might be used to make AI
   1.256 + * entities that can each hear the world from their own perspective.
   1.257 + * 
   1.258 + * @author Robert McIntyre
   1.259 + */
   1.260 +
   1.261 +public class Advanced extends SimpleApplication {
   1.262 +
   1.263 +    /**
   1.264 +     * You will see three grey cubes, a blue sphere, and a path which
   1.265 +     * circles each cube.  The blue sphere is generating a constant
   1.266 +     * monotone sound as it moves along the track.  Each cube is
   1.267 +     * listening for sound; when a cube hears sound whose intensity is
   1.268 +     * greater than a certain threshold, it changes its color from
   1.269 +     * grey to green.
   1.270 +     * 
   1.271 +     *  Each cube is also saving whatever it hears to a file.  The
   1.272 +     *  scene from the perspective of the viewer is also saved to a
   1.273 +     *  video file.  When you listen to each of the sound files
   1.274 +     *  alongside the video, the sound will get louder when the sphere
   1.275 +     *  approaches the cube that generated that sound file.  This
   1.276 +     *  shows that each listener is hearing the world from its own
   1.277 +     *  perspective.
   1.278 +     * 
   1.279 +     */
   1.280 +    public static void main(String[] args) {
   1.281 +	Advanced app = new Advanced();
   1.282 +	AppSettings settings = new AppSettings(true);
   1.283 +	settings.setAudioRenderer(AurellemSystemDelegate.SEND);
   1.284 +	JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
   1.285 +	app.setSettings(settings);
   1.286 +	app.setShowSettings(false);
   1.287 +	app.setPauseOnLostFocus(false);
   1.288 +		
   1.289 +	try {
   1.290 +	    Capture.captureVideo(app, File.createTempFile("advanced",".avi"));
   1.291 +	    Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
   1.292 +	}
   1.293 +	catch (IOException e) {e.printStackTrace();}
   1.294 +		
   1.295 +	app.start();
   1.296 +    }
   1.297 +
   1.298 +	
   1.299 +    private Geometry bell;
   1.300 +    private Geometry ear1;
   1.301 +    private Geometry ear2;
   1.302 +    private Geometry ear3;
   1.303 +    private AudioNode music;
   1.304 +    private MotionTrack motionControl;
   1.305 +		
   1.306 +    private Geometry makeEar(Node root, Vector3f position){
   1.307 +	Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
   1.308 +	Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
   1.309 +	ear.setLocalTranslation(position);
   1.310 +	mat.setColor("Color", ColorRGBA.Green);
   1.311 +	ear.setMaterial(mat);
   1.312 +	root.attachChild(ear);
   1.313 +	return ear;
   1.314 +    } 
   1.315 +
   1.316 +    private Vector3f[] path = new Vector3f[]{
   1.317 +	// loop 1
   1.318 +	new Vector3f(0, 0, 0),
   1.319 +	new Vector3f(0, 0, -10),
   1.320 +	new Vector3f(-2, 0, -14),
   1.321 +	new Vector3f(-6, 0, -20),
   1.322 +	new Vector3f(0, 0, -26),
   1.323 +	new Vector3f(6, 0, -20),
   1.324 +	new Vector3f(0, 0, -14),
   1.325 +	new Vector3f(-6, 0, -20),
   1.326 +	new Vector3f(0, 0, -26),
   1.327 +	new Vector3f(6, 0, -20),
   1.328 +	// loop 2
   1.329 +	new Vector3f(5, 0, -5),
   1.330 +	new Vector3f(7, 0, 1.5f),
   1.331 +	new Vector3f(14, 0, 2),
   1.332 +	new Vector3f(20, 0, 6),
   1.333 +	new Vector3f(26, 0, 0),
   1.334 +	new Vector3f(20, 0, -6),
   1.335 +	new Vector3f(14, 0, 0),
   1.336 +	new Vector3f(20, 0, 6),
   1.337 +	new Vector3f(26, 0, 0),
   1.338 +	new Vector3f(20, 0, -6),
   1.339 +	new Vector3f(14, 0, 0),
   1.340 +	// loop 3
   1.341 +	new Vector3f(8, 0, 7.5f),
   1.342 +	new Vector3f(7, 0, 10.5f),
   1.343 +	new Vector3f(6, 0, 20),
   1.344 +	new Vector3f(0, 0, 26),
   1.345 +	new Vector3f(-6, 0, 20),
   1.346 +	new Vector3f(0, 0, 14),
   1.347 +	new Vector3f(6, 0, 20),
   1.348 +	new Vector3f(0, 0, 26),
   1.349 +	new Vector3f(-6, 0, 20),
   1.350 +	new Vector3f(0, 0, 14),
   1.351 +	// begin ellipse
   1.352 +	new Vector3f(16, 5, 20),
   1.353 +	new Vector3f(0, 0, 26),
   1.354 +	new Vector3f(-16, -10, 20),
   1.355 +	new Vector3f(0, 0, 14),
   1.356 +	new Vector3f(16, 20, 20),
   1.357 +	new Vector3f(0, 0, 26),
   1.358 +	new Vector3f(-10, -25, 10),
   1.359 +	new Vector3f(-10, 0, 0),
   1.360 +	// come at me!
   1.361 +	new Vector3f(-28.00242f, 48.005623f, -34.648228f),
   1.362 +	new Vector3f(0, 0 , -20),
   1.363 +    };
   1.364 +
   1.365 +    private void createScene() {
   1.366 +	Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
   1.367 +	bell = new Geometry( "sound-emitter" , new Sphere(15,15,1));
   1.368 +	mat.setColor("Color", ColorRGBA.Blue);
   1.369 +	bell.setMaterial(mat);
   1.370 +	rootNode.attachChild(bell);
   1.371 +
   1.372 +	ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20));
   1.373 +	ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20));
   1.374 +	ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0));
   1.375 +
   1.376 +	MotionPath track = new MotionPath();
   1.377 +
   1.378 +	for (Vector3f v : path){
   1.379 +	    track.addWayPoint(v);
   1.380 +	}
   1.381 +	track.setCurveTension(0.80f);
   1.382 +
   1.383 +	motionControl = new MotionTrack(bell,track);
   1.384 +		
   1.385 +	// for now, use reflection to change the timer... 
   1.386 +	// motionControl.setTimer(new IsoTimer(60));
   1.387 +	try {
   1.388 +	    Field timerField;
   1.389 +	    timerField = AbstractCinematicEvent.class.getDeclaredField("timer");
   1.390 +	    timerField.setAccessible(true);
   1.391 +	    try {timerField.set(motionControl, new IsoTimer(60));} 
   1.392 +	    catch (IllegalArgumentException e) {e.printStackTrace();} 
   1.393 +	    catch (IllegalAccessException e) {e.printStackTrace();}
   1.394 +	} 
   1.395 +	catch (SecurityException e) {e.printStackTrace();} 
   1.396 +	catch (NoSuchFieldException e) {e.printStackTrace();}
   1.397 +		
   1.398 +	motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
   1.399 +	motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
   1.400 +	motionControl.setInitialDuration(20f);
   1.401 +	motionControl.setSpeed(1f);
   1.402 +
   1.403 +	track.enableDebugShape(assetManager, rootNode);
   1.404 +	positionCamera();
   1.405 +    }
   1.406 +
   1.407 +
   1.408 +    private void positionCamera(){
   1.409 +	this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
   1.410 +	this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
   1.411 +    }
   1.412 +
   1.413 +    private void initAudio() {
   1.414 +	org.lwjgl.input.Mouse.setGrabbed(false);	
   1.415 +	music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);
   1.416 +
   1.417 +	rootNode.attachChild(music);
   1.418 +	audioRenderer.playSource(music);
   1.419 +	music.setPositional(true);
   1.420 +	music.setVolume(1f);
   1.421 +	music.setReverbEnabled(false);
   1.422 +	music.setDirectional(false);
   1.423 +	music.setMaxDistance(200.0f);
   1.424 +	music.setRefDistance(1f);
   1.425 +	//music.setRolloffFactor(1f);
   1.426 +	music.setLooping(false);
   1.427 +	audioRenderer.pauseSource(music); 
   1.428 +    }
   1.429 +
   1.430 +    public class Dancer implements SoundProcessor {
   1.431 +	Geometry entity;
   1.432 +	float scale = 2;
   1.433 +	public Dancer(Geometry entity){
   1.434 +	    this.entity = entity;
   1.435 +	}
   1.436 +
   1.437 +	/**
    1.438 +	 * This method is a no-op since there is no state to clean up.
   1.439 +	 */
   1.440 +	public void cleanup() {}
   1.441 +
   1.442 +
   1.443 +	/**
   1.444 +	 * Respond to sound!  This is the brain of an AI entity that 
    1.445 +	 * hears its surroundings and reacts to them.
   1.446 +	 */
   1.447 +	public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
   1.448 +	    audioSamples.clear();
   1.449 +	    byte[] data = new byte[numSamples];
   1.450 +	    float[] out = new float[numSamples];
   1.451 +	    audioSamples.get(data);
   1.452 +	    FloatSampleTools.byte2floatInterleaved(data, 0, out, 0, 
   1.453 +						   numSamples/format.getFrameSize(), format);
   1.454 +
   1.455 +	    float max = Float.NEGATIVE_INFINITY;
   1.456 +	    for (float f : out){if (f > max) max = f;}
   1.457 +	    audioSamples.clear();
   1.458 +
   1.459 +	    if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
   1.460 +	    else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
   1.461 +	}
   1.462 +    }
   1.463 +
   1.464 +    private void prepareEar(Geometry ear, int n){
   1.465 +	if (this.audioRenderer instanceof MultiListener){
   1.466 +	    MultiListener rf = (MultiListener)this.audioRenderer;
   1.467 +
   1.468 +	    Listener auxListener = new Listener();
   1.469 +	    auxListener.setLocation(ear.getLocalTranslation());
   1.470 +
   1.471 +	    rf.addListener(auxListener);
   1.472 +	    WaveFileWriter aux = null;
   1.473 +
   1.474 +	    try {aux = new WaveFileWriter(new File("/home/r/tmp/ear"+n+".wav"));} 
   1.475 +	    catch (FileNotFoundException e) {e.printStackTrace();}
   1.476 +
   1.477 +	    rf.registerSoundProcessor(auxListener, 
   1.478 +				      new CompositeSoundProcessor(new Dancer(ear), aux));
   1.479 +	}   
   1.480 +    }
   1.481 +
   1.482 +
   1.483 +    public void simpleInitApp() {
   1.484 +	this.setTimer(new IsoTimer(60));
   1.485 +	initAudio();
   1.486 +		
   1.487 +	createScene();
   1.488 +
   1.489 +	prepareEar(ear1, 1);
    1.490 +	prepareEar(ear2, 2);
    1.491 +	prepareEar(ear3, 3);
   1.492 +
   1.493 +	motionControl.play();
   1.494 +    }
   1.495 +
   1.496 +    public void simpleUpdate(float tpf) {
   1.497 +	if (music.getStatus() != AudioNode.Status.Playing){
   1.498 +	    music.play();
   1.499 +	}
   1.500 +	Vector3f loc = cam.getLocation();
   1.501 +	Quaternion rot = cam.getRotation();
   1.502 +	listener.setLocation(loc);
   1.503 +	listener.setRotation(rot);
   1.504 +	music.setLocalTranslation(bell.getLocalTranslation());
   1.505 +    }
   1.506 +
   1.507 +}
   1.508 +</code>
   1.509 +
   1.510 +<iframe width="420" height="315" 
   1.511 +	src="http://www.youtube.com/embed/oCEfK0yhDrY" 
   1.512 +	frameborder="0" allowfullscreen>
   1.513 +</iframe>
   1.514 +
   1.515 +===== More Information =====
   1.516 +
    1.517 +This is the old page showing the first version of this idea:
   1.518 +http://aurellem.org/cortex/html/capture-video.html
   1.519 +
   1.520 +All source code can be found here:
   1.521 +
   1.522 +http://hg.bortreb.com/audio-send
   1.523 +http://hg.bortreb.com/jmeCapture
   1.524 + 
   1.525 +More information on the modifications to OpenAL to support multiple
    1.526 +listeners can be found here:
   1.527 +
   1.528 +http://aurellem.org/audio-send/html/ear.html