jmeCapture: view src/com/aurellem/capture/examples/Advanced.java @ 50:8a091a5f48fa
documentation for basic use case

author    Robert McIntyre <rlm@mit.edu>
date      Sat, 03 Dec 2011 13:35:13 -0600
parents   121b6d7e4d3f
children  d799a0278cc9
package com.aurellem.capture.examples;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;

import javax.sound.sampled.AudioFormat;

import org.tritonus.share.sampled.FloatSampleTools;

import com.aurellem.capture.AurellemSystemDelegate;
import com.aurellem.capture.Capture;
import com.aurellem.capture.IsoTimer;
import com.aurellem.capture.audio.CompositeSoundProcessor;
import com.aurellem.capture.audio.MultiListener;
import com.aurellem.capture.audio.SoundProcessor;
import com.aurellem.capture.audio.WaveFileWriter;
import com.jme3.app.SimpleApplication;
import com.jme3.audio.AudioNode;
import com.jme3.audio.Listener;
import com.jme3.cinematic.MotionPath;
import com.jme3.cinematic.events.AbstractCinematicEvent;
import com.jme3.cinematic.events.MotionTrack;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.FastMath;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.shape.Box;
import com.jme3.scene.shape.Sphere;
import com.jme3.system.AppSettings;
import com.jme3.system.JmeSystem;

/**
 * Demonstrates advanced use of the audio capture and recording features.
 * Multiple perspectives of the same scene are simultaneously rendered to
 * different sound files.
 *
 * A key limitation of the way multiple listeners are implemented is that
 * only 3D positioning effects are realized for listeners other than the
 * main LWJGL listener. This means that audio effects such as environment
 * settings will *not* be heard on any auxiliary listeners, though sound
 * attenuation will work correctly.
 *
 * Multiple listeners as realized here might be used to make AI entities
 * that can each hear the world from their own perspective.
 *
 * @author Robert McIntyre
 */
public class Advanced extends SimpleApplication {

    private Geometry bell;
    private Geometry ear1;
    private Geometry ear2;
    private Geometry ear3;
    private AudioNode music;
    private MotionTrack motionControl;

    public static void main(String[] args) {
        Advanced app = new Advanced();
        AppSettings settings = new AppSettings(true);
        settings.setAudioRenderer(AurellemSystemDelegate.SEND);
        JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
        app.setSettings(settings);
        app.setShowSettings(false);
        app.setPauseOnLostFocus(false);

        try {
            Capture.captureVideo(app, File.createTempFile("advanced", ".avi"));
            Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
        }
        catch (IOException e) {e.printStackTrace();}

        app.start();
    }

    private Geometry makeEar(Node root, Vector3f position){
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
        ear.setLocalTranslation(position);
        mat.setColor("Color", ColorRGBA.Green);
        ear.setMaterial(mat);
        root.attachChild(ear);
        return ear;
    }

    private Vector3f[] path = new Vector3f[]{
        // loop 1
        new Vector3f(0, 0, 0),
        new Vector3f(0, 0, -10),
        new Vector3f(-2, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        new Vector3f(0, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        // loop 2
        new Vector3f(5, 0, -5),
        new Vector3f(7, 0, 1.5f),
        new Vector3f(14, 0, 2),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        // loop 3
        new Vector3f(8, 0, 7.5f),
        new Vector3f(7, 0, 10.5f),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        // begin ellipse
        new Vector3f(16, 5, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-16, -10, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(16, 20, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-10, -25, 10),
        new Vector3f(-10, 0, 0),
        // come at me!
        new Vector3f(-28.00242f, 48.005623f, -34.648228f),
        new Vector3f(0, 0, -20),
    };

    private void createScene() {
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        bell = new Geometry("sound-emitter", new Sphere(15, 15, 1));
        mat.setColor("Color", ColorRGBA.Blue);
        bell.setMaterial(mat);
        rootNode.attachChild(bell);

        ear1 = makeEar(rootNode, new Vector3f(0, 0, -20));
        ear2 = makeEar(rootNode, new Vector3f(0, 0, 20));
        ear3 = makeEar(rootNode, new Vector3f(20, 0, 0));

        MotionPath track = new MotionPath();

        for (Vector3f v : path){
            track.addWayPoint(v);
        }
        track.setCurveTension(0.80f);

        motionControl = new MotionTrack(bell, track);

        // for now, use reflection to change the timer...
        // motionControl.setTimer(new IsoTimer(60));
        try {
            Field timerField;
            timerField = AbstractCinematicEvent.class.getDeclaredField("timer");
            timerField.setAccessible(true);
            try {timerField.set(motionControl, new IsoTimer(60));}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
        }
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
        motionControl.setRotation(
            new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
        motionControl.setInitialDuration(20f);
        motionControl.setSpeed(1f);

        track.enableDebugShape(assetManager, rootNode);
        positionCamera();
    }

    private void positionCamera(){
        this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
        this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
    }

    private void initAudio() {
        org.lwjgl.input.Mouse.setGrabbed(false);
        music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);

        rootNode.attachChild(music);
        audioRenderer.playSource(music);
        music.setPositional(true);
        music.setVolume(1f);
        music.setReverbEnabled(false);
        music.setDirectional(false);
        music.setMaxDistance(200.0f);
        music.setRefDistance(1f);
        //music.setRolloffFactor(1f);
        music.setLooping(false);
        audioRenderer.pauseSource(music);
    }

    public class Dancer implements SoundProcessor {
        Geometry entity;
        float scale = 2;

        public Dancer(Geometry entity){
            this.entity = entity;
        }

        /**
         * This method is a no-op since there is no state to clean up.
         */
        public void cleanup() {}

        /**
         * Respond to sound! This is the brain of an AI entity that
         * hears its surroundings and reacts to them.
         */
        public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
            audioSamples.clear();
            byte[] data = new byte[numSamples];
            float[] out = new float[numSamples];
            audioSamples.get(data);
            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
                numSamples/format.getFrameSize(), format);

            float max = Float.NEGATIVE_INFINITY;
            for (float f : out){if (f > max) max = f;}
            audioSamples.clear();

            if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
            else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
        }
    }

    private void prepareEar(Geometry ear, int n){
        if (this.audioRenderer instanceof MultiListener){
            MultiListener rf = (MultiListener)this.audioRenderer;

            Listener auxListener = new Listener();
            auxListener.setLocation(ear.getLocalTranslation());

            rf.addListener(auxListener);
            WaveFileWriter aux = null;

            try {aux = new WaveFileWriter(new File("/home/r/tmp/ear"+n+".wav"));}
            catch (FileNotFoundException e) {e.printStackTrace();}

            rf.registerSoundProcessor(auxListener,
                new CompositeSoundProcessor(new Dancer(ear), aux));
        }
    }

    public void simpleInitApp() {
        this.setTimer(new IsoTimer(60));
        initAudio();

        createScene();

        // give each ear its own output file (ear1.wav, ear2.wav, ear3.wav)
        prepareEar(ear1, 1);
        prepareEar(ear2, 2);
        prepareEar(ear3, 3);

        motionControl.play();
    }

    public void simpleUpdate(float tpf) {
        if (music.getStatus() != AudioNode.Status.Playing){
            music.play();
        }
        Vector3f loc = cam.getLocation();
        Quaternion rot = cam.getRotation();
        listener.setLocation(loc);
        listener.setRotation(rot);
        music.setLocalTranslation(bell.getLocalTranslation());
    }
}
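
For reference, the basic single-listener use case that this example builds on reduces to the handful of calls made in Advanced.main() above. The sketch below is illustrative only: the class name BasicCaptureSketch and the output paths basic.avi / basic.wav are assumptions, not part of the repository; the Capture, AurellemSystemDelegate, AppSettings, and JmeSystem calls are the same ones used in the example.

package com.aurellem.capture.examples;

import java.io.File;
import java.io.IOException;

import com.aurellem.capture.AurellemSystemDelegate;
import com.aurellem.capture.Capture;
import com.jme3.app.SimpleApplication;
import com.jme3.system.AppSettings;
import com.jme3.system.JmeSystem;

// Hypothetical driver class; any SimpleApplication can stand in for Advanced.
public class BasicCaptureSketch {
    public static void main(String[] args) throws IOException {
        SimpleApplication app = new Advanced();

        // Route audio through the aurellem renderer so capture can tap it.
        AppSettings settings = new AppSettings(true);
        settings.setAudioRenderer(AurellemSystemDelegate.SEND);
        JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
        app.setSettings(settings);
        app.setShowSettings(false);
        app.setPauseOnLostFocus(false);

        // Register video and audio capture before starting the application;
        // the output file names here are placeholders.
        Capture.captureVideo(app, new File("basic.avi"));
        Capture.captureAudio(app, new File("basic.wav"));

        app.start();
    }
}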