diff src/com/aurellem/capture/examples/Advanced.java @ 56:afc437f637bd

improved formatting
author Robert McIntyre <rlm@mit.edu>
date Sat, 03 Dec 2011 19:25:27 -0600
parents d799a0278cc9
children f5e52169f056
line wrap: on
line diff
     1.1 --- a/src/com/aurellem/capture/examples/Advanced.java	Sat Dec 03 19:18:38 2011 -0600
     1.2 +++ b/src/com/aurellem/capture/examples/Advanced.java	Sat Dec 03 19:25:27 2011 -0600
     1.3 @@ -37,266 +37,266 @@
     1.4  
     1.5  /**
     1.6   * 
     1.7 - * Demonstrates advanced use of the audio capture and recording features.
     1.8 - * Multiple perspectives of the same scene are simultaneously rendered to 
     1.9 - * different sound files.  
    1.10 + * Demonstrates advanced use of the audio capture and recording
    1.11 + * features.  Multiple perspectives of the same scene are
    1.12 + * simultaneously rendered to different sound files.
    1.13   * 
    1.14 - * A key limitation of the way multiple listeners are implemented is that 
    1.15 - * only 3D positioning effects are realized for listeners other than the
    1.16 - * main LWJGL listener.  This means that audio effects such as environment
    1.17 - * settings will *not* be heard on any auxiliary listeners, though sound 
    1.18 - * attenuation will work correctly.  
    1.19 + * A key limitation of the way multiple listeners are implemented is
    1.20 + * that only 3D positioning effects are realized for listeners other
    1.21 + * than the main LWJGL listener.  This means that audio effects such
    1.22 + * as environment settings will *not* be heard on any auxiliary
    1.23 + * listeners, though sound attenuation will work correctly.
    1.24   * 
    1.25 - * Multiple listeners as realized here might be used to make AI entities 
    1.26 - * that can each hear the world from their own perspective.  
    1.27 + * Multiple listeners as realized here might be used to make AI
    1.28 + * entities that can each hear the world from their own perspective.
    1.29   * 
    1.30   * @author Robert McIntyre
    1.31   */
    1.32  
    1.33  public class Advanced extends SimpleApplication {
    1.34  
    1.35 -	/**
    1.36 -	 * You will see three grey cubes, a blue sphere, and a path
    1.37 -	 * which circles each cube.  The blue sphere is generating a
    1.38 -	 * constant monotone sound as it moves along the track.  Each
    1.39 -	 * cube is listening for sound; when a cube hears sound whose
    1.40 -	 * intensity is greater than a certain threshold, it changes
    1.41 -	 * its color from grey to green.
    1.42 -	 * 
    1.43 -	 *  Each cube is also saving whatever it hears to a file.  The
    1.44 -	 *  scene from the perspective of the viewer is also saved to
    1.45 -	 *  a video file.  When you listen to each of the sound files
    1.46 -	 *  alongside the video, the sound will get louder when the
    1.47 -	 *  sphere approaches the cube that generated that sound file.
    1.48 -	 *  This shows that each listener is hearing the world from
    1.49 -	 *  its own perspective.
    1.50 -	 * 
    1.51 -	 */
    1.52 -	public static void main(String[] args) {
    1.53 -		Advanced app = new Advanced();
    1.54 -		AppSettings settings = new AppSettings(true);
    1.55 -		settings.setAudioRenderer(AurellemSystemDelegate.SEND);
    1.56 -		JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
    1.57 -		app.setSettings(settings);
    1.58 -		app.setShowSettings(false);
    1.59 -		app.setPauseOnLostFocus(false);
    1.60 +    /**
    1.61 +     * You will see three grey cubes, a blue sphere, and a path which
    1.62 +     * circles each cube.  The blue sphere is generating a constant
    1.63 +     * monotone sound as it moves along the track.  Each cube is
    1.64 +     * listening for sound; when a cube hears sound whose intensity is
    1.65 +     * greater than a certain threshold, it changes its color from
    1.66 +     * grey to green.
    1.67 +     * 
    1.68 +     *  Each cube is also saving whatever it hears to a file.  The
    1.69 +     *  scene from the perspective of the viewer is also saved to a
    1.70 +     *  video file.  When you listen to each of the sound files
    1.71 +     *  alongside the video, the sound will get louder when the sphere
    1.72 +     *  approaches the cube that generated that sound file.  This
    1.73 +     *  shows that each listener is hearing the world from its own
    1.74 +     *  perspective.
    1.75 +     * 
    1.76 +     */
    1.77 +    public static void main(String[] args) {
    1.78 +	Advanced app = new Advanced();
    1.79 +	AppSettings settings = new AppSettings(true);
    1.80 +	settings.setAudioRenderer(AurellemSystemDelegate.SEND);
    1.81 +	JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
    1.82 +	app.setSettings(settings);
    1.83 +	app.setShowSettings(false);
    1.84 +	app.setPauseOnLostFocus(false);
    1.85  		
    1.86 -		try {
    1.87 -			Capture.captureVideo(app, File.createTempFile("advanced",".avi"));
    1.88 -			Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
    1.89 -			}
    1.90 -		catch (IOException e) {e.printStackTrace();}
    1.91 +	try {
    1.92 +	    Capture.captureVideo(app, File.createTempFile("advanced",".avi"));
    1.93 +	    Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
    1.94 +	}
    1.95 +	catch (IOException e) {e.printStackTrace();}
    1.96  		
    1.97 -		app.start();
    1.98 +	app.start();
    1.99 +    }
   1.100 +
   1.101 +	
   1.102 +    private Geometry bell;
   1.103 +    private Geometry ear1;
   1.104 +    private Geometry ear2;
   1.105 +    private Geometry ear3;
   1.106 +    private AudioNode music;
   1.107 +    private MotionTrack motionControl;
   1.108 +		
   1.109 +    private Geometry makeEar(Node root, Vector3f position){
   1.110 +	Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
   1.111 +	Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
   1.112 +	ear.setLocalTranslation(position);
   1.113 +	mat.setColor("Color", ColorRGBA.Green);
   1.114 +	ear.setMaterial(mat);
   1.115 +	root.attachChild(ear);
   1.116 +	return ear;
   1.117 +    } 
   1.118 +
   1.119 +    private Vector3f[] path = new Vector3f[]{
   1.120 +	// loop 1
   1.121 +	new Vector3f(0, 0, 0),
   1.122 +	new Vector3f(0, 0, -10),
   1.123 +	new Vector3f(-2, 0, -14),
   1.124 +	new Vector3f(-6, 0, -20),
   1.125 +	new Vector3f(0, 0, -26),
   1.126 +	new Vector3f(6, 0, -20),
   1.127 +	new Vector3f(0, 0, -14),
   1.128 +	new Vector3f(-6, 0, -20),
   1.129 +	new Vector3f(0, 0, -26),
   1.130 +	new Vector3f(6, 0, -20),
   1.131 +	// loop 2
   1.132 +	new Vector3f(5, 0, -5),
   1.133 +	new Vector3f(7, 0, 1.5f),
   1.134 +	new Vector3f(14, 0, 2),
   1.135 +	new Vector3f(20, 0, 6),
   1.136 +	new Vector3f(26, 0, 0),
   1.137 +	new Vector3f(20, 0, -6),
   1.138 +	new Vector3f(14, 0, 0),
   1.139 +	new Vector3f(20, 0, 6),
   1.140 +	new Vector3f(26, 0, 0),
   1.141 +	new Vector3f(20, 0, -6),
   1.142 +	new Vector3f(14, 0, 0),
   1.143 +	// loop 3
   1.144 +	new Vector3f(8, 0, 7.5f),
   1.145 +	new Vector3f(7, 0, 10.5f),
   1.146 +	new Vector3f(6, 0, 20),
   1.147 +	new Vector3f(0, 0, 26),
   1.148 +	new Vector3f(-6, 0, 20),
   1.149 +	new Vector3f(0, 0, 14),
   1.150 +	new Vector3f(6, 0, 20),
   1.151 +	new Vector3f(0, 0, 26),
   1.152 +	new Vector3f(-6, 0, 20),
   1.153 +	new Vector3f(0, 0, 14),
   1.154 +	// begin ellipse
   1.155 +	new Vector3f(16, 5, 20),
   1.156 +	new Vector3f(0, 0, 26),
   1.157 +	new Vector3f(-16, -10, 20),
   1.158 +	new Vector3f(0, 0, 14),
   1.159 +	new Vector3f(16, 20, 20),
   1.160 +	new Vector3f(0, 0, 26),
   1.161 +	new Vector3f(-10, -25, 10),
   1.162 +	new Vector3f(-10, 0, 0),
   1.163 +	// come at me!
   1.164 +	new Vector3f(-28.00242f, 48.005623f, -34.648228f),
   1.165 +	new Vector3f(0, 0 , -20),
   1.166 +    };
   1.167 +
   1.168 +    private void createScene() {
   1.169 +	Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
   1.170 +	bell = new Geometry( "sound-emitter" , new Sphere(15,15,1));
   1.171 +	mat.setColor("Color", ColorRGBA.Blue);
   1.172 +	bell.setMaterial(mat);
   1.173 +	rootNode.attachChild(bell);
   1.174 +
   1.175 +	ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20));
   1.176 +	ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20));
   1.177 +	ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0));
   1.178 +
   1.179 +	MotionPath track = new MotionPath();
   1.180 +
   1.181 +	for (Vector3f v : path){
   1.182 +	    track.addWayPoint(v);
   1.183 +	}
   1.184 +	track.setCurveTension(0.80f);
   1.185 +
   1.186 +	motionControl = new MotionTrack(bell,track);
   1.187 +		
   1.188 +	// for now, use reflection to change the timer... 
   1.189 +	// motionControl.setTimer(new IsoTimer(60));
   1.190 +	try {
   1.191 +	    Field timerField;
   1.192 +	    timerField = AbstractCinematicEvent.class.getDeclaredField("timer");
   1.193 +	    timerField.setAccessible(true);
   1.194 +	    try {timerField.set(motionControl, new IsoTimer(60));} 
   1.195 +	    catch (IllegalArgumentException e) {e.printStackTrace();} 
   1.196 +	    catch (IllegalAccessException e) {e.printStackTrace();}
   1.197 +	} 
   1.198 +	catch (SecurityException e) {e.printStackTrace();} 
   1.199 +	catch (NoSuchFieldException e) {e.printStackTrace();}
   1.200 +		
   1.201 +	motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
   1.202 +	motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
   1.203 +	motionControl.setInitialDuration(20f);
   1.204 +	motionControl.setSpeed(1f);
   1.205 +
   1.206 +	track.enableDebugShape(assetManager, rootNode);
   1.207 +	positionCamera();
   1.208 +    }
   1.209 +
   1.210 +
   1.211 +    private void positionCamera(){
   1.212 +	this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
   1.213 +	this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
   1.214 +    }
   1.215 +
   1.216 +    private void initAudio() {
   1.217 +	org.lwjgl.input.Mouse.setGrabbed(false);	
   1.218 +	music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);
   1.219 +
   1.220 +	rootNode.attachChild(music);
   1.221 +	audioRenderer.playSource(music);
   1.222 +	music.setPositional(true);
   1.223 +	music.setVolume(1f);
   1.224 +	music.setReverbEnabled(false);
   1.225 +	music.setDirectional(false);
   1.226 +	music.setMaxDistance(200.0f);
   1.227 +	music.setRefDistance(1f);
   1.228 +	//music.setRolloffFactor(1f);
   1.229 +	music.setLooping(false);
   1.230 +	audioRenderer.pauseSource(music); 
   1.231 +    }
   1.232 +
   1.233 +    public class Dancer implements SoundProcessor {
   1.234 +	Geometry entity;
   1.235 +	float scale = 2;
   1.236 +	public Dancer(Geometry entity){
   1.237 +	    this.entity = entity;
   1.238  	}
   1.239  
   1.240 -	
   1.241 -	private Geometry bell;
   1.242 -	private Geometry ear1;
   1.243 -	private Geometry ear2;
   1.244 -	private Geometry ear3;
   1.245 -	private AudioNode music;
   1.246 -	private MotionTrack motionControl;
   1.247 +	/**
   1.248 +	 * this method is irrelevant since there is no state to cleanup.
   1.249 +	 */
   1.250 +	public void cleanup() {}
   1.251 +
   1.252 +
   1.253 +	/**
   1.254 +	 * Respond to sound!  This is the brain of an AI entity that 
    1.255 +	 * hears its surroundings and reacts to them.
   1.256 +	 */
   1.257 +	public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
   1.258 +	    audioSamples.clear();
   1.259 +	    byte[] data = new byte[numSamples];
   1.260 +	    float[] out = new float[numSamples];
   1.261 +	    audioSamples.get(data);
   1.262 +	    FloatSampleTools.byte2floatInterleaved(data, 0, out, 0, 
   1.263 +						   numSamples/format.getFrameSize(), format);
   1.264 +
   1.265 +	    float max = Float.NEGATIVE_INFINITY;
   1.266 +	    for (float f : out){if (f > max) max = f;}
   1.267 +	    audioSamples.clear();
   1.268 +
   1.269 +	    if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
   1.270 +	    else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
   1.271 +	}
   1.272 +    }
   1.273 +
   1.274 +    private void prepareEar(Geometry ear, int n){
   1.275 +	if (this.audioRenderer instanceof MultiListener){
   1.276 +	    MultiListener rf = (MultiListener)this.audioRenderer;
   1.277 +
   1.278 +	    Listener auxListener = new Listener();
   1.279 +	    auxListener.setLocation(ear.getLocalTranslation());
   1.280 +
   1.281 +	    rf.addListener(auxListener);
   1.282 +	    WaveFileWriter aux = null;
   1.283 +
   1.284 +	    try {aux = new WaveFileWriter(new File("/home/r/tmp/ear"+n+".wav"));} 
   1.285 +	    catch (FileNotFoundException e) {e.printStackTrace();}
   1.286 +
   1.287 +	    rf.registerSoundProcessor(auxListener, 
   1.288 +				      new CompositeSoundProcessor(new Dancer(ear), aux));
   1.289 +	}   
   1.290 +    }
   1.291 +
   1.292 +
   1.293 +    public void simpleInitApp() {
   1.294 +	this.setTimer(new IsoTimer(60));
   1.295 +	initAudio();
   1.296  		
   1.297 -	private Geometry makeEar(Node root, Vector3f position){
   1.298 -		Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
   1.299 -		Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
   1.300 -		ear.setLocalTranslation(position);
   1.301 -		mat.setColor("Color", ColorRGBA.Green);
   1.302 -		ear.setMaterial(mat);
   1.303 -		root.attachChild(ear);
   1.304 -		return ear;
   1.305 -	} 
   1.306 +	createScene();
   1.307  
   1.308 -	private Vector3f[] path = new Vector3f[]{
   1.309 -			// loop 1
   1.310 -			new Vector3f(0, 0, 0),
   1.311 -			new Vector3f(0, 0, -10),
   1.312 -			new Vector3f(-2, 0, -14),
   1.313 -			new Vector3f(-6, 0, -20),
   1.314 -			new Vector3f(0, 0, -26),
   1.315 -			new Vector3f(6, 0, -20),
   1.316 -			new Vector3f(0, 0, -14),
   1.317 -			new Vector3f(-6, 0, -20),
   1.318 -			new Vector3f(0, 0, -26),
   1.319 -			new Vector3f(6, 0, -20),
   1.320 -			// loop 2
   1.321 -			new Vector3f(5, 0, -5),
   1.322 -			new Vector3f(7, 0, 1.5f),
   1.323 -			new Vector3f(14, 0, 2),
   1.324 -			new Vector3f(20, 0, 6),
   1.325 -			new Vector3f(26, 0, 0),
   1.326 -			new Vector3f(20, 0, -6),
   1.327 -			new Vector3f(14, 0, 0),
   1.328 -			new Vector3f(20, 0, 6),
   1.329 -			new Vector3f(26, 0, 0),
   1.330 -			new Vector3f(20, 0, -6),
   1.331 -			new Vector3f(14, 0, 0),
   1.332 -			// loop 3
   1.333 -			new Vector3f(8, 0, 7.5f),
   1.334 -			new Vector3f(7, 0, 10.5f),
   1.335 -			new Vector3f(6, 0, 20),
   1.336 -			new Vector3f(0, 0, 26),
   1.337 -			new Vector3f(-6, 0, 20),
   1.338 -			new Vector3f(0, 0, 14),
   1.339 -			new Vector3f(6, 0, 20),
   1.340 -			new Vector3f(0, 0, 26),
   1.341 -			new Vector3f(-6, 0, 20),
   1.342 -			new Vector3f(0, 0, 14),
   1.343 -			// begin ellipse
   1.344 -			new Vector3f(16, 5, 20),
   1.345 -			new Vector3f(0, 0, 26),
   1.346 -			new Vector3f(-16, -10, 20),
   1.347 -			new Vector3f(0, 0, 14),
   1.348 -			new Vector3f(16, 20, 20),
   1.349 -			new Vector3f(0, 0, 26),
   1.350 -			new Vector3f(-10, -25, 10),
   1.351 -			new Vector3f(-10, 0, 0),
   1.352 -			// come at me!
   1.353 -			new Vector3f(-28.00242f, 48.005623f, -34.648228f),
   1.354 -			new Vector3f(0, 0 , -20),
   1.355 -	};
   1.356 +	prepareEar(ear1, 1);
   1.357 +	prepareEar(ear2, 1);
   1.358 +	prepareEar(ear3, 1);
   1.359  
   1.360 -	private void createScene() {
   1.361 -		Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
   1.362 -		bell = new Geometry( "sound-emitter" , new Sphere(15,15,1));
   1.363 -		mat.setColor("Color", ColorRGBA.Blue);
   1.364 -		bell.setMaterial(mat);
   1.365 -		rootNode.attachChild(bell);
   1.366 +	motionControl.play();
   1.367 +    }
   1.368  
   1.369 -		ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20));
   1.370 -		ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20));
   1.371 -		ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0));
   1.372 -
   1.373 -		MotionPath track = new MotionPath();
   1.374 -
   1.375 -		for (Vector3f v : path){
   1.376 -			track.addWayPoint(v);
   1.377 -		}
   1.378 -		track.setCurveTension(0.80f);
   1.379 -
   1.380 -		motionControl = new MotionTrack(bell,track);
   1.381 -		
   1.382 -		// for now, use reflection to change the timer... 
   1.383 -		// motionControl.setTimer(new IsoTimer(60));
   1.384 -		try {
   1.385 -			Field timerField;
   1.386 -			timerField = AbstractCinematicEvent.class.getDeclaredField("timer");
   1.387 -			timerField.setAccessible(true);
   1.388 -			try {timerField.set(motionControl, new IsoTimer(60));} 
   1.389 -			catch (IllegalArgumentException e) {e.printStackTrace();} 
   1.390 -			catch (IllegalAccessException e) {e.printStackTrace();}
   1.391 -			} 
   1.392 -		catch (SecurityException e) {e.printStackTrace();} 
   1.393 -		catch (NoSuchFieldException e) {e.printStackTrace();}
   1.394 -		
   1.395 -		motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
   1.396 -		motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
   1.397 -		motionControl.setInitialDuration(20f);
   1.398 -		motionControl.setSpeed(1f);
   1.399 -
   1.400 -		track.enableDebugShape(assetManager, rootNode);
   1.401 -		positionCamera();
   1.402 +    public void simpleUpdate(float tpf) {
   1.403 +	if (music.getStatus() != AudioNode.Status.Playing){
   1.404 +	    music.play();
   1.405  	}
   1.406 -
   1.407 -
   1.408 -	private void positionCamera(){
   1.409 -		this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
   1.410 -		this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
   1.411 -	}
   1.412 -
   1.413 -	private void initAudio() {
   1.414 -		org.lwjgl.input.Mouse.setGrabbed(false);	
   1.415 -		music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);
   1.416 -
   1.417 -		rootNode.attachChild(music);
   1.418 -		audioRenderer.playSource(music);
   1.419 -		music.setPositional(true);
   1.420 -		music.setVolume(1f);
   1.421 -		music.setReverbEnabled(false);
   1.422 -		music.setDirectional(false);
   1.423 -		music.setMaxDistance(200.0f);
   1.424 -		music.setRefDistance(1f);
   1.425 -		//music.setRolloffFactor(1f);
   1.426 -		music.setLooping(false);
   1.427 -		audioRenderer.pauseSource(music); 
   1.428 -	}
   1.429 -
   1.430 -	public class Dancer implements SoundProcessor {
   1.431 -		Geometry entity;
   1.432 -		float scale = 2;
   1.433 -		public Dancer(Geometry entity){
   1.434 -			this.entity = entity;
   1.435 -		}
   1.436 -
   1.437 -		/**
   1.438 -		 * this method is irrelevant since there is no state to cleanup.
   1.439 -		 */
   1.440 -		public void cleanup() {}
   1.441 -
   1.442 -
   1.443 -		/**
   1.444 -		 * Respond to sound!  This is the brain of an AI entity that 
   1.445 -		 * hears it's surroundings and reacts to them.
   1.446 -		 */
   1.447 -		public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
   1.448 -			audioSamples.clear();
   1.449 -			byte[] data = new byte[numSamples];
   1.450 -			float[] out = new float[numSamples];
   1.451 -			audioSamples.get(data);
   1.452 -			FloatSampleTools.byte2floatInterleaved(data, 0, out, 0, 
   1.453 -					numSamples/format.getFrameSize(), format);
   1.454 -
   1.455 -			float max = Float.NEGATIVE_INFINITY;
   1.456 -			for (float f : out){if (f > max) max = f;}
   1.457 -			audioSamples.clear();
   1.458 -
   1.459 -			if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
   1.460 -			else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
   1.461 -		}
   1.462 -	}
   1.463 -
   1.464 -	private void prepareEar(Geometry ear, int n){
   1.465 -		if (this.audioRenderer instanceof MultiListener){
   1.466 -			MultiListener rf = (MultiListener)this.audioRenderer;
   1.467 -
   1.468 -			Listener auxListener = new Listener();
   1.469 -			auxListener.setLocation(ear.getLocalTranslation());
   1.470 -
   1.471 -			rf.addListener(auxListener);
   1.472 -			WaveFileWriter aux = null;
   1.473 -
   1.474 -			try {aux = new WaveFileWriter(new File("/home/r/tmp/ear"+n+".wav"));} 
   1.475 -			catch (FileNotFoundException e) {e.printStackTrace();}
   1.476 -
   1.477 -			rf.registerSoundProcessor(auxListener, 
   1.478 -					new CompositeSoundProcessor(new Dancer(ear), aux));
   1.479 -		}   
   1.480 -	}
   1.481 -
   1.482 -
   1.483 -	public void simpleInitApp() {
   1.484 -		this.setTimer(new IsoTimer(60));
   1.485 -		initAudio();
   1.486 -		
   1.487 -		createScene();
   1.488 -
   1.489 -		prepareEar(ear1, 1);
   1.490 -		prepareEar(ear2, 1);
   1.491 -		prepareEar(ear3, 1);
   1.492 -
   1.493 -		motionControl.play();
   1.494 -	}
   1.495 -
   1.496 -	public void simpleUpdate(float tpf) {
   1.497 -		if (music.getStatus() != AudioNode.Status.Playing){
   1.498 -			music.play();
   1.499 -		}
   1.500 -		Vector3f loc = cam.getLocation();
   1.501 -		Quaternion rot = cam.getRotation();
   1.502 -		listener.setLocation(loc);
   1.503 -		listener.setRotation(rot);
   1.504 -		music.setLocalTranslation(bell.getLocalTranslation());
   1.505 -	}
   1.506 +	Vector3f loc = cam.getLocation();
   1.507 +	Quaternion rot = cam.getRotation();
   1.508 +	listener.setLocation(loc);
   1.509 +	listener.setRotation(rot);
   1.510 +	music.setLocalTranslation(bell.getLocalTranslation());
   1.511 +    }
   1.512  
   1.513  }