diff src/com/aurellem/capture/examples/Advanced.java @ 62:f5e52169f056

updated to work with new jme changes
author Robert McIntyre <rlm@mit.edu>
date Wed, 14 Dec 2011 17:43:52 -0700
parents afc437f637bd
children 23e3df41db3c
line diff
     1.1 --- a/src/com/aurellem/capture/examples/Advanced.java	Sat Dec 03 23:49:14 2011 -0600
     1.2 +++ b/src/com/aurellem/capture/examples/Advanced.java	Wed Dec 14 17:43:52 2011 -0700
     1.3 @@ -1,7 +1,6 @@
     1.4  package com.aurellem.capture.examples;
     1.5  
     1.6  import java.io.File;
     1.7 -import java.io.FileNotFoundException;
     1.8  import java.io.IOException;
     1.9  import java.lang.reflect.Field;
    1.10  import java.nio.ByteBuffer;
    1.11 @@ -55,248 +54,253 @@
    1.12  
    1.13  public class Advanced extends SimpleApplication {
    1.14  
    1.15 -    /**
    1.16 -     * You will see three grey cubes, a blue sphere, and a path which
    1.17 -     * circles each cube.  The blue sphere is generating a constant
    1.18 -     * monotone sound as it moves along the track.  Each cube is
    1.19 -     * listening for sound; when a cube hears sound whose intensity is
    1.20 -     * greater than a certain threshold, it changes its color from
    1.21 -     * grey to green.
    1.22 -     * 
    1.23 -     *  Each cube is also saving whatever it hears to a file.  The
    1.24 -     *  scene from the perspective of the viewer is also saved to a
    1.25 -     *  video file.  When you listen to each of the sound files
    1.26 -     *  alongside the video, the sound will get louder when the sphere
    1.27 -     *  approaches the cube that generated that sound file.  This
    1.28 -     *  shows that each listener is hearing the world from its own
    1.29 -     *  perspective.
    1.30 -     * 
    1.31 -     */
    1.32 -    public static void main(String[] args) {
    1.33 -	Advanced app = new Advanced();
    1.34 -	AppSettings settings = new AppSettings(true);
    1.35 -	settings.setAudioRenderer(AurellemSystemDelegate.SEND);
    1.36 -	JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
    1.37 -	app.setSettings(settings);
    1.38 -	app.setShowSettings(false);
    1.39 -	app.setPauseOnLostFocus(false);
    1.40 +	/**
    1.41 +	 * You will see three grey cubes, a blue sphere, and a path which
    1.42 +	 * circles each cube.  The blue sphere is generating a constant
    1.43 +	 * monotone sound as it moves along the track.  Each cube is
    1.44 +	 * listening for sound; when a cube hears sound whose intensity is
    1.45 +	 * greater than a certain threshold, it changes its color from
    1.46 +	 * grey to green.
    1.47 +	 * 
    1.48 +	 *  Each cube is also saving whatever it hears to a file.  The
    1.49 +	 *  scene from the perspective of the viewer is also saved to a
    1.50 +	 *  video file.  When you listen to each of the sound files
    1.51 +	 *  alongside the video, the sound will get louder when the sphere
    1.52 +	 *  approaches the cube that generated that sound file.  This
    1.53 +	 *  shows that each listener is hearing the world from its own
    1.54 +	 *  perspective.
    1.55 +	 * 
    1.56 +	 */
    1.57 +	public static void main(String[] args) {
    1.58 +		Advanced app = new Advanced();
    1.59 +		AppSettings settings = new AppSettings(true);
    1.60 +		settings.setAudioRenderer(AurellemSystemDelegate.SEND);
    1.61 +		JmeSystem.setSystemDelegate(new AurellemSystemDelegate());
    1.62 +		app.setSettings(settings);
    1.63 +		app.setShowSettings(false);
    1.64 +		app.setPauseOnLostFocus(false);
    1.65  		
    1.66 -	try {
    1.67 -	    Capture.captureVideo(app, File.createTempFile("advanced",".avi"));
    1.68 -	    Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
    1.69 -	}
    1.70 -	catch (IOException e) {e.printStackTrace();}
    1.71 -		
    1.72 -	app.start();
    1.73 -    }
    1.74 -
    1.75 -	
    1.76 -    private Geometry bell;
    1.77 -    private Geometry ear1;
    1.78 -    private Geometry ear2;
    1.79 -    private Geometry ear3;
    1.80 -    private AudioNode music;
    1.81 -    private MotionTrack motionControl;
    1.82 -		
    1.83 -    private Geometry makeEar(Node root, Vector3f position){
    1.84 -	Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
    1.85 -	Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
    1.86 -	ear.setLocalTranslation(position);
    1.87 -	mat.setColor("Color", ColorRGBA.Green);
    1.88 -	ear.setMaterial(mat);
    1.89 -	root.attachChild(ear);
    1.90 -	return ear;
    1.91 -    } 
    1.92 -
    1.93 -    private Vector3f[] path = new Vector3f[]{
    1.94 -	// loop 1
    1.95 -	new Vector3f(0, 0, 0),
    1.96 -	new Vector3f(0, 0, -10),
    1.97 -	new Vector3f(-2, 0, -14),
    1.98 -	new Vector3f(-6, 0, -20),
    1.99 -	new Vector3f(0, 0, -26),
   1.100 -	new Vector3f(6, 0, -20),
   1.101 -	new Vector3f(0, 0, -14),
   1.102 -	new Vector3f(-6, 0, -20),
   1.103 -	new Vector3f(0, 0, -26),
   1.104 -	new Vector3f(6, 0, -20),
   1.105 -	// loop 2
   1.106 -	new Vector3f(5, 0, -5),
   1.107 -	new Vector3f(7, 0, 1.5f),
   1.108 -	new Vector3f(14, 0, 2),
   1.109 -	new Vector3f(20, 0, 6),
   1.110 -	new Vector3f(26, 0, 0),
   1.111 -	new Vector3f(20, 0, -6),
   1.112 -	new Vector3f(14, 0, 0),
   1.113 -	new Vector3f(20, 0, 6),
   1.114 -	new Vector3f(26, 0, 0),
   1.115 -	new Vector3f(20, 0, -6),
   1.116 -	new Vector3f(14, 0, 0),
   1.117 -	// loop 3
   1.118 -	new Vector3f(8, 0, 7.5f),
   1.119 -	new Vector3f(7, 0, 10.5f),
   1.120 -	new Vector3f(6, 0, 20),
   1.121 -	new Vector3f(0, 0, 26),
   1.122 -	new Vector3f(-6, 0, 20),
   1.123 -	new Vector3f(0, 0, 14),
   1.124 -	new Vector3f(6, 0, 20),
   1.125 -	new Vector3f(0, 0, 26),
   1.126 -	new Vector3f(-6, 0, 20),
   1.127 -	new Vector3f(0, 0, 14),
   1.128 -	// begin ellipse
   1.129 -	new Vector3f(16, 5, 20),
   1.130 -	new Vector3f(0, 0, 26),
   1.131 -	new Vector3f(-16, -10, 20),
   1.132 -	new Vector3f(0, 0, 14),
   1.133 -	new Vector3f(16, 20, 20),
   1.134 -	new Vector3f(0, 0, 26),
   1.135 -	new Vector3f(-10, -25, 10),
   1.136 -	new Vector3f(-10, 0, 0),
   1.137 -	// come at me!
   1.138 -	new Vector3f(-28.00242f, 48.005623f, -34.648228f),
   1.139 -	new Vector3f(0, 0 , -20),
   1.140 -    };
   1.141 -
   1.142 -    private void createScene() {
   1.143 -	Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
   1.144 -	bell = new Geometry( "sound-emitter" , new Sphere(15,15,1));
   1.145 -	mat.setColor("Color", ColorRGBA.Blue);
   1.146 -	bell.setMaterial(mat);
   1.147 -	rootNode.attachChild(bell);
   1.148 -
   1.149 -	ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20));
   1.150 -	ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20));
   1.151 -	ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0));
   1.152 -
   1.153 -	MotionPath track = new MotionPath();
   1.154 -
   1.155 -	for (Vector3f v : path){
   1.156 -	    track.addWayPoint(v);
   1.157 -	}
   1.158 -	track.setCurveTension(0.80f);
   1.159 -
   1.160 -	motionControl = new MotionTrack(bell,track);
   1.161 -		
   1.162 -	// for now, use reflection to change the timer... 
   1.163 -	// motionControl.setTimer(new IsoTimer(60));
   1.164 -	try {
   1.165 -	    Field timerField;
   1.166 -	    timerField = AbstractCinematicEvent.class.getDeclaredField("timer");
   1.167 -	    timerField.setAccessible(true);
   1.168 -	    try {timerField.set(motionControl, new IsoTimer(60));} 
   1.169 -	    catch (IllegalArgumentException e) {e.printStackTrace();} 
   1.170 -	    catch (IllegalAccessException e) {e.printStackTrace();}
   1.171 -	} 
   1.172 -	catch (SecurityException e) {e.printStackTrace();} 
   1.173 -	catch (NoSuchFieldException e) {e.printStackTrace();}
   1.174 -		
   1.175 -	motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
   1.176 -	motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
   1.177 -	motionControl.setInitialDuration(20f);
   1.178 -	motionControl.setSpeed(1f);
   1.179 -
   1.180 -	track.enableDebugShape(assetManager, rootNode);
   1.181 -	positionCamera();
   1.182 -    }
   1.183 -
   1.184 -
   1.185 -    private void positionCamera(){
   1.186 -	this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
   1.187 -	this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
   1.188 -    }
   1.189 -
   1.190 -    private void initAudio() {
   1.191 -	org.lwjgl.input.Mouse.setGrabbed(false);	
   1.192 -	music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);
   1.193 -
   1.194 -	rootNode.attachChild(music);
   1.195 -	audioRenderer.playSource(music);
   1.196 -	music.setPositional(true);
   1.197 -	music.setVolume(1f);
   1.198 -	music.setReverbEnabled(false);
   1.199 -	music.setDirectional(false);
   1.200 -	music.setMaxDistance(200.0f);
   1.201 -	music.setRefDistance(1f);
   1.202 -	//music.setRolloffFactor(1f);
   1.203 -	music.setLooping(false);
   1.204 -	audioRenderer.pauseSource(music); 
   1.205 -    }
   1.206 -
   1.207 -    public class Dancer implements SoundProcessor {
   1.208 -	Geometry entity;
   1.209 -	float scale = 2;
   1.210 -	public Dancer(Geometry entity){
   1.211 -	    this.entity = entity;
   1.212 +		try {
   1.213 +			//Capture.captureVideo(app, File.createTempFile("advanced",".avi"));
   1.214 +			Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
   1.215 +		}
   1.216 +		catch (IOException e) {e.printStackTrace();}
   1.217 +		 
   1.218 +		app.start();
   1.219  	}
   1.220  
   1.221 -	/**
   1.222 -	 * this method is irrelevant since there is no state to cleanup.
   1.223 -	 */
   1.224 -	public void cleanup() {}
   1.225  
   1.226 +	private Geometry bell;
   1.227 +	private Geometry ear1;
   1.228 +	private Geometry ear2;
   1.229 +	private Geometry ear3;
   1.230 +	private AudioNode music;
   1.231 +	private MotionTrack motionControl;
   1.232 +	private IsoTimer motionTimer = new IsoTimer(60);
   1.233  
   1.234 -	/**
   1.235 -	 * Respond to sound!  This is the brain of an AI entity that 
   1.236 -	 * hears it's surroundings and reacts to them.
   1.237 -	 */
   1.238 -	public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
   1.239 -	    audioSamples.clear();
   1.240 -	    byte[] data = new byte[numSamples];
   1.241 -	    float[] out = new float[numSamples];
   1.242 -	    audioSamples.get(data);
   1.243 -	    FloatSampleTools.byte2floatInterleaved(data, 0, out, 0, 
   1.244 -						   numSamples/format.getFrameSize(), format);
   1.245 +	private Geometry makeEar(Node root, Vector3f position){
   1.246 +		Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
   1.247 +		Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
   1.248 +		ear.setLocalTranslation(position);
   1.249 +		mat.setColor("Color", ColorRGBA.Green);
   1.250 +		ear.setMaterial(mat);
   1.251 +		root.attachChild(ear);
   1.252 +		return ear;
   1.253 +	} 
   1.254  
   1.255 -	    float max = Float.NEGATIVE_INFINITY;
   1.256 -	    for (float f : out){if (f > max) max = f;}
   1.257 -	    audioSamples.clear();
   1.258 +	private Vector3f[] path = new Vector3f[]{
   1.259 +			// loop 1
   1.260 +			new Vector3f(0, 0, 0),
   1.261 +			new Vector3f(0, 0, -10),
   1.262 +			new Vector3f(-2, 0, -14),
   1.263 +			new Vector3f(-6, 0, -20),
   1.264 +			new Vector3f(0, 0, -26),
   1.265 +			new Vector3f(6, 0, -20),
   1.266 +			new Vector3f(0, 0, -14),
   1.267 +			new Vector3f(-6, 0, -20),
   1.268 +			new Vector3f(0, 0, -26),
   1.269 +			new Vector3f(6, 0, -20),
   1.270 +			// loop 2
   1.271 +			new Vector3f(5, 0, -5),
   1.272 +			new Vector3f(7, 0, 1.5f),
   1.273 +			new Vector3f(14, 0, 2),
   1.274 +			new Vector3f(20, 0, 6),
   1.275 +			new Vector3f(26, 0, 0),
   1.276 +			new Vector3f(20, 0, -6),
   1.277 +			new Vector3f(14, 0, 0),
   1.278 +			new Vector3f(20, 0, 6),
   1.279 +			new Vector3f(26, 0, 0),
   1.280 +			new Vector3f(20, 0, -6),
   1.281 +			new Vector3f(14, 0, 0),
   1.282 +			// loop 3
   1.283 +			new Vector3f(8, 0, 7.5f),
   1.284 +			new Vector3f(7, 0, 10.5f),
   1.285 +			new Vector3f(6, 0, 20),
   1.286 +			new Vector3f(0, 0, 26),
   1.287 +			new Vector3f(-6, 0, 20),
   1.288 +			new Vector3f(0, 0, 14),
   1.289 +			new Vector3f(6, 0, 20),
   1.290 +			new Vector3f(0, 0, 26),
   1.291 +			new Vector3f(-6, 0, 20),
   1.292 +			new Vector3f(0, 0, 14),
   1.293 +			// begin ellipse
   1.294 +			new Vector3f(16, 5, 20),
   1.295 +			new Vector3f(0, 0, 26),
   1.296 +			new Vector3f(-16, -10, 20),
   1.297 +			new Vector3f(0, 0, 14),
   1.298 +			new Vector3f(16, 20, 20),
   1.299 +			new Vector3f(0, 0, 26),
   1.300 +			new Vector3f(-10, -25, 10),
   1.301 +			new Vector3f(-10, 0, 0),
   1.302 +			// come at me!
   1.303 +			new Vector3f(-28.00242f, 48.005623f, -34.648228f),
   1.304 +			new Vector3f(0, 0 , -20),
   1.305 +	};
   1.306  
   1.307 -	    if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
   1.308 -	    else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
   1.309 +	private void createScene() {
   1.310 +		Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
   1.311 +		bell = new Geometry( "sound-emitter" , new Sphere(15,15,1));
   1.312 +		mat.setColor("Color", ColorRGBA.Blue);
   1.313 +		bell.setMaterial(mat);
   1.314 +		rootNode.attachChild(bell);
   1.315 +
   1.316 +		ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20));
   1.317 +		ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20));
   1.318 +		ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0));
   1.319 +
   1.320 +		MotionPath track = new MotionPath();
   1.321 +
   1.322 +		for (Vector3f v : path){
   1.323 +			track.addWayPoint(v);
   1.324 +		}
   1.325 +		track.setCurveTension(0.80f);
   1.326 +
   1.327 +		motionControl = new MotionTrack(bell,track);
   1.328 +		// for now, use reflection to change the timer... 
   1.329 +		// motionControl.setTimer(new IsoTimer(60));
   1.330 +		
   1.331 +		try {
   1.332 +			Field timerField;
   1.333 +			timerField = AbstractCinematicEvent.class.getDeclaredField("timer");
   1.334 +			timerField.setAccessible(true);
   1.335 +			try {timerField.set(motionControl, motionTimer);} 
   1.336 +			catch (IllegalArgumentException e) {e.printStackTrace();} 
   1.337 +			catch (IllegalAccessException e) {e.printStackTrace();}
   1.338 +		} 
   1.339 +		catch (SecurityException e) {e.printStackTrace();} 
   1.340 +		catch (NoSuchFieldException e) {e.printStackTrace();}
   1.341 +
   1.342 +
   1.343 +		motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
   1.344 +		motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
   1.345 +		motionControl.setInitialDuration(20f);
   1.346 +		motionControl.setSpeed(1f);
   1.347 +
   1.348 +		track.enableDebugShape(assetManager, rootNode);
   1.349 +		positionCamera();
   1.350  	}
   1.351 -    }
   1.352  
   1.353 -    private void prepareEar(Geometry ear, int n){
   1.354 -	if (this.audioRenderer instanceof MultiListener){
   1.355 -	    MultiListener rf = (MultiListener)this.audioRenderer;
   1.356  
   1.357 -	    Listener auxListener = new Listener();
   1.358 -	    auxListener.setLocation(ear.getLocalTranslation());
   1.359 +	private void positionCamera(){
   1.360 +		this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
   1.361 +		this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
   1.362 +	}
   1.363  
   1.364 -	    rf.addListener(auxListener);
   1.365 -	    WaveFileWriter aux = null;
   1.366 +	private void initAudio() {
   1.367 +		org.lwjgl.input.Mouse.setGrabbed(false);	
   1.368 +		music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);
   1.369  
   1.370 -	    try {aux = new WaveFileWriter(new File("/home/r/tmp/ear"+n+".wav"));} 
   1.371 -	    catch (FileNotFoundException e) {e.printStackTrace();}
   1.372 +		rootNode.attachChild(music);
   1.373 +		audioRenderer.playSource(music);
   1.374 +		music.setPositional(true);
   1.375 +		music.setVolume(1f);
   1.376 +		music.setReverbEnabled(false);
   1.377 +		music.setDirectional(false);
   1.378 +		music.setMaxDistance(200.0f);
   1.379 +		music.setRefDistance(1f);
   1.380 +		//music.setRolloffFactor(1f);
   1.381 +		music.setLooping(false);
   1.382 +		audioRenderer.pauseSource(music); 
   1.383 +	}
   1.384  
   1.385 -	    rf.registerSoundProcessor(auxListener, 
   1.386 -				      new CompositeSoundProcessor(new Dancer(ear), aux));
   1.387 -	}   
   1.388 -    }
   1.389 +	public class Dancer implements SoundProcessor {
   1.390 +		Geometry entity;
   1.391 +		float scale = 2;
   1.392 +		public Dancer(Geometry entity){
   1.393 +			this.entity = entity;
   1.394 +		}
   1.395  
   1.396 +		/**
   1.397 +		 * this method is irrelevant since there is no state to cleanup.
   1.398 +		 */
   1.399 +		public void cleanup() {}
   1.400  
   1.401 -    public void simpleInitApp() {
   1.402 -	this.setTimer(new IsoTimer(60));
   1.403 -	initAudio();
   1.404 -		
   1.405 -	createScene();
   1.406  
   1.407 -	prepareEar(ear1, 1);
   1.408 -	prepareEar(ear2, 1);
   1.409 -	prepareEar(ear3, 1);
   1.410 +		/**
   1.411 +		 * Respond to sound!  This is the brain of an AI entity that 
   1.412 +		 * hears its surroundings and reacts to them.
   1.413 +		 */
   1.414 +		public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
   1.415 +			audioSamples.clear();
   1.416 +			byte[] data = new byte[numSamples];
   1.417 +			float[] out = new float[numSamples];
   1.418 +			audioSamples.get(data);
   1.419 +			FloatSampleTools.byte2floatInterleaved(data, 0, out, 0, 
   1.420 +					numSamples/format.getFrameSize(), format);
   1.421  
   1.422 -	motionControl.play();
   1.423 -    }
   1.424 +			float max = Float.NEGATIVE_INFINITY;
   1.425 +			for (float f : out){if (f > max) max = f;}
   1.426 +			audioSamples.clear();
   1.427  
   1.428 -    public void simpleUpdate(float tpf) {
   1.429 -	if (music.getStatus() != AudioNode.Status.Playing){
   1.430 -	    music.play();
   1.431 +			if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
   1.432 +			else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
   1.433 +		}
   1.434  	}
   1.435 -	Vector3f loc = cam.getLocation();
   1.436 -	Quaternion rot = cam.getRotation();
   1.437 -	listener.setLocation(loc);
   1.438 -	listener.setRotation(rot);
   1.439 -	music.setLocalTranslation(bell.getLocalTranslation());
   1.440 -    }
   1.441 +
   1.442 +	private void prepareEar(Geometry ear, int n){
   1.443 +		if (this.audioRenderer instanceof MultiListener){
   1.444 +			MultiListener rf = (MultiListener)this.audioRenderer;
   1.445 +
   1.446 +			Listener auxListener = new Listener();
   1.447 +			auxListener.setLocation(ear.getLocalTranslation());
   1.448 +
   1.449 +			rf.addListener(auxListener);
   1.450 +			WaveFileWriter aux = null;
   1.451 +
   1.452 +			try {aux = new WaveFileWriter(File.createTempFile("advanced-audio-" + n, ".wav"));} 
   1.453 +			catch (IOException e) {e.printStackTrace();}
   1.454 +
   1.455 +			rf.registerSoundProcessor(auxListener, 
   1.456 +					new CompositeSoundProcessor(new Dancer(ear), aux));
   1.457 +					
   1.458 +		}   
   1.459 +	}
   1.460 +
   1.461 +
   1.462 +	public void simpleInitApp() {
   1.463 +		this.setTimer(new IsoTimer(60));
   1.464 +		initAudio();
   1.465 +
   1.466 +		createScene();
   1.467 +
   1.468 +		prepareEar(ear1, 1);
   1.469 +		prepareEar(ear2, 1);
   1.470 +		prepareEar(ear3, 1);
   1.471 +
   1.472 +		motionControl.play();
   1.473 +
   1.474 +	}
   1.475 +
   1.476 +	public void simpleUpdate(float tpf) {
   1.477 +		motionTimer.update();
   1.478 +		if (music.getStatus() != AudioNode.Status.Playing){
   1.479 +			music.play();
   1.480 +		}
   1.481 +		Vector3f loc = cam.getLocation();
   1.482 +		Quaternion rot = cam.getRotation();
   1.483 +		listener.setLocation(loc);
   1.484 +		listener.setRotation(rot);
   1.485 +		music.setLocalTranslation(bell.getLocalTranslation());
   1.486 +	}
   1.487  
   1.488  }
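
Note on the timer handling above (not part of the changeset itself): MotionTrack in this version of jME3 exposes no public way to replace its timer, so createScene() swaps the private "timer" field of AbstractCinematicEvent for a fixed-rate IsoTimer via reflection, and simpleUpdate() now calls motionTimer.update() each frame so the cinematic advances in lock-step with the 60 fps capture timer set in simpleInitApp(). Below is a minimal sketch of that workaround factored into a reusable helper. The helper name setCinematicTimer is ours, and the package names are assumed from the jME3/aurellem layout used by this example; only the field name "timer" and the error handling are taken directly from the diff.

import java.lang.reflect.Field;

import com.aurellem.capture.IsoTimer;
import com.jme3.cinematic.events.AbstractCinematicEvent;

public class CinematicTimerHack {

    /**
     * Replace the private timer of a cinematic event with a fixed-rate
     * IsoTimer.  Mirrors the try/catch block in createScene() above;
     * failures are only logged, matching the diff's behavior.
     */
    public static void setCinematicTimer(AbstractCinematicEvent event,
                                         IsoTimer timer) {
        try {
            Field timerField =
                AbstractCinematicEvent.class.getDeclaredField("timer");
            timerField.setAccessible(true);    // the field has no public setter
            timerField.set(event, timer);      // event now ticks on the IsoTimer
        }
        catch (NoSuchFieldException e)    {e.printStackTrace();}
        catch (SecurityException e)       {e.printStackTrace();}
        catch (IllegalArgumentException e){e.printStackTrace();}
        catch (IllegalAccessException e)  {e.printStackTrace();}
    }
}

With such a helper, the reflection block in createScene() would reduce to setCinematicTimer(motionControl, motionTimer). The important part either way is what the diff does: keep the injected IsoTimer in a field and call its update() manually in simpleUpdate(), since nothing else drives a timer installed this way.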