# HG changeset patch
# User Robert McIntyre
# Date 1320364543 25200
# Node ID 58386a64d019c849df2904c7b1507344a3c99f12
# Parent 56dc950feaed57c4ddaaed3002ea0155304a6edc
renamed stuff

diff -r 56dc950feaed -r 58386a64d019 src/com/aurellem/capture/examples/Advanced.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/com/aurellem/capture/examples/Advanced.java Thu Nov 03 16:55:43 2011 -0700
@@ -0,0 +1,277 @@
+package com.aurellem.capture.examples;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.tritonus.share.sampled.FloatSampleTools;
+
+import com.aurellem.capture.Capture;
+import com.aurellem.capture.IsoTimer;
+import com.aurellem.capture.audio.CompositeSoundProcessor;
+import com.aurellem.capture.audio.MultiListener;
+import com.aurellem.capture.audio.SoundProcessor;
+import com.aurellem.capture.audio.WaveFileWriter;
+import com.jme3.app.SimpleApplication;
+import com.jme3.audio.AudioNode;
+import com.jme3.audio.Listener;
+import com.jme3.cinematic.MotionPath;
+import com.jme3.cinematic.events.MotionTrack;
+import com.jme3.material.Material;
+import com.jme3.math.ColorRGBA;
+import com.jme3.math.FastMath;
+import com.jme3.math.Quaternion;
+import com.jme3.math.Vector3f;
+import com.jme3.scene.Geometry;
+import com.jme3.scene.Node;
+import com.jme3.scene.shape.Box;
+import com.jme3.scene.shape.Sphere;
+import com.jme3.system.AppSettings;
+
+
+/**
+ *
+ * Demonstrates advanced use of the audio capture and recording features.
+ * Multiple perspectives of the same scene are simultaneously rendered to
+ * different sound files.
+ *
+ * A key limitation of the way multiple listeners are implemented is that
+ * only 3D positioning effects are realized for listeners other than the
+ * main LWJGL listener. This means that audio effects such as environment
+ * settings will *not* be heard on any auxiliary listeners, though sound
+ * attenuation will work correctly.
+ *
+ * Multiple listeners as realized here might be used to make AI entities
+ * that can each hear the world from their own perspective.
+ *
+ * @author Robert McIntyre
+ *
+ */
+
+public class Advanced extends SimpleApplication {
+
+
+    private Geometry bell;
+    private Geometry ear1;
+    private Geometry ear2;
+    private Geometry ear3;
+    private AudioNode music;
+    private MotionTrack motionControl;
+
+    public static void main(String[] args) {
+        Logger.getLogger("com.jme3").setLevel(Level.OFF);
+        Advanced app = new Advanced();
+        AppSettings settings = new AppSettings(true);
+        settings.setAudioRenderer("Send");
+        app.setSettings(settings);
+        app.setShowSettings(false);
+        app.setPauseOnLostFocus(false);
+
+        try {
+            Capture.captureVideo(app, File.createTempFile("advanced",".avi"));
+            Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
+        }
+        catch (IOException e) {e.printStackTrace();}
+        app.start();
+    }
+
+    private Geometry makeEar(Node root, Vector3f position){
+        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
+        Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
+        ear.setLocalTranslation(position);
+        mat.setColor("Color", ColorRGBA.Green);
+        ear.setMaterial(mat);
+        root.attachChild(ear);
+        return ear;
+    }
+
+    private Vector3f[] path = new Vector3f[]{
+        // loop 1
+        new Vector3f(0, 0, 0),
+        new Vector3f(0, 0, -10),
+        new Vector3f(-2, 0, -14),
+        new Vector3f(-6, 0, -20),
+        new Vector3f(0, 0, -26),
+        new Vector3f(6, 0, -20),
+        new Vector3f(0, 0, -14),
+        new Vector3f(-6, 0, -20),
+        new Vector3f(0, 0, -26),
+        new Vector3f(6, 0, -20),
+        // loop 2
+        new Vector3f(5, 0, -5),
+        new Vector3f(7, 0, 1.5f),
+        new Vector3f(14, 0, 2),
+        new Vector3f(20, 0, 6),
+        new Vector3f(26, 0, 0),
+        new Vector3f(20, 0, -6),
+        new Vector3f(14, 0, 0),
+        new Vector3f(20, 0, 6),
+        new Vector3f(26, 0, 0),
+        new Vector3f(20, 0, -6),
+        new Vector3f(14, 0, 0),
+        // loop 3
+        new Vector3f(8, 0, 7.5f),
+        new Vector3f(7, 0, 10.5f),
+        new Vector3f(6, 0, 20),
+        new Vector3f(0, 0, 26),
+        new Vector3f(-6, 0, 20),
+        new Vector3f(0, 0, 14),
+        new Vector3f(6, 0, 20),
+        new Vector3f(0, 0, 26),
+        new Vector3f(-6, 0, 20),
+        new Vector3f(0, 0, 14),
+        // begin ellipse
+        new Vector3f(16, 5, 20),
+        new Vector3f(0, 0, 26),
+        new Vector3f(-16, -10, 20),
+        new Vector3f(0, 0, 14),
+        new Vector3f(16, 20, 20),
+        new Vector3f(0, 0, 26),
+        new Vector3f(-10, -25, 10),
+        new Vector3f(-10, 0, 0),
+        // come at me!
+        new Vector3f(-28.00242f, 48.005623f, -34.648228f),
+        new Vector3f(0, 0 , -20),
+    };
+
+    private void createScene() {
+        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
+        bell = new Geometry( "sound-emitter" , new Sphere(15,15,1));
+        mat.setColor("Color", ColorRGBA.Blue);
+        bell.setMaterial(mat);
+        rootNode.attachChild(bell);
+
+        ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20));
+        ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20));
+        ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0));
+
+        MotionPath track = new MotionPath();
+
+        for (Vector3f v : path){
+            track.addWayPoint(v);
+        }
+        track.setCurveTension(0.80f);
+
+        motionControl = new MotionTrack(bell,track);
+        motionControl.setTimer(new IsoTimer(60));
+        motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
+        motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
+        motionControl.setInitialDuration(20f);
+        motionControl.setSpeed(1f);
+
+        track.enableDebugShape(assetManager, rootNode);
+        positionCamera();
+    }
+
+
+    private void positionCamera(){
+        this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
+        this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
+    }
+
+    private void initAudio() {
+        org.lwjgl.input.Mouse.setGrabbed(false);
+        music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);
+
+        rootNode.attachChild(music);
+        audioRenderer.playSource(music);
+        music.setPositional(true);
+        music.setVolume(1f);
+        music.setReverbEnabled(false);
+        music.setDirectional(false);
+        music.setMaxDistance(200.0f);
+        music.setRefDistance(1f);
+        music.setRolloffFactor(1f);
+        music.setLooping(false);
+        audioRenderer.pauseSource(music);
+    }
+
+
+
+
+
+
+
+    public class Dancer implements SoundProcessor {
+        Geometry entity;
+        float scale = 2;
+        public Dancer(Geometry entity){
+            this.entity = entity;
+        }
+
+        /**
+         * This method is irrelevant since there is no state to clean up.
+         */
+        public void cleanup() {}
+
+
+        /**
+         * Respond to sound! This is the brain of an AI entity that
+         * hears its surroundings and reacts to them.
+         */
+        public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
+            audioSamples.clear();
+            byte[] data = new byte[numSamples];
+            float[] out = new float[numSamples];
+            audioSamples.get(data);
+            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
+                numSamples/format.getFrameSize(), format);
+
+            float max = Float.NEGATIVE_INFINITY;
+            for (float f : out){if (f > max) max = f;}
+            audioSamples.clear();
+
+            if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
+            else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
+        }
+    }
+
+    private void prepareEar(Geometry ear, int n){
+        if (this.audioRenderer instanceof MultiListener){
+            MultiListener rf = (MultiListener)this.audioRenderer;
+
+            Listener auxListener = new Listener();
+            auxListener.setLocation(ear.getLocalTranslation());
+
+            rf.addListener(auxListener);
+            WaveFileWriter aux = null;
+
+            try {aux = new WaveFileWriter(new File("/home/r/tmp/ear"+n+".wav"));}
+            catch (FileNotFoundException e) {e.printStackTrace();}
+
+            rf.registerSoundProcessor(auxListener,
+                new CompositeSoundProcessor(new Dancer(ear), aux));
+
+        }
+    }
+
+
+    public void simpleInitApp() {
+        this.setTimer(new IsoTimer(60));
+        initAudio();
+
+        createScene();
+
+        prepareEar(ear1, 1);
+        prepareEar(ear2, 2);
+        prepareEar(ear3, 3);
+
+        motionControl.play();
+    }
+
+    public void simpleUpdate(float tpf) {
+        if (music.getStatus() != AudioNode.Status.Playing){
+            music.play();
+        }
+        Vector3f loc = cam.getLocation();
+        Quaternion rot = cam.getRotation();
+        listener.setLocation(loc);
+        listener.setRotation(rot);
+        music.setLocalTranslation(bell.getLocalTranslation());
+    }
+
+}
diff -r 56dc950feaed -r 58386a64d019 src/com/aurellem/capture/examples/AdvancedAudio.java
--- a/src/com/aurellem/capture/examples/AdvancedAudio.java Thu Nov 03 16:39:32 2011 -0700
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,389 +0,0 @@
-package com.aurellem.capture.examples;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import javax.sound.sampled.AudioFormat;
-
-import org.tritonus.share.sampled.FloatSampleTools;
-
-import com.aurellem.capture.Capture;
-import com.aurellem.capture.IsoTimer;
-import com.aurellem.capture.audio.CompositeSoundProcessor;
-import com.aurellem.capture.audio.MultiListener;
-import com.aurellem.capture.audio.SoundProcessor;
-import com.aurellem.capture.audio.WaveFileWriter;
-import com.jme3.app.SimpleApplication;
-import com.jme3.audio.AudioNode;
-import com.jme3.audio.Listener;
-import com.jme3.audio.ListenerParam;
-import com.jme3.cinematic.MotionPath;
-import com.jme3.cinematic.events.MotionTrack;
-import com.jme3.input.controls.ActionListener;
-import com.jme3.input.controls.MouseButtonTrigger;
-import com.jme3.light.DirectionalLight;
-import com.jme3.material.Material;
-import com.jme3.math.ColorRGBA;
-import com.jme3.math.FastMath;
-import com.jme3.math.Quaternion;
-import com.jme3.math.Vector3f;
-import com.jme3.scene.Geometry;
-import com.jme3.scene.Node;
-import com.jme3.scene.shape.Box;
-import com.jme3.scene.shape.Sphere;
-import com.jme3.system.AppSettings;
-
-
-/**
- *
- * Demonstrates advanced use of the audio capture and recording features.
- * Multiple perspectives of the same scene are simultaneously rendered to
- * different sound files.
- *
- * A key limitation of the way multiple listeners are implemented is that
- * only 3D positioning effects are realized for listeners other than the
- * main LWJGL listener. This means that audio effects such as environment
- * settings will *not* be heard on any auxiliary listeners, though sound
- * attenuation will work correctly.
- *
- * Multiple listeners as realized here might be used to make AI entities
- * that can each hear the world from their own perspective.
- *
- * @author Robert McIntyre
- *
- */
-
-public class AdvancedAudio extends SimpleApplication {
-
-    public static void main(String[] args) {
-        Logger.getLogger("com.jme3").setLevel(Level.OFF);
-        AdvancedAudio app = new AdvancedAudio();
-        AppSettings settings = new AppSettings(true);
-        settings.setAudioRenderer("Send");
-        app.setSettings(settings);
-        app.setShowSettings(false);
-        app.setPauseOnLostFocus(false);
-
-        try {Capture.captureVideo(app, new File("/home/r/tmp/out"));
-            Capture.captureAudio(app, new File("/home/r/tmp/main.wav"));}
-        catch (IOException e) {e.printStackTrace();}
-        app.start();
-
-    }
-
-    private MotionTrack motionControl;
-
-
-    private Geometry makeEar(Node root, Vector3f position){
-        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
-        Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
-        ear.setLocalTranslation(position);
-        mat.setColor("Color", ColorRGBA.Green);
-        ear.setMaterial(mat);
-        root.attachChild(ear);
-        return ear;
-    }
-
-    private Geometry bell;
-
-    private Geometry ear1;
-    private Geometry ear2;
-    private Geometry ear3;
-
-
-
-    private Vector3f[] path = new Vector3f[]{
-        // loop 1
-        new Vector3f(0, 0, 0),
-        new Vector3f(0, 0, -10),
-        new Vector3f(-2, 0, -14),
-        new Vector3f(-6, 0, -20),
-        new Vector3f(0, 0, -26),
-        new Vector3f(6, 0, -20),
-        new Vector3f(0, 0, -14),
-        new Vector3f(-6, 0, -20),
-        new Vector3f(0, 0, -26),
-        new Vector3f(6, 0, -20),
-        // loop 2
-        new Vector3f(5, 0, -5),
-        new Vector3f(7, 0, 1.5f),
-        new Vector3f(14, 0, 2),
-        new Vector3f(20, 0, 6),
-        new Vector3f(26, 0, 0),
-        new Vector3f(20, 0, -6),
-        new Vector3f(14, 0, 0),
-        new Vector3f(20, 0, 6),
-        new Vector3f(26, 0, 0),
-        new Vector3f(20, 0, -6),
-        new Vector3f(14, 0, 0),
-        // loop 3
-        new Vector3f(8, 0, 7.5f),
-        new Vector3f(7, 0, 10.5f),
-        new Vector3f(6, 0, 20),
-        new Vector3f(0, 0, 26),
-        new Vector3f(-6, 0, 20),
-        new Vector3f(0, 0, 14),
-        new Vector3f(6, 0, 20),
-        new Vector3f(0, 0, 26),
-        new Vector3f(-6, 0, 20),
-        new Vector3f(0, 0, 14),
-        // begin ellipse
-        new Vector3f(16, 5, 20),
-        new Vector3f(0, 0, 26),
-        new Vector3f(-16, -10, 20),
-        new Vector3f(0, 0, 14),
-        new Vector3f(16, 20, 20),
-        new Vector3f(0, 0, 26),
-        new Vector3f(-10, -25, 10),
-        new Vector3f(-10, 0, 0),
-        // come at me bro!
-        new Vector3f(-28.00242f, 48.005623f, -34.648228f),
-        new Vector3f(0, 0 , -20),
-    };
-
-
-
-    private void createScene() {
-        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
-        bell = new Geometry( "sound-emitter" , new Sphere(15,15,1));
-        mat.setColor("Color", ColorRGBA.Blue);
-        bell.setMaterial(mat);
-        rootNode.attachChild(bell);
-
-        DirectionalLight light = new DirectionalLight();
-        light.setDirection(new Vector3f(0, -1, 0).normalizeLocal());
-        light.setColor(ColorRGBA.White.mult(1.5f));
-        rootNode.addLight(light);
-
-        ear1 = makeEar(rootNode, new Vector3f(0, 0 ,-20));
-        ear2 = makeEar(rootNode, new Vector3f(0, 0 ,20));
-        ear3 = makeEar(rootNode, new Vector3f(20, 0 ,0));
-
-
-        MotionPath track = new MotionPath();
-
-        for (Vector3f v : path){
-            track.addWayPoint(v);
-        }
-
-
-        track.setCurveTension(0.80f);
-
-
-        motionControl = new MotionTrack(bell,track);
-        motionControl.setTimer(new IsoTimer(60));
-        motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
-        motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
-        motionControl.setInitialDuration(20f);
-        motionControl.setSpeed(1f);
-
-
-        track.enableDebugShape(assetManager, rootNode);
-
-
-        positionCamera();
-
-
-    }
-
-
-    private void positionCamera(){
-        this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
-        // cam.setLocation(new Vector3f(0,0,-20));
-        this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
-    }
-
-    private AudioNode music;
-
-
-
-
-
-    private void initAudio() {
-        org.lwjgl.input.Mouse.setGrabbed(false);
-        music = new AudioNode(assetManager, "Sound/Environment/sqr-1kHz.wav", false);
-
-        rootNode.attachChild(music);
-        audioRenderer.playSource(music);
-        music.setPositional(true);
-        music.setVolume(1f);
-        music.setReverbEnabled(false);
-        music.setDirectional(false);
-        music.setMaxDistance(200.0f);
-        music.setRefDistance(1f);
-        music.setRolloffFactor(1f);
-        music.setLooping(false);
-        audioRenderer.pauseSource(music);
-
-    }
-
-
-
-
-    private Listener auxListener;
-    //public File data1 = new File("/home/r/tmp/data1.wav");
-    //public File data2 = new File("/home/r/tmp/data2.wav");
-    //public File data3 = new File("/home/r/tmp/data3.wav");
-    //public File data4 = new File("/home/r/tmp/data4.wav");
-    //public File data5 = new File("/home/r/tmp/data5.wav");
-    //public File data6 = new File("/home/r/tmp/data6.wav");
-
-
-    public class Dancer implements SoundProcessor {
-        Geometry entity;
-        float scale = 2;
-        public Dancer(Geometry entity){
-            this.entity = entity;
-        }
-
-        /**
-         * this method is irrelevant since there is no state to cleanup.
-         */
-        public void cleanup() {}
-
-
-        /**
-         * Dance to the beat! This is the brain of an AI entity that
-         * hears it's surroundings and reacts to them.
-         */
-        public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
-            audioSamples.clear();
-            byte[] data = new byte[numSamples];
-            float[] out = new float[numSamples];
-            audioSamples.get(data);
-            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
-                numSamples/format.getFrameSize(), format);
-
-            float max = Float.NEGATIVE_INFINITY;
-            for (float f : out){if (f > max) max = f;}
-            audioSamples.clear();
-
-            if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
-            else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
-
-            //entity.scale(this.scale);
-            //if (this.scale == 2f){this.scale = 0.5f;}
-            //else {this.scale = 2;}
-        }
-
-
-    }
-
-
-
-    private void prepareEar(Geometry ear, int n){
-        if (this.audioRenderer instanceof MultiListener){
-            MultiListener rf = (MultiListener)this.audioRenderer;
-
-            auxListener = new Listener();
-            auxListener.setLocation(ear.getLocalTranslation());
-
-            rf.addListener(auxListener);
-            WaveFileWriter aux = null;
-
-            try {aux = new WaveFileWriter(new File("/home/r/tmp/ear"+n+".wav"));}
-            catch (FileNotFoundException e) {e.printStackTrace();}
-
-            rf.registerSoundProcessor(auxListener,
-                new CompositeSoundProcessor(new Dancer(ear), aux));
-
-        }
-    }
-
-
-    public void simpleInitApp() {
-        this.setTimer(new IsoTimer(60));
-        initAudio();
-        initKeys();
-        createScene();
-
-        prepareEar(ear1, 1);
-        prepareEar(ear2, 1);
-        prepareEar(ear3, 1);
-
-        motionControl.play();
-    }
-
-
-
-
-
-    private void initKeys() {
-        inputManager.addMapping("Shoot", new MouseButtonTrigger(0));
-        inputManager.addListener(actionListener, "Shoot");
-    }
-
-    /** Defining the "Shoot" action: Play a gun sound. */
-    private ActionListener actionListener = new ActionListener() {
-        @Override
-        public void onAction(String name, boolean keyPressed, float tpf) {
-            if (name.equals("Shoot") && !keyPressed) {
-
-                System.out.println(bell.getLocalTranslation().subtract(listener.getLocation()).length());
-                //bell.getMaterial().setColor("Color", ColorRGBA.randomColor());
-                //audioRenderer.playSource(music);
-                System.out.println(music.getRefDistance());
-
-            }
-        }
-    };
-
-    /** Move the listener with the camera - for 3D audio. */
-
-
-    //private Vector3f prevBellPos = Vector3f.ZERO;
-    private int countdown = 0;
-
-    public void simpleUpdate(float tpf) {
-        if (countdown == 0){
-            music.play();
-        }
-        Vector3f loc = cam.getLocation();
-        Quaternion rot = cam.getRotation();
-        listener.setLocation(loc);
-        listener.setRotation(rot);
-        audioRenderer.updateListenerParam(listener, ListenerParam.Rotation);
-
-        //System.out.println(countdown);
-
-        //if (countdown++ == 300) { this.requestClose(false);}
-
-        //System.out.println("channel "+ music.getChannel());
-        //listener.setLocation(cam.getLocation());
-        //listener.setRotation(cam.getRotation());
-        //auxListener.setLocation(loc);
-        //auxListener.setRotation(rot);
-        //if (music.getStatus() != AudioNode.Status.Playing){
-        //audioRenderer.playSource(music);
-        //music.play();
-        // bell.getMaterial().setColor("Color", ColorRGBA.randomColor());
-        //System.out.println("I'm playing! <3");
<3"); - //} - //audioRenderer.updateSourceParam(music, AudioParam.Direction); - - //Vector3f bellVelocity = bell.getLocalTranslation().subtract(prevBellPos).mult(1.0f/tpf); - //prevBellPos = bell.getLocalTranslation(); - - music.setLocalTranslation(bell.getLocalTranslation()); - - //System.out.println("distance: " + - // music.getLocalTranslation().subtract(listener.getLocation()).length()); - - //music.setVelocity(bellVelocity); - - //audioRenderer.updateSourceParam(music, AudioParam.Position); - //audioRenderer.updateSourceParam(music, AudioParam.Velocity); - - - //System.out.println("main:" + listener.getVolume()); - //System.out.println("aux:" + auxListener.getVolume()); - //org.lwjgl.openal.AL10.alSourcef(1, org.lwjgl.openal.AL10.AL_MIN_GAIN, 0f); - //org.lwjgl.openal.AL10.alSourcef(1, org.lwjgl.openal.AL10.AL_ROLLOFF_FACTOR, 5f); - - } - -} diff -r 56dc950feaed -r 58386a64d019 src/com/aurellem/capture/examples/AdvancedVideo.java --- a/src/com/aurellem/capture/examples/AdvancedVideo.java Thu Nov 03 16:39:32 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,75 +0,0 @@ -package com.aurellem.capture.examples; - -import java.io.File; -import java.io.IOException; - -import com.aurellem.capture.Capture; -import com.aurellem.capture.IsoTimer; -import com.aurellem.capture.video.AbstractVideoRecorder; -import com.jme3.app.SimpleApplication; -import com.jme3.material.Material; -import com.jme3.math.ColorRGBA; -import com.jme3.math.Vector3f; -import com.jme3.scene.Geometry; -import com.jme3.scene.shape.Box; - -/** Recording Video from an application suitable for upload to youtube.*/ -public class AdvancedVideo extends SimpleApplication { - - /*File staticVideo = - new File("/home/r/bullshit.avi"); - */ - File movingVideo = - new File("/home/r/tmp/bullshit2.flv"); - - AbstractVideoRecorder movingRecorder ; - - public static void main(String[] args){ - AdvancedVideo app = new AdvancedVideo(); - app.start(); - } - - public void initVideo(){ - this.setTimer(new IsoTimer(60)); - /*try{ - // set the timer to 30fps lock-step - this.setTimer(new IsoTimer(30)); - - //ViewPort compositeViewPort = renderManager.createFinalView("composite", cam); - //compositeViewPort.attachScene(this.rootNode); - //compositeViewPort.attachScene(this.guiNode); - this.viewPort.setClearFlags(true, true, true); - this.viewPort.setBackgroundColor(ColorRGBA.Black); - movingRecorder = new AVIVideoRecorder(movingVideo); - this.stateManager.attach(movingRecorder); - this.viewPort.addFinalProcessor(movingRecorder); - this.viewPort.attachScene(this.guiNode); - - }catch (IOException e) { - e.printStackTrace();} - */ - try {Capture.captureVideo(this, movingVideo);} - catch (IOException e) {e.printStackTrace();} - - } - protected Geometry player; - - public void simpleInitApp() { - initVideo(); // begin recording! - /** this blue box is our player character */ - Box b = new Box(Vector3f.ZERO, 1, 1, 1); - player = new Geometry("blue cube", b); - Material mat = new Material(assetManager, - "Common/MatDefs/Misc/Unshaded.j3md"); - mat.setColor("Color", ColorRGBA.Blue); - player.setMaterial(mat); - rootNode.attachChild(player); - } - - /* Use the main event loop to trigger repeating actions. */ - public void simpleUpdate(float tpf) { - // make the player rotate: - player.rotate(0, 2*tpf, 0); - } - -} \ No newline at end of file