jmeCapture: comparison of src/com/aurellem/capture/examples/Advanced.java @ 41:58386a64d019

summary:  renamed stuff
author:   Robert McIntyre <rlm@mit.edu>
date:     Thu, 03 Nov 2011 16:55:43 -0700
children: b1bc965a38d2
compared revisions: 40:56dc950feaed and 41:58386a64d019

package com.aurellem.capture.examples;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.sound.sampled.AudioFormat;

import org.tritonus.share.sampled.FloatSampleTools;

import com.aurellem.capture.Capture;
import com.aurellem.capture.IsoTimer;
import com.aurellem.capture.audio.CompositeSoundProcessor;
import com.aurellem.capture.audio.MultiListener;
import com.aurellem.capture.audio.SoundProcessor;
import com.aurellem.capture.audio.WaveFileWriter;
import com.jme3.app.SimpleApplication;
import com.jme3.audio.AudioNode;
import com.jme3.audio.Listener;
import com.jme3.cinematic.MotionPath;
import com.jme3.cinematic.events.MotionTrack;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.FastMath;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.shape.Box;
import com.jme3.scene.shape.Sphere;
import com.jme3.system.AppSettings;

/**
 * Demonstrates advanced use of the audio capture and recording features.
 * Multiple perspectives of the same scene are simultaneously rendered to
 * different sound files.
 *
 * A key limitation of the way multiple listeners are implemented is that
 * only 3D positioning effects are realized for listeners other than the
 * main LWJGL listener. This means that audio effects such as environment
 * settings will *not* be heard on any auxiliary listeners, though sound
 * attenuation will work correctly.
 *
 * Multiple listeners as realized here might be used to make AI entities
 * that can each hear the world from their own perspective.
 *
 * @author Robert McIntyre
 */
55 | |
56 public class Advanced extends SimpleApplication { | |
57 | |
58 | |
59 private Geometry bell; | |
60 private Geometry ear1; | |
61 private Geometry ear2; | |
62 private Geometry ear3; | |
63 private AudioNode music; | |
64 private MotionTrack motionControl; | |
65 | |
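    /**
     * Selects the "Send" audio renderer used for audio capture, disables
     * pause-on-lost-focus so rendering never stalls, and records both video
     * and audio of the run to temporary files.
     */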
    public static void main(String[] args) {
        Logger.getLogger("com.jme3").setLevel(Level.OFF);
        Advanced app = new Advanced();
        AppSettings settings = new AppSettings(true);
        settings.setAudioRenderer("Send");
        app.setSettings(settings);
        app.setShowSettings(false);
        app.setPauseOnLostFocus(false);

        try {
            Capture.captureVideo(app, File.createTempFile("advanced", ".avi"));
            Capture.captureAudio(app, File.createTempFile("advanced", ".wav"));
        }
        catch (IOException e) {e.printStackTrace();}

        app.start();
    }

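    /**
     * Creates a green box at the given position to mark the location of an
     * auxiliary listener ("ear") and attaches it to the scene.
     */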
    private Geometry makeEar(Node root, Vector3f position){
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        Geometry ear = new Geometry("ear", new Box(1.0f, 1.0f, 1.0f));
        ear.setLocalTranslation(position);
        mat.setColor("Color", ColorRGBA.Green);
        ear.setMaterial(mat);
        root.attachChild(ear);
        return ear;
    }

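    // Waypoints for the bell's motion: three loops, an ellipse, and a final
    // approach that passes through the camera's starting position.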
    private Vector3f[] path = new Vector3f[]{
        // loop 1
        new Vector3f(0, 0, 0),
        new Vector3f(0, 0, -10),
        new Vector3f(-2, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        new Vector3f(0, 0, -14),
        new Vector3f(-6, 0, -20),
        new Vector3f(0, 0, -26),
        new Vector3f(6, 0, -20),
        // loop 2
        new Vector3f(5, 0, -5),
        new Vector3f(7, 0, 1.5f),
        new Vector3f(14, 0, 2),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        new Vector3f(20, 0, 6),
        new Vector3f(26, 0, 0),
        new Vector3f(20, 0, -6),
        new Vector3f(14, 0, 0),
        // loop 3
        new Vector3f(8, 0, 7.5f),
        new Vector3f(7, 0, 10.5f),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(6, 0, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-6, 0, 20),
        new Vector3f(0, 0, 14),
        // begin ellipse
        new Vector3f(16, 5, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-16, -10, 20),
        new Vector3f(0, 0, 14),
        new Vector3f(16, 20, 20),
        new Vector3f(0, 0, 26),
        new Vector3f(-10, -25, 10),
        new Vector3f(-10, 0, 0),
        // come at me!
        new Vector3f(-28.00242f, 48.005623f, -34.648228f),
        new Vector3f(0, 0, -20),
    };

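    /**
     * Builds the scene: a blue sphere ("sound-emitter") that carries the
     * audio source, three green ear boxes, and a MotionPath through the
     * waypoints above that the bell will follow.
     */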
    private void createScene() {
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        bell = new Geometry("sound-emitter", new Sphere(15, 15, 1));
        mat.setColor("Color", ColorRGBA.Blue);
        bell.setMaterial(mat);
        rootNode.attachChild(bell);

        ear1 = makeEar(rootNode, new Vector3f(0, 0, -20));
        ear2 = makeEar(rootNode, new Vector3f(0, 0, 20));
        ear3 = makeEar(rootNode, new Vector3f(20, 0, 0));

        MotionPath track = new MotionPath();

        for (Vector3f v : path){
            track.addWayPoint(v);
        }
        track.setCurveTension(0.80f);

        motionControl = new MotionTrack(bell, track);
        motionControl.setTimer(new IsoTimer(60));
        motionControl.setDirectionType(MotionTrack.Direction.PathAndRotation);
        motionControl.setRotation(new Quaternion().fromAngleNormalAxis(-FastMath.HALF_PI, Vector3f.UNIT_Y));
        motionControl.setInitialDuration(20f);
        motionControl.setSpeed(1f);

        track.enableDebugShape(assetManager, rootNode);
        positionCamera();
    }

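    /** Moves the camera to a fixed vantage point overlooking the scene. */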
    private void positionCamera(){
        this.cam.setLocation(new Vector3f(-28.00242f, 48.005623f, -34.648228f));
        this.cam.setRotation(new Quaternion(0.3359635f, 0.34280345f, -0.13281013f, 0.8671653f));
    }

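    /**
     * Loads the beep as a positional, non-looping AudioNode and leaves it
     * paused; simpleUpdate() restarts it each time it finishes playing.
     */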
    private void initAudio() {
        org.lwjgl.input.Mouse.setGrabbed(false);
        music = new AudioNode(assetManager, "Sound/Effects/Beep.ogg", false);

        rootNode.attachChild(music);
        audioRenderer.playSource(music);
        music.setPositional(true);
        music.setVolume(1f);
        music.setReverbEnabled(false);
        music.setDirectional(false);
        music.setMaxDistance(200.0f);
        music.setRefDistance(1f);
        music.setRolloffFactor(1f);
        music.setLooping(false);
        audioRenderer.pauseSource(music);
    }

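    /**
     * A SoundProcessor that reacts to what a listener hears: the attached
     * Geometry turns green while the incoming audio is loud enough, and
     * gray otherwise.
     */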
    public class Dancer implements SoundProcessor {
        Geometry entity;
        float scale = 2;

        public Dancer(Geometry entity){
            this.entity = entity;
        }

        /**
         * No-op: this processor keeps no state that needs cleaning up.
         */
        public void cleanup() {}

        /**
         * Respond to sound! This is the brain of an AI entity that
         * hears its surroundings and reacts to them.
         */
        public void process(ByteBuffer audioSamples, int numSamples, AudioFormat format) {
            audioSamples.clear();
            byte[] data = new byte[numSamples];
            float[] out = new float[numSamples];
            audioSamples.get(data);
            FloatSampleTools.byte2floatInterleaved(data, 0, out, 0,
                    numSamples / format.getFrameSize(), format);

            float max = Float.NEGATIVE_INFINITY;
            for (float f : out){if (f > max) max = f;}
            audioSamples.clear();

            // Flash green when the peak sample exceeds the threshold, gray otherwise.
            if (max > 0.1){entity.getMaterial().setColor("Color", ColorRGBA.Green);}
            else {entity.getMaterial().setColor("Color", ColorRGBA.Gray);}
        }
    }

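    /**
     * Attaches an auxiliary listener at the ear's position and records what
     * it hears to ear<n>.wav, while a Dancer colors the ear in response to
     * the sound. Requires an audio renderer that implements MultiListener,
     * such as the "Send" renderer selected in main().
     */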
    private void prepareEar(Geometry ear, int n){
        if (this.audioRenderer instanceof MultiListener){
            MultiListener rf = (MultiListener) this.audioRenderer;

            Listener auxListener = new Listener();
            auxListener.setLocation(ear.getLocalTranslation());

            rf.addListener(auxListener);
            WaveFileWriter aux = null;

            try {aux = new WaveFileWriter(new File("/home/r/tmp/ear" + n + ".wav"));}
            catch (FileNotFoundException e) {e.printStackTrace();}

            rf.registerSoundProcessor(auxListener,
                    new CompositeSoundProcessor(new Dancer(ear), aux));
        }
    }

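    /**
     * Switches the application to a fixed-framerate IsoTimer, sets up the
     * audio and the scene, wires up the three recording ears, and starts
     * the bell along its path.
     */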
    public void simpleInitApp() {
        this.setTimer(new IsoTimer(60));
        initAudio();

        createScene();

        // Give each ear its own output file (ear1.wav, ear2.wav, ear3.wav).
        prepareEar(ear1, 1);
        prepareEar(ear2, 2);
        prepareEar(ear3, 3);

        motionControl.play();
    }

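    /**
     * Keeps the beep playing, keeps the main listener locked to the camera,
     * and keeps the audio source attached to the moving bell.
     */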
    public void simpleUpdate(float tpf) {
        if (music.getStatus() != AudioNode.Status.Playing){
            music.play();
        }
        Vector3f loc = cam.getLocation();
        Quaternion rot = cam.getRotation();
        listener.setLocation(loc);
        listener.setRotation(rot);
        music.setLocalTranslation(bell.getLocalTranslation());
    }

}