#+title: Simulated Sense of Hearing
#+author: Robert McIntyre
#+email: rlm@mit.edu
#+description: Simulating multiple listeners and the sense of hearing in jMonkeyEngine3
#+keywords: simulated hearing, openal, clojure, jMonkeyEngine3, LWJGL, AI
#+SETUPFILE: ../../aurellem/org/setup.org
#+INCLUDE: ../../aurellem/org/level-0.org
#+BABEL: :exports both :noweb yes :cache no :mkdirp yes

* Hearing

I want to be able to place ears in a similar manner to how I place
the eyes. I want to be able to place each ear at a unique spatial
position, and receive as output at every tick the FFT of whatever
signals are happening at that point.

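As a rough sketch of the interface I am aiming for, placing an ear
would look something like this, using the =add-ear= function defined
later in this file (the particular position and the printing
continuation are illustrative only):

#+begin_src clojure
;; Illustrative sketch only: `add-ear` is defined in the =ears= block
;; below, and `world` stands for a running jMonkeyEngine3 application.
(let [listener (doto (Listener.)
                 (.setLocation (Vector3f. 0 2 0)))]
  (add-ear world listener
           (fn [samples]
             ;; `samples` is the sound data gathered at the listener's
             ;; position during this tick.
             (println (count samples)))))
#+end_src
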
Hearing is one of the more difficult senses to simulate, because there
is less support for obtaining the actual sound data that is processed
by jMonkeyEngine3.

jMonkeyEngine's sound system works as follows:

 - jMonkeyEngine uses the =AppSettings= for the particular application
   to determine what sort of =AudioRenderer= should be used (see the
   sketch just after this list).
 - although some support is provided for multiple AudioRendering
   backends, jMonkeyEngine at the time of this writing will either
   pick no =AudioRenderer= at all, or the =LwjglAudioRenderer=
 - jMonkeyEngine tries to figure out what sort of system you're
   running and extracts the appropriate native libraries.
 - the =LwjglAudioRenderer= uses the [[http://lwjgl.org/][=LWJGL=]] (LightWeight Java Game
   Library) bindings to interface with a C library called [[http://kcat.strangesoft.net/openal.html][=OpenAL=]]
 - =OpenAL= calculates the 3D sound localization and feeds a stream of
   sound to any of various sound output devices with which it knows
   how to communicate.

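For concreteness, the renderer selection in the first step looks
roughly like this from Clojure (=AppSettings=, =setAudioRenderer=, and
=AppSettings/LWJGL_OPENAL= are standard jME3; the surrounding setup is
only a sketch):

#+begin_src clojure
(import '(com.jme3.system AppSettings))

;; jMonkeyEngine consults these settings when deciding which
;; AudioRenderer to construct.  "LWJGL" (AppSettings/LWJGL_OPENAL)
;; selects the LwjglAudioRenderer; passing nil disables audio.
(def audio-settings
  (doto (AppSettings. true)
    (.setAudioRenderer AppSettings/LWJGL_OPENAL)))

;; an application would then receive these settings via
;; (.setSettings app audio-settings) before (.start app)
#+end_src
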
A consequence of this architecture is that there's no way to access
the actual sound data produced by =OpenAL=. Even worse, =OpenAL= only
supports one /listener/, which normally isn't a problem for games, but
becomes a problem when trying to make multiple AI creatures that can
each hear the world from a different perspective.

To make many AI creatures in jMonkeyEngine that can each hear the
world from their own perspective, it is necessary to go all the way
back to =OpenAL= and implement support for simulated hearing there.

** =OpenAL= Devices

=OpenAL= goes to great lengths to support many different systems, all
with different sound capabilities and interfaces. It accomplishes this
difficult task by providing code for many different sound backends in
pseudo-objects called /Devices/. There's a device for the Linux Open
Sound System and the Advanced Linux Sound Architecture, there's one
for Direct Sound on Windows, there's even one for Solaris. =OpenAL=
solves the problem of platform independence by providing all these
Devices.

Wrapper libraries such as LWJGL are free to examine the system on
which they are running and then select an appropriate device for that
system.

There are also a few "special" devices that don't interface with any
particular system. These include the Null Device, which doesn't do
anything, and the Wave Device, which writes whatever sound it receives
to a file, if everything has been set up correctly when configuring
=OpenAL=.

Actual mixing of the sound data happens in the Devices, and they are
the only point in the sound rendering process where this data is
available.

Therefore, in order to support multiple listeners, and to get the
sound data in a form that the AIs can use, it is necessary to create a
new Device which supports these features.

** The Send Device

Adding a device to OpenAL is rather tricky -- there are five separate
files in the =OpenAL= source tree that must be modified to do so. I've
documented this process [[./add-new-device.org][here]] for anyone who is interested.

#+srcname: send
#+begin_src C
#include "config.h"
#include <stdlib.h>
#include "alMain.h"
#include "AL/al.h"
#include "AL/alc.h"
#include "alSource.h"
#include <jni.h>

//////////////////// Summary

struct send_data;
struct context_data;

static void addContext(ALCdevice *, ALCcontext *);
static void syncContexts(ALCcontext *master, ALCcontext *slave);
static void syncSources(ALsource *master, ALsource *slave,
                        ALCcontext *masterCtx, ALCcontext *slaveCtx);

static void syncSourcei(ALuint master, ALuint slave,
                        ALCcontext *masterCtx, ALCcontext *ctx2, ALenum param);
static void syncSourcef(ALuint master, ALuint slave,
                        ALCcontext *masterCtx, ALCcontext *ctx2, ALenum param);
static void syncSource3f(ALuint master, ALuint slave,
                         ALCcontext *masterCtx, ALCcontext *ctx2, ALenum param);

static void swapInContext(ALCdevice *, struct context_data *);
static void saveContext(ALCdevice *, struct context_data *);
static void limitContext(ALCdevice *, ALCcontext *);
static void unLimitContext(ALCdevice *);

static void init(ALCdevice *);
static void renderData(ALCdevice *, int samples);

#define UNUSED(x) (void)(x)

//////////////////// State

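/* Each context (one per listener) carries its own click-removal
 * state and its own buffer into which its audio is mixed.  A single
 * send_data struct lives in the device's ExtraData field and keeps a
 * growable array of all the contexts attached to the device.
 */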
typedef struct context_data {
  ALfloat ClickRemoval[MAXCHANNELS];
  ALfloat PendingClicks[MAXCHANNELS];
  ALvoid *renderBuffer;
  ALCcontext *ctx;
} context_data;

typedef struct send_data {
  ALuint size;
  context_data **contexts;
  ALuint numContexts;
  ALuint maxContexts;
} send_data;

//////////////////// Context Creation / Synchronization

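/* The macros below generate small helper functions (syncSourcei,
 * syncSourcef, syncSource3i, syncSource3f) that copy one source
 * parameter from a source in one context to the corresponding source
 * in another context, restoring whichever context was current when
 * they were called.
 */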
#define _MAKE_SYNC(NAME, INIT_EXPR, GET_EXPR, SET_EXPR)        \
  void NAME (ALuint sourceID1, ALuint sourceID2,               \
             ALCcontext *ctx1, ALCcontext *ctx2,               \
             ALenum param){                                    \
    INIT_EXPR;                                                 \
    ALCcontext *current = alcGetCurrentContext();              \
    alcMakeContextCurrent(ctx1);                               \
    GET_EXPR;                                                  \
    alcMakeContextCurrent(ctx2);                               \
    SET_EXPR;                                                  \
    alcMakeContextCurrent(current);                            \
  }

#define MAKE_SYNC(NAME, TYPE, GET, SET)                        \
  _MAKE_SYNC(NAME,                                             \
             TYPE value,                                       \
             GET(sourceID1, param, &value),                    \
             SET(sourceID2, param, value))

#define MAKE_SYNC3(NAME, TYPE, GET, SET)                       \
  _MAKE_SYNC(NAME,                                             \
             TYPE value1; TYPE value2; TYPE value3;,           \
             GET(sourceID1, param, &value1, &value2, &value3), \
             SET(sourceID2, param, value1, value2, value3))

MAKE_SYNC( syncSourcei,  ALint,   alGetSourcei,  alSourcei);
MAKE_SYNC( syncSourcef,  ALfloat, alGetSourcef,  alSourcef);
MAKE_SYNC3(syncSource3i, ALint,   alGetSource3i, alSource3i);
MAKE_SYNC3(syncSource3f, ALfloat, alGetSource3f, alSource3f);

void syncSources(ALsource *masterSource, ALsource *slaveSource,
                 ALCcontext *masterCtx, ALCcontext *slaveCtx){
  ALuint master = masterSource->source;
  ALuint slave = slaveSource->source;
  ALCcontext *current = alcGetCurrentContext();

  syncSourcef(master,slave,masterCtx,slaveCtx,AL_PITCH);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_GAIN);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_MAX_DISTANCE);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_ROLLOFF_FACTOR);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_REFERENCE_DISTANCE);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_MIN_GAIN);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_MAX_GAIN);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_CONE_OUTER_GAIN);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_CONE_INNER_ANGLE);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_CONE_OUTER_ANGLE);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_SEC_OFFSET);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_SAMPLE_OFFSET);
  syncSourcef(master,slave,masterCtx,slaveCtx,AL_BYTE_OFFSET);

  syncSource3f(master,slave,masterCtx,slaveCtx,AL_POSITION);
  syncSource3f(master,slave,masterCtx,slaveCtx,AL_VELOCITY);
  syncSource3f(master,slave,masterCtx,slaveCtx,AL_DIRECTION);

  syncSourcei(master,slave,masterCtx,slaveCtx,AL_SOURCE_RELATIVE);
  syncSourcei(master,slave,masterCtx,slaveCtx,AL_LOOPING);

  alcMakeContextCurrent(masterCtx);
  ALint source_type;
  alGetSourcei(master, AL_SOURCE_TYPE, &source_type);

  // Only static sources are currently synchronized!
  if (AL_STATIC == source_type){
    ALint master_buffer;
    ALint slave_buffer;
    alGetSourcei(master, AL_BUFFER, &master_buffer);
    alcMakeContextCurrent(slaveCtx);
    alGetSourcei(slave, AL_BUFFER, &slave_buffer);
    if (master_buffer != slave_buffer){
      alSourcei(slave, AL_BUFFER, master_buffer);
    }
  }

  // Synchronize the state of the two sources.
  alcMakeContextCurrent(masterCtx);
  ALint masterState;
  ALint slaveState;

  alGetSourcei(master, AL_SOURCE_STATE, &masterState);
  alcMakeContextCurrent(slaveCtx);
  alGetSourcei(slave, AL_SOURCE_STATE, &slaveState);

  if (masterState != slaveState){
    switch (masterState){
    case AL_INITIAL : alSourceRewind(slave); break;
    case AL_PLAYING : alSourcePlay(slave);   break;
    case AL_PAUSED  : alSourcePause(slave);  break;
    case AL_STOPPED : alSourceStop(slave);   break;
    }
  }
  // Restore whatever context was previously active.
  alcMakeContextCurrent(current);
}

void syncContexts(ALCcontext *master, ALCcontext *slave){
  /* If there aren't sufficient sources in slave to mirror
     the sources in master, create them. */
  ALCcontext *current = alcGetCurrentContext();

  UIntMap *masterSourceMap = &(master->SourceMap);
  UIntMap *slaveSourceMap = &(slave->SourceMap);
  ALuint numMasterSources = masterSourceMap->size;
  ALuint numSlaveSources = slaveSourceMap->size;

  alcMakeContextCurrent(slave);
  if (numSlaveSources < numMasterSources){
    ALuint numMissingSources = numMasterSources - numSlaveSources;
    ALuint newSources[numMissingSources];
    alGenSources(numMissingSources, newSources);
  }

  /* Now, slave is guaranteed to have at least as many sources
     as master. Sync each source from master to the corresponding
     source in slave. */
  int i;
  for(i = 0; i < masterSourceMap->size; i++){
    syncSources((ALsource*)masterSourceMap->array[i].value,
                (ALsource*)slaveSourceMap->array[i].value,
                master, slave);
  }
  alcMakeContextCurrent(current);
}

static void addContext(ALCdevice *Device, ALCcontext *context){
  send_data *data = (send_data*)Device->ExtraData;
  // expand the array of contexts if necessary
  if (data->numContexts >= data->maxContexts){
    ALuint newMaxContexts = data->maxContexts*2 + 1;
    data->contexts = realloc(data->contexts, newMaxContexts*sizeof(context_data*));
    data->maxContexts = newMaxContexts;
  }
  // create a context_data and add it to the main array
  context_data *ctxData;
  ctxData = (context_data*)calloc(1, sizeof(*ctxData));
  ctxData->renderBuffer =
    malloc(BytesFromDevFmt(Device->FmtType) *
           Device->NumChan * Device->UpdateSize);
  ctxData->ctx = context;

  data->contexts[data->numContexts] = ctxData;
  data->numContexts++;
}

//////////////////// Context Switching

/* A device brings along with it two pieces of state
 * which have to be swapped in and out with each context.
 */
static void swapInContext(ALCdevice *Device, context_data *ctxData){
  memcpy(Device->ClickRemoval, ctxData->ClickRemoval, sizeof(ALfloat)*MAXCHANNELS);
  memcpy(Device->PendingClicks, ctxData->PendingClicks, sizeof(ALfloat)*MAXCHANNELS);
}

static void saveContext(ALCdevice *Device, context_data *ctxData){
  memcpy(ctxData->ClickRemoval, Device->ClickRemoval, sizeof(ALfloat)*MAXCHANNELS);
  memcpy(ctxData->PendingClicks, Device->PendingClicks, sizeof(ALfloat)*MAXCHANNELS);
}

static ALCcontext **currentContext;
static ALuint currentNumContext;

/* By default, all contexts are rendered at once for each call to
 * aluMixData.  This function uses the internals of the ALCdevice
 * struct to temporarily cause aluMixData to only render the chosen
 * context.
 */
static void limitContext(ALCdevice *Device, ALCcontext *ctx){
  currentContext = Device->Contexts;
  currentNumContext = Device->NumContexts;
  Device->Contexts = &ctx;
  Device->NumContexts = 1;
}

static void unLimitContext(ALCdevice *Device){
  Device->Contexts = currentContext;
  Device->NumContexts = currentNumContext;
}

//////////////////// Main Device Loop

/* Establish the LWJGL context as the master context, to which all
 * the slave contexts will be synchronized.
 */
static void init(ALCdevice *Device){
  ALCcontext *masterContext = alcGetCurrentContext();
  addContext(Device, masterContext);
}

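/* Mix `samples` frames of audio for every context attached to the
 * device.  The first context is the master; every slave context is
 * synchronized to it, and then each context in turn is swapped in,
 * limited, and mixed into its own render buffer by aluMixData.
 */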
static void renderData(ALCdevice *Device, int samples){
  if(!Device->Connected){return;}
  send_data *data = (send_data*)Device->ExtraData;
  ALCcontext *current = alcGetCurrentContext();

  ALuint i;
  for (i = 1; i < data->numContexts; i++){
    syncContexts(data->contexts[0]->ctx, data->contexts[i]->ctx);
  }

  if ((uint) samples > Device->UpdateSize){
    printf("exceeding internal buffer size; dropping samples\n");
    printf("requested %d; available %d\n", samples, Device->UpdateSize);
    samples = (int) Device->UpdateSize;
  }

  for (i = 0; i < data->numContexts; i++){
    context_data *ctxData = data->contexts[i];
    ALCcontext *ctx = ctxData->ctx;
    alcMakeContextCurrent(ctx);
    limitContext(Device, ctx);
    swapInContext(Device, ctxData);
    aluMixData(Device, ctxData->renderBuffer, samples);
    saveContext(Device, ctxData);
    unLimitContext(Device);
  }
  alcMakeContextCurrent(current);
}

//////////////////// JNI Methods

#include "com_aurellem_send_AudioSend.h"

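/* The functions below are the native implementations of the methods
 * declared in the Java class com.aurellem.send.AudioSend; the header
 * included above supplies their JNIEXPORT prototypes.
 */
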
/*
 * Class:     com_aurellem_send_AudioSend
 * Method:    nstep
 * Signature: (JI)V
 */
JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_nstep
(JNIEnv *env, jclass clazz, jlong device, jint samples){
  UNUSED(env);UNUSED(clazz);UNUSED(device);
  renderData((ALCdevice*)((intptr_t)device), samples);
}

/*
 * Class:     com_aurellem_send_AudioSend
 * Method:    ngetSamples
 * Signature: (JLjava/nio/ByteBuffer;III)V
 */
JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_ngetSamples
(JNIEnv *env, jclass clazz, jlong device, jobject buffer, jint position,
 jint samples, jint n){
  UNUSED(clazz);

  ALvoid *buffer_address =
    ((ALbyte *)(((char*)(*env)->GetDirectBufferAddress(env, buffer)) + position));
  ALCdevice *recorder = (ALCdevice*) ((intptr_t)device);
  send_data *data = (send_data*)recorder->ExtraData;
  if ((ALuint)n >= data->numContexts){return;}

  //printf("Want %d samples for listener %d\n", samples, n);
  //printf("Device's format type is %d bytes per sample,\n",
  //       BytesFromDevFmt(recorder->FmtType));
  //printf("and it has %d channels, making for %d requested bytes\n",
  //       recorder->NumChan,
  //       BytesFromDevFmt(recorder->FmtType) * recorder->NumChan * samples);

  memcpy(buffer_address, data->contexts[n]->renderBuffer,
         BytesFromDevFmt(recorder->FmtType) * recorder->NumChan * samples);
  //samples*sizeof(ALfloat));
}

/*
 * Class:     com_aurellem_send_AudioSend
 * Method:    naddListener
 * Signature: (J)V
 */
JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_naddListener
(JNIEnv *env, jclass clazz, jlong device){
  UNUSED(env); UNUSED(clazz);
  //printf("creating new context via naddListener\n");
  ALCdevice *Device = (ALCdevice*) ((intptr_t)device);
  ALCcontext *new = alcCreateContext(Device, NULL);
  addContext(Device, new);
}

/*
 * Class:     com_aurellem_send_AudioSend
 * Method:    nsetNthListener3f
 * Signature: (IFFFJI)V
 */
JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_nsetNthListener3f
(JNIEnv *env, jclass clazz, jint param,
 jfloat v1, jfloat v2, jfloat v3, jlong device, jint contextNum){
  UNUSED(env);UNUSED(clazz);

  ALCdevice *Device = (ALCdevice*) ((intptr_t)device);
  send_data *data = (send_data*)Device->ExtraData;

  ALCcontext *current = alcGetCurrentContext();
  if ((ALuint)contextNum >= data->numContexts){return;}
  alcMakeContextCurrent(data->contexts[contextNum]->ctx);
  alListener3f(param, v1, v2, v3);
  alcMakeContextCurrent(current);
}

/*
 * Class:     com_aurellem_send_AudioSend
 * Method:    nsetNthListenerf
 * Signature: (IFJI)V
 */
JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_nsetNthListenerf
(JNIEnv *env, jclass clazz, jint param, jfloat v1, jlong device,
 jint contextNum){
  UNUSED(env);UNUSED(clazz);

  ALCdevice *Device = (ALCdevice*) ((intptr_t)device);
  send_data *data = (send_data*)Device->ExtraData;

  ALCcontext *current = alcGetCurrentContext();
  if ((ALuint)contextNum >= data->numContexts){return;}
  alcMakeContextCurrent(data->contexts[contextNum]->ctx);
  alListenerf(param, v1);
  alcMakeContextCurrent(current);
}

/*
 * Class:     com_aurellem_send_AudioSend
 * Method:    ninitDevice
 * Signature: (J)V
 */
JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_ninitDevice
(JNIEnv *env, jclass clazz, jlong device){
  UNUSED(env);UNUSED(clazz);

  ALCdevice *Device = (ALCdevice*) ((intptr_t)device);
  init(Device);
}

/*
 * Class:     com_aurellem_send_AudioSend
 * Method:    ngetAudioFormat
 * Signature: (J)Ljavax/sound/sampled/AudioFormat;
 */
JNIEXPORT jobject JNICALL Java_com_aurellem_send_AudioSend_ngetAudioFormat
(JNIEnv *env, jclass clazz, jlong device){
  UNUSED(clazz);
  jclass AudioFormatClass =
    (*env)->FindClass(env, "javax/sound/sampled/AudioFormat");
  jmethodID AudioFormatConstructor =
    (*env)->GetMethodID(env, AudioFormatClass, "<init>", "(FIIZZ)V");

  ALCdevice *Device = (ALCdevice*) ((intptr_t)device);

  int isSigned;
  switch (Device->FmtType)
    {
    case DevFmtUByte:
    case DevFmtUShort: isSigned = 0; break;
    default : isSigned = 1;
    }
  float frequency = Device->Frequency;
  int bitsPerFrame = (8 * BytesFromDevFmt(Device->FmtType));
  int channels = Device->NumChan;

  //printf("freq = %f, bpf = %d, channels = %d, signed? = %d\n",
  //       frequency, bitsPerFrame, channels, isSigned);

  jobject format = (*env)->
    NewObject(
              env,AudioFormatClass,AudioFormatConstructor,
              frequency,
              bitsPerFrame,
              channels,
              isSigned,
              0);
  return format;
}

//////////////////// Device Initialization / Management

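/* These are the hooks through which OpenAL drives the send backend:
 * opening and closing the "Multiple Audio Send" device, resetting it,
 * and advertising it when device lists are probed.
 */
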
static const ALCchar sendDevice[] = "Multiple Audio Send";

static ALCboolean send_open_playback(ALCdevice *device,
                                     const ALCchar *deviceName)
{
  send_data *data;
  // stop any buffering for stdout, so that I can
  // see the printf statements in my terminal immediately
  setbuf(stdout, NULL);

  if(!deviceName)
    deviceName = sendDevice;
  else if(strcmp(deviceName, sendDevice) != 0)
    return ALC_FALSE;
  data = (send_data*)calloc(1, sizeof(*data));
  device->szDeviceName = strdup(deviceName);
  device->ExtraData = data;
  return ALC_TRUE;
}

static void send_close_playback(ALCdevice *device)
{
  send_data *data = (send_data*)device->ExtraData;
  alcMakeContextCurrent(NULL);
  ALuint i;
  // Destroy all slave contexts. LWJGL will take care of
  // its own context.
  for (i = 1; i < data->numContexts; i++){
    context_data *ctxData = data->contexts[i];
    alcDestroyContext(ctxData->ctx);
    free(ctxData->renderBuffer);
    free(ctxData);
  }
  free(data);
  device->ExtraData = NULL;
}

static ALCboolean send_reset_playback(ALCdevice *device)
{
  SetDefaultWFXChannelOrder(device);
  return ALC_TRUE;
}

static void send_stop_playback(ALCdevice *Device){
  UNUSED(Device);
}

static const BackendFuncs send_funcs = {
  send_open_playback,
  send_close_playback,
  send_reset_playback,
  send_stop_playback,
  NULL,
  NULL, /* These would be filled with functions to  */
  NULL, /* handle capturing audio if we were into   */
  NULL, /* that sort of thing...                    */
  NULL,
  NULL
};

ALCboolean alc_send_init(BackendFuncs *func_list){
  *func_list = send_funcs;
  return ALC_TRUE;
}

void alc_send_deinit(void){}

void alc_send_probe(enum DevProbe type)
{
  switch(type)
    {
    case DEVICE_PROBE:
      AppendDeviceList(sendDevice);
      break;
    case ALL_DEVICE_PROBE:
      AppendAllDeviceList(sendDevice);
      break;
    case CAPTURE_DEVICE_PROBE:
      break;
    }
}
#+end_src

#+srcname: ears
#+begin_src clojure
(ns cortex.hearing)
(use 'cortex.world)
(use 'cortex.import)
(use 'clojure.contrib.def)
(cortex.import/mega-import-jme3)
(rlm.rlm-commands/help)
(import java.nio.ByteBuffer)
(import java.awt.image.BufferedImage)
(import java.awt.Color)
(import java.awt.Dimension)
(import java.awt.Graphics)
(import java.awt.Graphics2D)
(import java.awt.event.WindowAdapter)
(import java.awt.event.WindowEvent)
(import javax.swing.JFrame)
(import javax.swing.JPanel)
(import javax.swing.SwingUtilities)
(import javax.swing.ImageIcon)
(import javax.swing.JOptionPane)
(import java.awt.image.ImageObserver)

(import 'com.jme3.capture.SoundProcessor)

(defn sound-processor
  "Deals with converting ByteBuffers into arrays of bytes so that the
  continuation functions can be defined in terms of immutable stuff."
  [continuation]
  (proxy [SoundProcessor] []
    (cleanup [])
    (process
      [#^ByteBuffer audioSamples numSamples]
      (no-exceptions
       (let [byte-array (byte-array numSamples)]
         (.get audioSamples byte-array 0 numSamples)
         (continuation
          (vec byte-array)))))))

(defn add-ear
  "Add an ear to the world. The continuation function will be called
  on the FFT of the sounds which the ear hears in the given
  timeframe. Sound is 3D."
  [world listener continuation]
  (let [renderer (.getAudioRenderer world)]
    (.addListener renderer listener)
    (.registerSoundProcessor renderer listener
                             (sound-processor continuation))
    listener))
#+end_src

#+srcname: test-hearing
#+begin_src clojure :results silent
(ns test.hearing)
(use 'cortex.world)
(use 'cortex.import)
(use 'clojure.contrib.def)
(use 'body.ear)
(cortex.import/mega-import-jme3)
(rlm.rlm-commands/help)

(defn setup-fn [world]
  (let [listener (Listener.)]
    (add-ear world listener #(println (nth % 0)))))

(defn play-sound [node world value]
  (if (not value)
    (do
      (.playSource (.getAudioRenderer world) node))))

(defn test-world []
  (let [node1 (AudioNode. (asset-manager) "Sounds/pure.wav" false false)]
    (world
     (Node.)
     {"key-space" (partial play-sound node1)}
     setup-fn
     no-op)))
#+end_src

* Example

* COMMENT Code Generation
#+begin_src clojure :tangle ../../cortex/src/cortex/hearing.clj
<<ears>>
#+end_src

#+begin_src clojure :tangle ../../cortex/src/test/hearing.clj
<<test-hearing>>
#+end_src

#+begin_src C :tangle ../Alc/backends/send.c
<<send>>
#+end_src