view org/ear.org @ 18:1e201037f666

separating out the sections of send.c
author Robert McIntyre <rlm@mit.edu>
date Thu, 03 Nov 2011 13:32:27 -0700
parents 19ff95c69cf5
children 22ac5a0367cd
line wrap: on
line source
1 #+title: Simulated Sense of Hearing
2 #+author: Robert McIntyre
3 #+email: rlm@mit.edu
4 #+description: Simulating multiple listeners and the sense of hearing in jMonkeyEngine3
5 #+keywords: simulated hearing, openal, clojure, jMonkeyEngine3, LWJGL, AI
6 #+SETUPFILE: ../../aurellem/org/setup.org
7 #+INCLUDE: ../../aurellem/org/level-0.org
8 #+BABEL: :exports both :noweb yes :cache no :mkdirp yes
13 * Hearing
15 I want to be able to place ears in a similar manner to how I place
16 the eyes. I want to be able to place ears in a unique spatial
17 position, and receive as output at every tick the FFT of whatever
18 signals are happening at that point.
20 Hearing is one of the more difficult senses to simulate, because there
21 is less support for obtaining the actual sound data that is processed
22 by jMonkeyEngine3.
24 jMonkeyEngine's sound system works as follows:
26 - jMonkeyEngine uses the =AppSettings= for the particular application
27 to determine what sort of =AudioRenderer= should be used.
28 - although some support is provided for multiple AudioRendering
29 backends, jMonkeyEngine at the time of this writing will either
30 pick no AudioRender at all, or the =LwjglAudioRenderer=
31 - jMonkeyEngine tries to figure out what sort of system you're
32 running and extracts the appropriate native libraries.
33 - the =LwjglAudioRenderer= uses the [[http://lwjgl.org/][=LWJGL=]] (LightWeight Java Game
34 Library) bindings to interface with a C library called [[http://kcat.strangesoft.net/openal.html][=OpenAL=]]
35 - =OpenAL= calculates the 3D sound localization and feeds a stream of
36 sound to any of various sound output devices with which it knows
37 how to communicate.
39 A consequence of this is that there's no way to access the actual
40 sound data produced by =OpenAL=. Even worse, =OpenAL= only supports
41 one /listener/, which normally isn't a problem for games, but becomes
42 a problem when trying to make multiple AI creatures that can each hear
43 the world from a different perspective.
45 To make many AI creatures in jMonkeyEngine that can each hear the
46 world from their own perspective, it is necessary to go all the way
47 back to =OpenAL= and implement support for simulated hearing there.
49 ** =OpenAL= Devices
51 =OpenAL= goes to great lengths to support many different systems, all
52 with different sound capabilities and interfaces. It accomplishes this
53 difficult task by providing code for many different sound backends in
54 pseudo-objects called /Devices/. There's a device for the Linux Open
55 Sound System and the Advanced Linux Sound Architecture, there's one
56 for Direct Sound on Windows, there's even one for Solaris. =OpenAL=
57 solves the problem of platform independence by providing all these
58 Devices.
60 Wrapper libraries such as LWJGL are free to examine the system on
61 which they are running and then select an appropriate device for that
62 system.
64 There are also a few "special" devices that don't interface with any
65 particular system. These include the Null Device, which doesn't do
66 anything, and the Wave Device, which writes whatever sound it receives
67 to a file, if everything has been set up correctly when configuring
68 =OpenAL=.
70 Actual mixing of the sound data happens in the Devices, and they are
71 the only point in the sound rendering process where this data is
72 available.
74 Therefore, in order to support multiple listeners, and get the sound
75 data in a form that the AIs can use, it is necessary to create a new
76 Device, which supports these features.
79 ** The Send Device
80 Adding a device to OpenAL is rather tricky -- there are five separate
81 files in the =OpenAL= source tree that must be modified to do so. I've
82 documented this process [[./add-new-device.org][here]] for anyone who is interested.
85 Onward to that actual Device!
87 again, my objectives are:
89 - Support Multiple Listeners from jMonkeyEngine3
90 - Get access to the rendered sound data for further processing from
91 clojure.
93 ** =send.c=
95 ** Header
96 #+srcname: send-header
97 #+begin_src C
#include "config.h"
#include <stdlib.h>
#include "alMain.h"
#include "AL/al.h"
#include "AL/alc.h"
#include "alSource.h"
#include <jni.h>

//////////////////// Summary

/* Per-device and per-context bookkeeping; defined in the State section. */
struct send_data;
struct context_data;

/* Register `context` as an additional listener on the device. */
static void addContext(ALCdevice *, ALCcontext *);
/* Mirror all source state from the master context into a slave context. */
static void syncContexts(ALCcontext *master, ALCcontext *slave);
/* Synchronize one master/slave source pair across two contexts. */
static void syncSources(ALsource *master, ALsource *slave,
                        ALCcontext *masterCtx, ALCcontext *slaveCtx);

/* Per-parameter sync helpers (int / float / float-triple), generated
 * by the MAKE_SYNC macros below.
 * NOTE(review): these are declared static here, but the macro
 * expansions below define them without `static` — confirm linkage. */
static void syncSourcei(ALuint master, ALuint slave,
                        ALCcontext *masterCtx, ALCcontext *ctx2, ALenum param);
static void syncSourcef(ALuint master, ALuint slave,
                        ALCcontext *masterCtx, ALCcontext *ctx2, ALenum param);
static void syncSource3f(ALuint master, ALuint slave,
                         ALCcontext *masterCtx, ALCcontext *ctx2, ALenum param);

/* Swap per-context mixing state into/out of the device, and restrict
 * rendering to one context at a time. */
static void swapInContext(ALCdevice *, struct context_data *);
static void saveContext(ALCdevice *, struct context_data *);
static void limitContext(ALCdevice *, ALCcontext *);
static void unLimitContext(ALCdevice *);

/* Device entry points: master-context setup and per-tick mixing. */
static void init(ALCdevice *);
static void renderData(ALCdevice *, int samples);

/* Silence unused-parameter warnings in the JNI glue. */
#define UNUSED(x) (void)(x)
132 #+end_src
134 The main idea behind the Send device is to take advantage of the fact
135 that LWJGL only manages one /context/ when using OpenAL. A /context/
136 is like a container that holds samples and keeps track of where the
137 listener is. In order to support multiple listeners, the Send device
138 identifies the LWJGL context as the master context, and creates any
139 number of slave contexts to represent additional listeners. Every
140 time the device renders sound, it synchronizes every source from the
141 master LWJGL context to the slave contexts. Then, it renders each
142 context separately, using a different listener for each one. The
143 rendered sound is made available via JNI to jMonkeyEngine.
145 To recap, the process is:
146 - Set the LWJGL context as "master" in the =init()= method.
147 - Create any number of additional contexts via =addContext()=
148 - At every call to =renderData()= sync the master context with the
149 slave contexts via =syncContexts()=
150 - =syncContexts()= calls =syncSources()= to sync all the sources
151 which are in the master context.
152 - =limitContext()= and =unLimitContext()= make it possible to render
153 only one context at a time.
155 ** Necessary State
156 #+begin_src C
//////////////////// State

/* Per-listener state: a private copy of the device's click-removal
 * arrays plus the buffer this context's mix is rendered into. */
typedef struct context_data {
  ALfloat ClickRemoval[MAXCHANNELS];   // saved copy of Device->ClickRemoval
  ALfloat PendingClicks[MAXCHANNELS];  // saved copy of Device->PendingClicks
  ALvoid *renderBuffer;                // per-context output samples
  ALCcontext *ctx;                     // the OpenAL context itself
} context_data;

/* Device-wide state, stored in ALCdevice->ExtraData: a growable array
 * of context_data pointers.  contexts[0] is the master (LWJGL) context. */
typedef struct send_data {
  ALuint size;               // NOTE(review): not referenced in this file — confirm use
  context_data **contexts;   // array of per-listener records
  ALuint numContexts;        // slots in use
  ALuint maxContexts;        // allocated capacity
} send_data;
172 #+end_src
174 Switching between contexts is not the normal operation of a Device,
175 and one of the problems with doing so is that a Device normally keeps
176 around a few pieces of state such as the =ClickRemoval= array above
177 which will become corrupted if the contexts are not done in
178 parallel. The solution is to create a copy of this normally global
179 device state for each context, and copy it back and forth into and out
180 of the actual device state whenever a context is rendered.
182 ** Synchronization Macros
184 #+begin_src C
//////////////////// Context Creation / Synchronization

/* Generate a function NAME(src1, src2, ctx1, ctx2, param) that reads a
 * source parameter while ctx1 is current and writes the same parameter
 * while ctx2 is current, restoring whichever context was current before. */
#define _MAKE_SYNC(NAME, INIT_EXPR, GET_EXPR, SET_EXPR)  \
  void NAME (ALuint sourceID1, ALuint sourceID2,         \
             ALCcontext *ctx1, ALCcontext *ctx2,         \
             ALenum param){                              \
    INIT_EXPR;                                           \
    ALCcontext *current = alcGetCurrentContext();        \
    alcMakeContextCurrent(ctx1);                         \
    GET_EXPR;                                            \
    alcMakeContextCurrent(ctx2);                         \
    SET_EXPR;                                            \
    alcMakeContextCurrent(current);                      \
  }

/* Single-valued parameters (e.g. AL_GAIN). */
#define MAKE_SYNC(NAME, TYPE, GET, SET)                  \
  _MAKE_SYNC(NAME,                                       \
             TYPE value,                                 \
             GET(sourceID1, param, &value),              \
             SET(sourceID2, param, value))

/* Three-valued parameters (e.g. AL_POSITION). */
#define MAKE_SYNC3(NAME, TYPE, GET, SET)                         \
  _MAKE_SYNC(NAME,                                               \
             TYPE value1; TYPE value2; TYPE value3;,             \
             GET(sourceID1, param, &value1, &value2, &value3),   \
             SET(sourceID2, param, value1, value2, value3))

MAKE_SYNC( syncSourcei,  ALint,   alGetSourcei,  alSourcei);
MAKE_SYNC( syncSourcef,  ALfloat, alGetSourcef,  alSourcef);
MAKE_SYNC3(syncSource3i, ALint,   alGetSource3i, alSource3i);
MAKE_SYNC3(syncSource3f, ALfloat, alGetSource3f, alSource3f);
217 #+end_src
219 Setting the state of an =OpenAl= source is done with the =alSourcei=,
220 =alSourcef=, =alSource3i=, and =alSource3f= functions. In order to
221 completely synchronize two sources, it is necessary to use all of
222 them. These macros help to condense the otherwise repetitive
223 synchronization code involving these similar low-level =OpenAL= functions.
225 ** Source Synchronization
226 #+begin_src C
/* Copy every piece of externally-visible state from masterSource to
 * slaveSource.  Each source lives in its own context, so the helpers
 * switch contexts as needed; the previously current context is
 * restored before returning. */
void syncSources(ALsource *masterSource, ALsource *slaveSource,
                 ALCcontext *masterCtx, ALCcontext *slaveCtx){
    ALuint master = masterSource->source;
    ALuint slave = slaveSource->source;
    ALCcontext *current = alcGetCurrentContext();

    // Scalar float parameters.
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_PITCH);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_GAIN);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_MAX_DISTANCE);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_ROLLOFF_FACTOR);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_REFERENCE_DISTANCE);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_MIN_GAIN);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_MAX_GAIN);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_CONE_OUTER_GAIN);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_CONE_INNER_ANGLE);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_CONE_OUTER_ANGLE);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_SEC_OFFSET);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_SAMPLE_OFFSET);
    syncSourcef(master,slave,masterCtx,slaveCtx,AL_BYTE_OFFSET);

    // Vector (x, y, z) parameters.
    syncSource3f(master,slave,masterCtx,slaveCtx,AL_POSITION);
    syncSource3f(master,slave,masterCtx,slaveCtx,AL_VELOCITY);
    syncSource3f(master,slave,masterCtx,slaveCtx,AL_DIRECTION);

    // Integer parameters.
    syncSourcei(master,slave,masterCtx,slaveCtx,AL_SOURCE_RELATIVE);
    syncSourcei(master,slave,masterCtx,slaveCtx,AL_LOOPING);

    alcMakeContextCurrent(masterCtx);
    ALint source_type;
    alGetSourcei(master, AL_SOURCE_TYPE, &source_type);

    // Only static sources are currently synchronized!
    // Attach the master's buffer to the slave if they differ.
    if (AL_STATIC == source_type){
        ALint master_buffer;
        ALint slave_buffer;
        alGetSourcei(master, AL_BUFFER, &master_buffer);
        alcMakeContextCurrent(slaveCtx);
        alGetSourcei(slave, AL_BUFFER, &slave_buffer);
        if (master_buffer != slave_buffer){
            alSourcei(slave, AL_BUFFER, master_buffer);
        }
    }

    // Synchronize the state of the two sources.
    alcMakeContextCurrent(masterCtx);
    ALint masterState;
    ALint slaveState;

    alGetSourcei(master, AL_SOURCE_STATE, &masterState);
    alcMakeContextCurrent(slaveCtx);
    alGetSourcei(slave, AL_SOURCE_STATE, &slaveState);

    // Drive the slave into the master's playback state.
    if (masterState != slaveState){
        switch (masterState){
        case AL_INITIAL : alSourceRewind(slave); break;
        case AL_PLAYING : alSourcePlay(slave); break;
        case AL_PAUSED : alSourcePause(slave); break;
        case AL_STOPPED : alSourceStop(slave); break;
        }
    }
    // Restore whatever context was previously active.
    alcMakeContextCurrent(current);
}
290 #+end_src
291 This function is long because it has to exhaustively go through all the
292 possible state that a source can have and make sure that it is the
293 same between the master and slave sources. I'd like to take this
294 moment to salute the [[http://connect.creativelabs.com/openal/Documentation/Forms/AllItems.aspx][=OpenAL= Reference Manual]], which provides a very
295 good description of =OpenAL='s internals.
297 ** Context Synchronization
298 #+begin_src C
/* Ensure `slave` mirrors `master`: create any missing sources in the
 * slave, then sync each master source to the slave source at the same
 * index.  The previously current context is restored before returning. */
void syncContexts(ALCcontext *master, ALCcontext *slave){
    /* If there aren't sufficient sources in slave to mirror
       the sources in master, create them. */
    ALCcontext *current = alcGetCurrentContext();

    UIntMap *masterSourceMap = &(master->SourceMap);
    UIntMap *slaveSourceMap = &(slave->SourceMap);
    ALuint numMasterSources = masterSourceMap->size;
    ALuint numSlaveSources = slaveSourceMap->size;

    alcMakeContextCurrent(slave);
    if (numSlaveSources < numMasterSources){
        ALuint numMissingSources = numMasterSources - numSlaveSources;
        ALuint newSources[numMissingSources];
        // NOTE(review): alGenSources failure is not checked; the pairing
        // loop below assumes it succeeded.
        alGenSources(numMissingSources, newSources);
    }

    /* Now, slave is guaranteed to have at least as many sources
       as master. Sync each source from master to the corresponding
       source in slave. */
    int i;
    for(i = 0; i < masterSourceMap->size; i++){
        syncSources((ALsource*)masterSourceMap->array[i].value,
                    (ALsource*)slaveSourceMap->array[i].value,
                    master, slave);
    }
    alcMakeContextCurrent(current);
}
327 #+end_src
329 Most of the hard work in Context Synchronization is done in
330 =syncSources()=. The only thing that =syncContexts()= has to worry
331 about is automatically creating new sources whenever a slave context
332 does not have the same number of sources as the master context.
334 * Context Creation
335 #+begin_src C
336 static void addContext(ALCdevice *Device, ALCcontext *context){
337 send_data *data = (send_data*)Device->ExtraData;
338 // expand array if necessary
339 if (data->numContexts >= data->maxContexts){
340 ALuint newMaxContexts = data->maxContexts*2 + 1;
341 data->contexts = realloc(data->contexts, newMaxContexts*sizeof(context_data));
342 data->maxContexts = newMaxContexts;
343 }
344 // create context_data and add it to the main array
345 context_data *ctxData;
346 ctxData = (context_data*)calloc(1, sizeof(*ctxData));
347 ctxData->renderBuffer =
348 malloc(BytesFromDevFmt(Device->FmtType) *
349 Device->NumChan * Device->UpdateSize);
350 ctxData->ctx = context;
352 data->contexts[data->numContexts] = ctxData;
353 data->numContexts++;
354 }
355 #+end_src
357 Here, the slave context is created, and its data is stored in the
358 device-wide =ExtraData= structure. The =renderBuffer= that is created
359 here is where the rendered sound samples for this slave context will
360 eventually go.
362 * Context Switching
363 #+begin_src C
//////////////////// Context Switching

/* A device brings along with it two pieces of state
 * which have to be swapped in and out with each context.
 */

/* Load ctxData's private click-removal state into the device before
 * rendering that context. */
static void swapInContext(ALCdevice *Device, context_data *ctxData){
    memcpy(Device->ClickRemoval, ctxData->ClickRemoval, sizeof(ALfloat)*MAXCHANNELS);
    memcpy(Device->PendingClicks, ctxData->PendingClicks, sizeof(ALfloat)*MAXCHANNELS);
}

/* Save the device's click-removal state back into ctxData after
 * rendering, so the next context cannot corrupt it. */
static void saveContext(ALCdevice *Device, context_data *ctxData){
    memcpy(ctxData->ClickRemoval, Device->ClickRemoval, sizeof(ALfloat)*MAXCHANNELS);
    memcpy(ctxData->PendingClicks, Device->PendingClicks, sizeof(ALfloat)*MAXCHANNELS);
}
379 static ALCcontext **currentContext;
380 static ALuint currentNumContext;
382 /* By default, all contexts are rendered at once for each call to aluMixData.
383 * This function uses the internals of the ALCdecice struct to temporarly
384 * cause aluMixData to only render the chosen context.
385 */
386 static void limitContext(ALCdevice *Device, ALCcontext *ctx){
387 currentContext = Device->Contexts;
388 currentNumContext = Device->NumContexts;
389 Device->Contexts = &ctx;
390 Device->NumContexts = 1;
391 }
393 static void unLimitContext(ALCdevice *Device){
394 Device->Contexts = currentContext;
395 Device->NumContexts = currentNumContext;
396 }
397 #+end_src
399 =OpenAL= normally renders all Contexts in parallel, outputting the
400 whole result to the buffer. It does this by iterating over the
401 Device->Contexts array and rendering each context to the buffer in
402 turn. By temporarily setting Device->NumContexts to 1 and adjusting
403 the Device's context list to put the desired context-to-be-rendered
404 into position 0, we can trick =OpenAL= into rendering each slave
405 context separate from all the others.
407 ** Main Device Loop
408 #+begin_src C
409 //////////////////// Main Device Loop
411 /* Establish the LWJGL context as the master context, which will
412 * be synchronized to all the slave contexts
413 */
414 static void init(ALCdevice *Device){
415 ALCcontext *masterContext = alcGetCurrentContext();
416 addContext(Device, masterContext);
417 }
420 static void renderData(ALCdevice *Device, int samples){
421 if(!Device->Connected){return;}
422 send_data *data = (send_data*)Device->ExtraData;
423 ALCcontext *current = alcGetCurrentContext();
425 ALuint i;
426 for (i = 1; i < data->numContexts; i++){
427 syncContexts(data->contexts[0]->ctx , data->contexts[i]->ctx);
428 }
430 if ((uint) samples > Device->UpdateSize){
431 printf("exceeding internal buffer size; dropping samples\n");
432 printf("requested %d; available %d\n", samples, Device->UpdateSize);
433 samples = (int) Device->UpdateSize;
434 }
436 for (i = 0; i < data->numContexts; i++){
437 context_data *ctxData = data->contexts[i];
438 ALCcontext *ctx = ctxData->ctx;
439 alcMakeContextCurrent(ctx);
440 limitContext(Device, ctx);
441 swapInContext(Device, ctxData);
442 aluMixData(Device, ctxData->renderBuffer, samples);
443 saveContext(Device, ctxData);
444 unLimitContext(Device);
445 }
446 alcMakeContextCurrent(current);
447 }
448 #+end_src
450 The main loop synchronizes the master LWJGL context with all the slave
451 contexts, then walks each context, rendering just that context to its
452 audio-sample storage buffer.
454 * JNI Methods
455 #+begin_src C
456 //////////////////// JNI Methods
458 #include "com_aurellem_send_AudioSend.h"
460 /*
461 * Class: com_aurellem_send_AudioSend
462 * Method: nstep
463 * Signature: (JI)V
464 */
465 JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_nstep
466 (JNIEnv *env, jclass clazz, jlong device, jint samples){
467 UNUSED(env);UNUSED(clazz);UNUSED(device);
468 renderData((ALCdevice*)((intptr_t)device), samples);
469 }
471 /*
472 * Class: com_aurellem_send_AudioSend
473 * Method: ngetSamples
474 * Signature: (JLjava/nio/ByteBuffer;III)V
475 */
476 JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_ngetSamples
477 (JNIEnv *env, jclass clazz, jlong device, jobject buffer, jint position,
478 jint samples, jint n){
479 UNUSED(clazz);
481 ALvoid *buffer_address =
482 ((ALbyte *)(((char*)(*env)->GetDirectBufferAddress(env, buffer)) + position));
483 ALCdevice *recorder = (ALCdevice*) ((intptr_t)device);
484 send_data *data = (send_data*)recorder->ExtraData;
485 if ((ALuint)n > data->numContexts){return;}
486 memcpy(buffer_address, data->contexts[n]->renderBuffer,
487 BytesFromDevFmt(recorder->FmtType) * recorder->NumChan * samples);
488 }
490 /*
491 * Class: com_aurellem_send_AudioSend
492 * Method: naddListener
493 * Signature: (J)V
494 */
495 JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_naddListener
496 (JNIEnv *env, jclass clazz, jlong device){
497 UNUSED(env); UNUSED(clazz);
498 //printf("creating new context via naddListener\n");
499 ALCdevice *Device = (ALCdevice*) ((intptr_t)device);
500 ALCcontext *new = alcCreateContext(Device, NULL);
501 addContext(Device, new);
502 }
504 /*
505 * Class: com_aurellem_send_AudioSend
506 * Method: nsetNthListener3f
507 * Signature: (IFFFJI)V
508 */
509 JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_nsetNthListener3f
510 (JNIEnv *env, jclass clazz, jint param,
511 jfloat v1, jfloat v2, jfloat v3, jlong device, jint contextNum){
512 UNUSED(env);UNUSED(clazz);
514 ALCdevice *Device = (ALCdevice*) ((intptr_t)device);
515 send_data *data = (send_data*)Device->ExtraData;
517 ALCcontext *current = alcGetCurrentContext();
518 if ((ALuint)contextNum > data->numContexts){return;}
519 alcMakeContextCurrent(data->contexts[contextNum]->ctx);
520 alListener3f(param, v1, v2, v3);
521 alcMakeContextCurrent(current);
522 }
524 /*
525 * Class: com_aurellem_send_AudioSend
526 * Method: nsetNthListenerf
527 * Signature: (IFJI)V
528 */
529 JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_nsetNthListenerf
530 (JNIEnv *env, jclass clazz, jint param, jfloat v1, jlong device,
531 jint contextNum){
533 UNUSED(env);UNUSED(clazz);
535 ALCdevice *Device = (ALCdevice*) ((intptr_t)device);
536 send_data *data = (send_data*)Device->ExtraData;
538 ALCcontext *current = alcGetCurrentContext();
539 if ((ALuint)contextNum > data->numContexts){return;}
540 alcMakeContextCurrent(data->contexts[contextNum]->ctx);
541 alListenerf(param, v1);
542 alcMakeContextCurrent(current);
543 }
545 /*
546 * Class: com_aurellem_send_AudioSend
547 * Method: ninitDevice
548 * Signature: (J)V
549 */
550 JNIEXPORT void JNICALL Java_com_aurellem_send_AudioSend_ninitDevice
551 (JNIEnv *env, jclass clazz, jlong device){
552 UNUSED(env);UNUSED(clazz);
554 ALCdevice *Device = (ALCdevice*) ((intptr_t)device);
555 init(Device);
557 }
560 /*
561 * Class: com_aurellem_send_AudioSend
562 * Method: ngetAudioFormat
563 * Signature: (J)Ljavax/sound/sampled/AudioFormat;
564 */
565 JNIEXPORT jobject JNICALL Java_com_aurellem_send_AudioSend_ngetAudioFormat
566 (JNIEnv *env, jclass clazz, jlong device){
567 UNUSED(clazz);
568 jclass AudioFormatClass =
569 (*env)->FindClass(env, "javax/sound/sampled/AudioFormat");
570 jmethodID AudioFormatConstructor =
571 (*env)->GetMethodID(env, AudioFormatClass, "<init>", "(FIIZZ)V");
573 ALCdevice *Device = (ALCdevice*) ((intptr_t)device);
575 //float frequency
577 int isSigned;
578 switch (Device->FmtType)
579 {
580 case DevFmtUByte:
581 case DevFmtUShort: isSigned = 0; break;
582 default : isSigned = 1;
583 }
584 float frequency = Device->Frequency;
585 int bitsPerFrame = (8 * BytesFromDevFmt(Device->FmtType));
586 int channels = Device->NumChan;
589 //printf("freq = %f, bpf = %d, channels = %d, signed? = %d\n",
590 // frequency, bitsPerFrame, channels, isSigned);
592 jobject format = (*env)->
593 NewObject(
594 env,AudioFormatClass,AudioFormatConstructor,
595 frequency,
596 bitsPerFrame,
597 channels,
598 isSigned,
599 0);
600 return format;
601 }
603 //////////////////// Device Initilization / Management
605 static const ALCchar sendDevice[] = "Multiple Audio Send";
607 static ALCboolean send_open_playback(ALCdevice *device,
608 const ALCchar *deviceName)
609 {
610 send_data *data;
611 // stop any buffering for stdout, so that I can
612 // see the printf statements in my terminal immediatley
613 setbuf(stdout, NULL);
615 if(!deviceName)
616 deviceName = sendDevice;
617 else if(strcmp(deviceName, sendDevice) != 0)
618 return ALC_FALSE;
619 data = (send_data*)calloc(1, sizeof(*data));
620 device->szDeviceName = strdup(deviceName);
621 device->ExtraData = data;
622 return ALC_TRUE;
623 }
625 static void send_close_playback(ALCdevice *device)
626 {
627 send_data *data = (send_data*)device->ExtraData;
628 alcMakeContextCurrent(NULL);
629 ALuint i;
630 // Destroy all slave contexts. LWJGL will take care of
631 // its own context.
632 for (i = 1; i < data->numContexts; i++){
633 context_data *ctxData = data->contexts[i];
634 alcDestroyContext(ctxData->ctx);
635 free(ctxData->renderBuffer);
636 free(ctxData);
637 }
638 free(data);
639 device->ExtraData = NULL;
640 }
/* Backend reset hook: only the channel order needs (re)establishing;
 * everything else uses the device defaults. */
static ALCboolean send_reset_playback(ALCdevice *device)
{
    SetDefaultWFXChannelOrder(device);
    return ALC_TRUE;
}

/* Backend stop hook: nothing to stop for this device. */
static void send_stop_playback(ALCdevice *Device){
    UNUSED(Device);
}
/* Playback entry points for this backend; capture is unsupported. */
static const BackendFuncs send_funcs = {
    send_open_playback,
    send_close_playback,
    send_reset_playback,
    send_stop_playback,
    NULL,
    NULL, /* These would be filled with functions to */
    NULL, /* handle capturing audio if we were into that */
    NULL, /* sort of thing... */
    NULL,
    NULL
};

/* Called by OpenAL's backend registry at startup to install this
 * backend's function table. */
ALCboolean alc_send_init(BackendFuncs *func_list){
    *func_list = send_funcs;
    return ALC_TRUE;
}
/* No global state to tear down. */
void alc_send_deinit(void){}

/* Advertise the Send device under playback probes; it offers no
 * capture devices, so CAPTURE_DEVICE_PROBE adds nothing. */
void alc_send_probe(enum DevProbe type)
{
    switch(type)
    {
        case DEVICE_PROBE:
            AppendDeviceList(sendDevice);
            break;
        case ALL_DEVICE_PROBE:
            AppendAllDeviceList(sendDevice);
            break;
        case CAPTURE_DEVICE_PROBE:
            break;
    }
}
686 #+end_src
695 #+srcname: ears
696 #+begin_src clojure
(ns cortex.hearing)
(use 'cortex.world)
(use 'cortex.import)
(use 'clojure.contrib.def)
(cortex.import/mega-import-jme3)
(rlm.rlm-commands/help)
;; BUGFIX: java.nio.ByteBuffer and java.awt.image.BufferedImage were
;; each imported twice; duplicates removed.
;; NOTE(review): the AWT/Swing imports are not referenced in this file's
;; definitions — confirm they are needed before removing.
(import java.nio.ByteBuffer)
(import java.awt.image.BufferedImage)
(import java.awt.Color)
(import java.awt.Dimension)
(import java.awt.Graphics)
(import java.awt.Graphics2D)
(import java.awt.event.WindowAdapter)
(import java.awt.event.WindowEvent)
(import javax.swing.JFrame)
(import javax.swing.JPanel)
(import javax.swing.SwingUtilities)
(import javax.swing.ImageIcon)
(import javax.swing.JOptionPane)
(import java.awt.image.ImageObserver)

(import 'com.jme3.capture.SoundProcessor)
720 (import 'com.jme3.capture.SoundProcessor)
(defn sound-processor
  "deals with converting ByteBuffers into Arrays of bytes so that the
   continuation functions can be defined in terms of immutable stuff."
  [continuation]
  (proxy [SoundProcessor] []
    (cleanup [])
    (process
      [#^ByteBuffer audioSamples numSamples]
      ;; copy the native buffer into an immutable vector before handing
      ;; it to the continuation
      (no-exceptions
       (let [byte-array (byte-array numSamples)]
         (.get audioSamples byte-array 0 numSamples)
         (continuation
          (vec byte-array)))))))
(defn add-ear
  "add an ear to the world. The continuation function will be called
   on the FFT or the sounds which the ear hears in the given
   timeframe. Sound is 3D."
  [world listener continuation]
  (let [renderer (.getAudioRenderer world)]
    ;; register the listener and wrap the continuation so it receives
    ;; immutable byte vectors (see sound-processor)
    (.addListener renderer listener)
    (.registerSoundProcessor renderer listener
                             (sound-processor continuation))
    listener))
748 #+end_src
752 #+srcname: test-hearing
753 #+begin_src clojure :results silent
(ns test.hearing)
(use 'cortex.world)
(use 'cortex.import)
(use 'clojure.contrib.def)
(use 'body.ear)
(cortex.import/mega-import-jme3)
(rlm.rlm-commands/help)

;; World setup: attach one listener whose continuation prints the first
;; byte of each batch of heard samples.
(defn setup-fn [world]
  (let [listener (Listener.)]
    (add-ear world listener #(println (nth % 0)))))
(defn play-sound
  "Key-action handler: play `node` when `value` is falsey (i.e. on key
   release)."
  [node world value]
  ;; idiom: when-not replaces (if (not value) (do ...))
  (when-not value
    (.playSource (.getAudioRenderer world) node)))
;; Build a world containing one AudioNode (Sounds/pure.wav); pressing
;; space triggers play-sound, which the listener added in setup-fn hears.
(defn test-world []
  (let [node1 (AudioNode. (asset-manager) "Sounds/pure.wav" false false)]
    (world
     (Node.)
     {"key-space" (partial play-sound node1)}
     setup-fn
     no-op
     )))
781 #+end_src
785 * Example
787 * COMMENT Code Generation
789 #+begin_src clojure :tangle ../../cortex/src/cortex/hearing.clj
790 <<ears>>
791 #+end_src
793 #+begin_src clojure :tangle ../../cortex/src/test/hearing.clj
794 <<test-hearing>>
795 #+end_src
798 #+begin_src C :tangle ../Alc/backends/send.c
799 <<send>>
800 #+end_src