# HG changeset patch
# User Robert McIntyre
# Date 1319573857 25200
# Node ID ed256a687dfee4d51539b50eae14e06c0fdcd7ec
# Parent 96298d83959cdb6b0d539726c9ccdfa938946cd4
removed very old crufty java program

diff -r 96298d83959c -r ed256a687dfe RecordAudioRenderer.java
--- a/RecordAudioRenderer.java Tue Oct 25 13:16:27 2011 -0700
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1183 +0,0 @@
-package com.jme3.capture;
-
-import static org.lwjgl.openal.AL10.AL_BUFFER;
-import static org.lwjgl.openal.AL10.AL_BUFFERS_PROCESSED;
-import static org.lwjgl.openal.AL10.AL_CONE_INNER_ANGLE;
-import static org.lwjgl.openal.AL10.AL_CONE_OUTER_ANGLE;
-import static org.lwjgl.openal.AL10.AL_CONE_OUTER_GAIN;
-import static org.lwjgl.openal.AL10.AL_DIRECTION;
-import static org.lwjgl.openal.AL10.AL_FALSE;
-import static org.lwjgl.openal.AL10.AL_FORMAT_MONO16;
-import static org.lwjgl.openal.AL10.AL_FORMAT_MONO8;
-import static org.lwjgl.openal.AL10.AL_FORMAT_STEREO16;
-import static org.lwjgl.openal.AL10.AL_FORMAT_STEREO8;
-import static org.lwjgl.openal.AL10.AL_GAIN;
-import static org.lwjgl.openal.AL10.AL_LOOPING;
-import static org.lwjgl.openal.AL10.AL_MAX_DISTANCE;
-import static org.lwjgl.openal.AL10.AL_ORIENTATION;
-import static org.lwjgl.openal.AL10.AL_PAUSED;
-import static org.lwjgl.openal.AL10.AL_PITCH;
-import static org.lwjgl.openal.AL10.AL_POSITION;
-import static org.lwjgl.openal.AL10.AL_REFERENCE_DISTANCE;
-import static org.lwjgl.openal.AL10.AL_RENDERER;
-import static org.lwjgl.openal.AL10.AL_SOURCE_RELATIVE;
-import static org.lwjgl.openal.AL10.AL_SOURCE_STATE;
-import static org.lwjgl.openal.AL10.AL_STOPPED;
-import static org.lwjgl.openal.AL10.AL_TRUE;
-import static org.lwjgl.openal.AL10.AL_VELOCITY;
-import static org.lwjgl.openal.AL10.AL_VENDOR;
-import static org.lwjgl.openal.AL10.AL_VERSION;
-import static org.lwjgl.openal.AL10.alBufferData;
-import static org.lwjgl.openal.AL10.alDeleteBuffers;
-import static org.lwjgl.openal.AL10.alDeleteSources;
-import static org.lwjgl.openal.AL10.alGenBuffers;
-import static org.lwjgl.openal.AL10.alGenSources;
-import static org.lwjgl.openal.AL10.alGetError;
-import static org.lwjgl.openal.AL10.alGetSourcei;
-import static org.lwjgl.openal.AL10.alGetString;
-import static org.lwjgl.openal.AL10.alListener;
-import static org.lwjgl.openal.AL10.alListener3f;
-import static org.lwjgl.openal.AL10.alListenerf;
-import static org.lwjgl.openal.AL10.alSource3f;
-import static org.lwjgl.openal.AL10.alSourcePause;
-import static org.lwjgl.openal.AL10.alSourcePlay;
-import static org.lwjgl.openal.AL10.alSourceQueueBuffers;
-import static org.lwjgl.openal.AL10.alSourceStop;
-import static org.lwjgl.openal.AL10.alSourceUnqueueBuffers;
-import static org.lwjgl.openal.AL10.alSourcef;
-import static org.lwjgl.openal.AL10.alSourcei;
-
-import java.lang.reflect.Field;
-import java.nio.ByteBuffer;
-import java.nio.FloatBuffer;
-import java.nio.IntBuffer;
-import java.util.ArrayList;
-import java.util.Vector;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.lwjgl.LWJGLException;
-import org.lwjgl.openal.AL;
-import org.lwjgl.openal.AL11;
-import org.lwjgl.openal.ALC10;
-import org.lwjgl.openal.ALCdevice;
-import org.lwjgl.openal.EFX10;
-import org.lwjgl.openal.OpenALException;
-
-import com.jme3.audio.AudioBuffer;
-import com.jme3.audio.AudioData;
-import com.jme3.audio.AudioNode;
-import com.jme3.audio.AudioNode.Status;
-import com.jme3.audio.AudioParam;
-import
com.jme3.audio.AudioRenderer; -import com.jme3.audio.AudioStream; -import com.jme3.audio.Environment; -import com.jme3.audio.Filter; -import com.jme3.audio.Listener; -import com.jme3.audio.ListenerParam; -import com.jme3.audio.LowPassFilter; -import com.jme3.math.Vector3f; -import com.jme3.util.BufferUtils; - - - -public class RecordAudioRenderer implements AudioRenderer, Runnable { - - - - public static void getMainSamples(){ - - } - - - private static final Logger logger = Logger.getLogger(RecordAudioRenderer.class.getName()); - - // When multiplied by STREAMING_BUFFER_COUNT, will equal 44100 * 2 * 2 - // which is exactly 1 second of audio. - private static final int BUFFER_SIZE = 35280; - private static final int STREAMING_BUFFER_COUNT = 5; - - private final static int MAX_NUM_CHANNELS = 2; - private IntBuffer ib = BufferUtils.createIntBuffer(1); - private final FloatBuffer fb = BufferUtils.createVector3Buffer(2); - private final ByteBuffer nativeBuf = BufferUtils.createByteBuffer(BUFFER_SIZE); - private final byte[] arrayBuf = new byte[BUFFER_SIZE]; - - private int[] channels; - private AudioNode[] chanSrcs; - private int nextChan = 0; - private ArrayList freeChans = new ArrayList(); - - private Listener listener; - private boolean audioDisabled = false; - - private boolean supportEfx = false; - private int auxSends = 0; - private int reverbFx = -1; - private int reverbFxSlot = -1; - - // RLM: this is to call the native methods which require the OpenAL device ID. - // currently it is obtained through reflection. - private long deviceID; - - // Update audio 20 times per second - private static final float UPDATE_RATE = 0.05f; - - private final Thread audioThread = new Thread(this, "jME3 Audio Thread"); - private final AtomicBoolean threadLock = new AtomicBoolean(false); - - public RecordAudioRenderer(){ - } - - public static native void helloEveryone(); - - - public static native void nstep(long device); - public void step(){ - nstep(this.deviceID); - } - - - - public void getMainSamples(ByteBuffer buffer){ - ngetMainSamples(this.deviceID, buffer, buffer.position()); - } - public static native void ngetMainSamples(long device, ByteBuffer buffer, int position); - - - public void getAuxSamples(ByteBuffer buffer){ - ngetAuxSamples(this.deviceID, buffer, buffer.position()); - } - public static native void ngetAuxSamples(long device, ByteBuffer buffer, int position); - - - - public void initialize(){ - if (!audioThread.isAlive()){ - audioThread.setDaemon(true); - audioThread.setPriority(Thread.NORM_PRIORITY+1); - audioThread.start(); - }else{ - throw new IllegalStateException("Initialize already called"); - } - } - - private void checkDead(){ - if (audioThread.getState() == Thread.State.TERMINATED) - throw new IllegalStateException("Audio thread is terminated"); - } - - public void run(){ - initInThread(); - synchronized (threadLock){ - threadLock.set(true); - threadLock.notifyAll(); - } - - - helloEveryone(); - System.out.println("AudioRecorder: Trying to call native methods."); - System.out.println("our device ID is : " + this.deviceID); - - - - - long updateRateNanos = (long) (UPDATE_RATE * 1000000000); - mainloop: while (true){ - long startTime = System.nanoTime(); - - if (Thread.interrupted()) - break; - - synchronized (threadLock){ - updateInThread(UPDATE_RATE); - } - - long endTime = System.nanoTime(); - long diffTime = endTime - startTime; - - if (diffTime < updateRateNanos){ - long desiredEndTime = startTime + updateRateNanos; - while (System.nanoTime() < desiredEndTime){ - try{ - 
Thread.sleep(1); - }catch (InterruptedException ex){ - break mainloop; - } - } - } - } - - synchronized (threadLock){ - cleanupInThread(); - } - } - - public void initInThread(){ - try{ - if (!AL.isCreated()){ - AL.create("Aurellem", 44100, 15, false); - } - }catch (OpenALException ex){ - logger.log(Level.SEVERE, "Failed to load audio library", ex); - audioDisabled = true; - return; - }catch (LWJGLException ex){ - logger.log(Level.SEVERE, "Failed to load audio library", ex); - audioDisabled = true; - return; - } - - ALCdevice device = AL.getDevice(); - - // RLM: use reflection to grab the ID of our device for use later. - try { - Field deviceIDField; - deviceIDField = ALCdevice.class.getDeclaredField("device"); - deviceIDField.setAccessible(true); - try {deviceID = (Long)deviceIDField.get(device);} - catch (IllegalArgumentException e) {e.printStackTrace();} - catch (IllegalAccessException e) {e.printStackTrace();} - deviceIDField.setAccessible(false);} - catch (SecurityException e) {e.printStackTrace();} - catch (NoSuchFieldException e) {e.printStackTrace();} - - - - String deviceName = ALC10.alcGetString(device, ALC10.ALC_DEVICE_SPECIFIER); - - logger.log(Level.FINER, "Audio Device: {0}", deviceName); - logger.log(Level.FINER, "Audio Vendor: {0}", alGetString(AL_VENDOR)); - logger.log(Level.FINER, "Audio Renderer: {0}", alGetString(AL_RENDERER)); - logger.log(Level.FINER, "Audio Version: {0}", alGetString(AL_VERSION)); - - // Find maximum # of sources supported by this implementation - // RLM: this may not be wise -- exceeding the number of available channels - // can crash some versions of OpenAL - ArrayList channelList = new ArrayList(); - for (int i = 0; i < MAX_NUM_CHANNELS; i++){ - int chan = alGenSources(); - if (alGetError() != 0){ - break; - }else{ - channelList.add(chan); - } - } - - channels = new int[channelList.size()]; - for (int i = 0; i < channels.length; i++){ - channels[i] = channelList.get(i); - } - - ib = BufferUtils.createIntBuffer(channels.length); - chanSrcs = new AudioNode[channels.length]; - - logger.log(Level.INFO, "AudioRenderer supports {0} channels", channels.length); - - supportEfx = ALC10.alcIsExtensionPresent(device, "ALC_EXT_EFX"); - // RLM: disable this for now. 
- supportEfx = false; - logger.log(Level.FINER, "Audio EFX support: {0}", supportEfx); - - if (supportEfx){ - ib.position(0).limit(1); - ALC10.alcGetInteger(device, EFX10.ALC_EFX_MAJOR_VERSION, ib); - int major = ib.get(0); - ib.position(0).limit(1); - ALC10.alcGetInteger(device, EFX10.ALC_EFX_MINOR_VERSION, ib); - int minor = ib.get(0); - logger.log(Level.INFO, "Audio effect extension version: {0}.{1}", new Object[]{major, minor}); - - ALC10.alcGetInteger(device, EFX10.ALC_MAX_AUXILIARY_SENDS, ib); - auxSends = ib.get(0); - logger.log(Level.INFO, "Audio max auxilary sends: {0}", auxSends); - - // create slot - ib.position(0).limit(1); - EFX10.alGenAuxiliaryEffectSlots(ib); - reverbFxSlot = ib.get(0); - - // create effect - ib.position(0).limit(1); - EFX10.alGenEffects(ib); - reverbFx = ib.get(0); - EFX10.alEffecti(reverbFx, EFX10.AL_EFFECT_TYPE, EFX10.AL_EFFECT_REVERB); - - // attach reverb effect to effect slot -// EFX10.alAuxiliaryEffectSloti(reverbFxSlot, EFX10.AL_EFFECTSLOT_EFFECT, reverbFx); - } - } - - public void cleanupInThread(){ - - - if (audioDisabled){ - AL.destroy(); - return; - } - - // delete channel-based sources - ib.clear(); - ib.put(channels); - ib.flip(); - alDeleteSources(ib); - - if (supportEfx){ - ib.position(0).limit(1); - ib.put(0, reverbFx); - EFX10.alDeleteEffects(ib); - - ib.position(0).limit(1); - ib.put(0, reverbFxSlot); - EFX10.alDeleteAuxiliaryEffectSlots(ib); - } - - // XXX: Delete other buffers/sources - AL.destroy(); - } - - public void cleanup(){ - // kill audio thread - - if (audioThread.isAlive()){ - audioThread.interrupt(); - } - - Byte[] data1 = new Byte[this.fullWaveData1.size()]; - data1 = this.fullWaveData1.toArray(data1); - - Byte[] data2 = new Byte[this.fullWaveData2.size()]; - data2 = this.fullWaveData2.toArray(data2); - System.out.println(this.fullWaveData1.size()); - System.out.println("Saving WAVE data!"); - /*for (int i = 0; i < data1.length;i++){ - System.out.print(data1[i]+","); - if (i%32 ==0){System.out.println();} - } - */ - - - StdAudio.save("/home/r/wave-output/data2.wav", data2); - StdAudio.save("/home/r/wave-output/data1.wav", data1); - } - - private void updateFilter(Filter f){ - int id = f.getId(); - if (id == -1){ - ib.position(0).limit(1); - EFX10.alGenFilters(ib); - id = ib.get(0); - f.setId(id); - } - - if (f instanceof LowPassFilter){ - LowPassFilter lpf = (LowPassFilter) f; - EFX10.alFilteri(id, EFX10.AL_FILTER_TYPE, EFX10.AL_FILTER_LOWPASS); - EFX10.alFilterf(id, EFX10.AL_LOWPASS_GAIN, lpf.getVolume()); - EFX10.alFilterf(id, EFX10.AL_LOWPASS_GAINHF, lpf.getHighFreqVolume()); - }else{ - throw new UnsupportedOperationException("Filter type unsupported: "+ - f.getClass().getName()); - } - - f.clearUpdateNeeded(); - } - - public void updateSourceParam(AudioNode src, AudioParam param){ - checkDead(); - synchronized (threadLock){ - while (!threadLock.get()){ - try { - threadLock.wait(); - } catch (InterruptedException ex) { - } - } - if (audioDisabled) - return; - - // There is a race condition in AudioNode that can - // cause this to be called for a node that has been - // detached from its channel. For example, setVolume() - // called from the render thread may see that that AudioNode - // still has a channel value but the audio thread may - // clear that channel before setVolume() gets to call - // updateSourceParam() (because the audio stopped playing - // on its own right as the volume was set). 
In this case, - // it should be safe to just ignore the update - if (src.getChannel() < 0) - return; - - assert src.getChannel() >= 0; - - int id = channels[src.getChannel()]; - switch (param){ - case Position: - if (!src.isPositional()) - return; - - Vector3f pos = src.getWorldTranslation(); - alSource3f(id, AL_POSITION, pos.x, pos.y, pos.z); - break; - case Velocity: - if (!src.isPositional()) - return; - - Vector3f vel = src.getVelocity(); - alSource3f(id, AL_VELOCITY, vel.x, vel.y, vel.z); - break; - case MaxDistance: - if (!src.isPositional()) - return; - - alSourcef(id, AL_MAX_DISTANCE, src.getMaxDistance()); - break; - case RefDistance: - if (!src.isPositional()) - return; - - alSourcef(id, AL_REFERENCE_DISTANCE, src.getRefDistance()); - break; - case ReverbFilter: - if (!src.isPositional() || !src.isReverbEnabled()) - return; - - int filter = EFX10.AL_FILTER_NULL; - if (src.getReverbFilter() != null){ - Filter f = src.getReverbFilter(); - if (f.isUpdateNeeded()){ - updateFilter(f); - } - filter = f.getId(); - } - AL11.alSource3i(id, EFX10.AL_AUXILIARY_SEND_FILTER, reverbFxSlot, 0, filter); - break; - case ReverbEnabled: - if (!src.isPositional()) - return; - - if (src.isReverbEnabled()){ - updateSourceParam(src, AudioParam.ReverbFilter); - }else{ - AL11.alSource3i(id, EFX10.AL_AUXILIARY_SEND_FILTER, 0, 0, EFX10.AL_FILTER_NULL); - } - break; - case IsPositional: - if (!src.isPositional()){ - // play in headspace - alSourcei(id, AL_SOURCE_RELATIVE, AL_TRUE); - alSource3f(id, AL_POSITION, 0,0,0); - alSource3f(id, AL_VELOCITY, 0,0,0); - }else{ - alSourcei(id, AL_SOURCE_RELATIVE, AL_FALSE); - updateSourceParam(src, AudioParam.Position); - updateSourceParam(src, AudioParam.Velocity); - updateSourceParam(src, AudioParam.MaxDistance); - updateSourceParam(src, AudioParam.RefDistance); - updateSourceParam(src, AudioParam.ReverbEnabled); - } - break; - case Direction: - if (!src.isDirectional()) - return; - - Vector3f dir = src.getDirection(); - alSource3f(id, AL_DIRECTION, dir.x, dir.y, dir.z); - break; - case InnerAngle: - if (!src.isDirectional()) - return; - - alSourcef(id, AL_CONE_INNER_ANGLE, src.getInnerAngle()); - break; - case OuterAngle: - if (!src.isDirectional()) - return; - - alSourcef(id, AL_CONE_OUTER_ANGLE, src.getOuterAngle()); - break; - case IsDirectional: - if (src.isDirectional()){ - updateSourceParam(src, AudioParam.Direction); - updateSourceParam(src, AudioParam.InnerAngle); - updateSourceParam(src, AudioParam.OuterAngle); - alSourcef(id, AL_CONE_OUTER_GAIN, 0); - }else{ - alSourcef(id, AL_CONE_INNER_ANGLE, 360); - alSourcef(id, AL_CONE_OUTER_ANGLE, 360); - alSourcef(id, AL_CONE_OUTER_GAIN, 1f); - } - break; - case DryFilter: - if (src.getDryFilter() != null){ - Filter f = src.getDryFilter(); - if (f.isUpdateNeeded()){ - updateFilter(f); - - // NOTE: must re-attach filter for changes to apply. 
- alSourcei(id, EFX10.AL_DIRECT_FILTER, f.getId()); - } - }else{ - alSourcei(id, EFX10.AL_DIRECT_FILTER, EFX10.AL_FILTER_NULL); - } - break; - case Looping: - if (src.isLooping()){ - if (!(src.getAudioData() instanceof AudioStream)){ - alSourcei(id, AL_LOOPING, AL_TRUE); - } - }else{ - alSourcei(id, AL_LOOPING, AL_FALSE); - } - break; - case Volume: - alSourcef(id, AL_GAIN, src.getVolume()); - break; - case Pitch: - alSourcef(id, AL_PITCH, src.getPitch()); - break; - } - } - } - - private void setSourceParams(int id, AudioNode src, boolean forceNonLoop){ - if (src.isPositional()){ - Vector3f pos = src.getWorldTranslation(); - Vector3f vel = src.getVelocity(); - alSource3f(id, AL_POSITION, pos.x, pos.y, pos.z); - alSource3f(id, AL_VELOCITY, vel.x, vel.y, vel.z); - alSourcef(id, AL_MAX_DISTANCE, src.getMaxDistance()); - alSourcef(id, AL_REFERENCE_DISTANCE, src.getRefDistance()); - alSourcei(id, AL_SOURCE_RELATIVE, AL_FALSE); - - if (src.isReverbEnabled()){ - int filter = EFX10.AL_FILTER_NULL; - if (src.getReverbFilter() != null){ - Filter f = src.getReverbFilter(); - if (f.isUpdateNeeded()){ - updateFilter(f); - } - filter = f.getId(); - } - AL11.alSource3i(id, EFX10.AL_AUXILIARY_SEND_FILTER, reverbFxSlot, 0, filter); - } - }else{ - // play in headspace - alSourcei(id, AL_SOURCE_RELATIVE, AL_TRUE); - alSource3f(id, AL_POSITION, 0,0,0); - alSource3f(id, AL_VELOCITY, 0,0,0); - } - - if (src.getDryFilter() != null){ - Filter f = src.getDryFilter(); - if (f.isUpdateNeeded()){ - updateFilter(f); - - // NOTE: must re-attach filter for changes to apply. - alSourcei(id, EFX10.AL_DIRECT_FILTER, f.getId()); - } - } - - if (forceNonLoop){ - alSourcei(id, AL_LOOPING, AL_FALSE); - }else{ - alSourcei(id, AL_LOOPING, src.isLooping() ? AL_TRUE : AL_FALSE); - } - alSourcef(id, AL_GAIN, src.getVolume()); - alSourcef(id, AL_PITCH, src.getPitch()); - alSourcef(id, AL11.AL_SEC_OFFSET, src.getTimeOffset()); - - if (src.isDirectional()){ - Vector3f dir = src.getDirection(); - alSource3f(id, AL_DIRECTION, dir.x, dir.y, dir.z); - alSourcef(id, AL_CONE_INNER_ANGLE, src.getInnerAngle()); - alSourcef(id, AL_CONE_OUTER_ANGLE, src.getOuterAngle()); - alSourcef(id, AL_CONE_OUTER_GAIN, 0); - }else{ - alSourcef(id, AL_CONE_INNER_ANGLE, 360); - alSourcef(id, AL_CONE_OUTER_ANGLE, 360); - alSourcef(id, AL_CONE_OUTER_GAIN, 1f); - } - } - - public void updateListenerParam(Listener listener, ListenerParam param){ - checkDead(); - synchronized (threadLock){ - while (!threadLock.get()){ - try { - threadLock.wait(); - } catch (InterruptedException ex) { - } - } - if (audioDisabled) - return; - - switch (param){ - case Position: - Vector3f pos = listener.getLocation(); - alListener3f(AL_POSITION, pos.x, pos.y, pos.z); - break; - case Rotation: - Vector3f dir = listener.getDirection(); - Vector3f up = listener.getUp(); - fb.rewind(); - fb.put(dir.x).put(dir.y).put(dir.z); - fb.put(up.x).put(up.y).put(up.z); - fb.flip(); - alListener(AL_ORIENTATION, fb); - break; - case Velocity: - Vector3f vel = listener.getVelocity(); - alListener3f(AL_VELOCITY, vel.x, vel.y, vel.z); - break; - case Volume: - alListenerf(AL_GAIN, listener.getVolume()); - break; - } - } - } - - private void setListenerParams(Listener listener){ - Vector3f pos = listener.getLocation(); - Vector3f vel = listener.getVelocity(); - Vector3f dir = listener.getDirection(); - Vector3f up = listener.getUp(); - - alListener3f(AL_POSITION, pos.x, pos.y, pos.z); - alListener3f(AL_VELOCITY, vel.x, vel.y, vel.z); - fb.rewind(); - fb.put(dir.x).put(dir.y).put(dir.z); - 
fb.put(up.x).put(up.y).put(up.z); - fb.flip(); - alListener(AL_ORIENTATION, fb); - alListenerf(AL_GAIN, listener.getVolume()); - } - - private int newChannel(){ - if (freeChans.size() > 0) - return freeChans.remove(0); - else if (nextChan < channels.length){ - return nextChan++; - }else{ - return -1; - } - } - - private void freeChannel(int index){ - if (index == nextChan-1){ - nextChan--; - } else{ - freeChans.add(index); - } - } - - public void setEnvironment(Environment env){ - checkDead(); - synchronized (threadLock){ - while (!threadLock.get()){ - try { - threadLock.wait(); - } catch (InterruptedException ex) { - } - } - if (audioDisabled) - return; - - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_DENSITY, env.getDensity()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_DIFFUSION, env.getDiffusion()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_GAIN, env.getGain()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_GAINHF, env.getGainHf()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_DECAY_TIME, env.getDecayTime()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_DECAY_HFRATIO, env.getDecayHFRatio()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_REFLECTIONS_GAIN, env.getReflectGain()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_REFLECTIONS_DELAY, env.getReflectDelay()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_LATE_REVERB_GAIN, env.getLateReverbGain()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_LATE_REVERB_DELAY, env.getLateReverbDelay()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_AIR_ABSORPTION_GAINHF, env.getAirAbsorbGainHf()); - EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_ROOM_ROLLOFF_FACTOR, env.getRoomRolloffFactor()); - - // attach effect to slot - EFX10.alAuxiliaryEffectSloti(reverbFxSlot, EFX10.AL_EFFECTSLOT_EFFECT, reverbFx); - } - } - - private boolean fillBuffer(AudioStream stream, int id){ - int size = 0; - int result; - - while (size < arrayBuf.length){ - result = stream.readSamples(arrayBuf, size, arrayBuf.length - size); - - if(result > 0){ - size += result; - }else{ - break; - } - } - - if(size == 0) - return false; - - nativeBuf.clear(); - nativeBuf.put(arrayBuf, 0, size); - nativeBuf.flip(); - - alBufferData(id, convertFormat(stream), nativeBuf, stream.getSampleRate()); - - return true; - } - - private boolean fillStreamingSource(int sourceId, AudioStream stream){ - if (!stream.isOpen()) - return false; - - boolean active = true; - int processed = alGetSourcei(sourceId, AL_BUFFERS_PROCESSED); - -// while((processed--) != 0){ - if (processed > 0){ - int buffer; - - ib.position(0).limit(1); - alSourceUnqueueBuffers(sourceId, ib); - buffer = ib.get(0); - - active = fillBuffer(stream, buffer); - - ib.position(0).limit(1); - ib.put(0, buffer); - alSourceQueueBuffers(sourceId, ib); - } - - if (!active && stream.isOpen()) - stream.close(); - - return active; - } - - private boolean attachStreamToSource(int sourceId, AudioStream stream){ - boolean active = true; - for (int id : stream.getIds()){ - active = fillBuffer(stream, id); - ib.position(0).limit(1); - ib.put(id).flip(); - alSourceQueueBuffers(sourceId, ib); - } - return active; - } - - private boolean attachBufferToSource(int sourceId, AudioBuffer buffer){ - alSourcei(sourceId, AL_BUFFER, buffer.getId()); - return true; - } - - private boolean attachAudioToSource(int sourceId, AudioData data){ - if (data instanceof AudioBuffer){ - return attachBufferToSource(sourceId, (AudioBuffer) data); - }else if (data instanceof AudioStream){ - return attachStreamToSource(sourceId, (AudioStream) data); - } - throw new 
UnsupportedOperationException(); - } - - private void clearChannel(int index){ - // make room at this channel - if (chanSrcs[index] != null){ - AudioNode src = chanSrcs[index]; - - int sourceId = channels[index]; - alSourceStop(sourceId); - - if (src.getAudioData() instanceof AudioStream){ - AudioStream str = (AudioStream) src.getAudioData(); - ib.position(0).limit(STREAMING_BUFFER_COUNT); - ib.put(str.getIds()).flip(); - alSourceUnqueueBuffers(sourceId, ib); - }else if (src.getAudioData() instanceof AudioBuffer){ - alSourcei(sourceId, AL_BUFFER, 0); - } - - if (src.getDryFilter() != null){ - // detach filter - alSourcei(sourceId, EFX10.AL_DIRECT_FILTER, EFX10.AL_FILTER_NULL); - } - if (src.isPositional()){ - AudioNode pas = (AudioNode) src; - if (pas.isReverbEnabled()) { - AL11.alSource3i(sourceId, EFX10.AL_AUXILIARY_SEND_FILTER, 0, 0, EFX10.AL_FILTER_NULL); - } - } - - chanSrcs[index] = null; - } - } - - public void update(float tpf){ - //ByteBuffer test = BufferUtils.createByteBuffer(1); - //AurellemTransport.getAuxSamples(AL.getDevice(), test); - } - - Vector fullWaveData1 = new Vector(); - Vector fullWaveData2 = new Vector(); - - public void updateInThread(float tpf){ - if (audioDisabled) - return; - - step(); - ByteBuffer test = BufferUtils.createByteBuffer(4096); - test.clear(); - this.getMainSamples(test); - byte[] waveData = new byte[4096]; - test.get(waveData, 0, 4096); - //System.out.println("J DATA:"); - /*for (int j = 0; j < 1; j++){ - for(int i = 64 * j; i < (64*j) + 64; i++){ - System.out.print(waveData[i]); - } - System.out.println(); - }*/ - - ByteBuffer test2 = BufferUtils.createByteBuffer(4096); - test2.clear(); - this.getAuxSamples(test2); - byte[] waveData2 = new byte[4096]; - test2.get(waveData2, 0, 4096); - //System.out.print("wave1:"); - //for (int j = 0; j< 32; j++){ - // System.out.print(waveData[j]+","); - // } - //System.out.println(); - //System.out.print("wave2:"); - // for (int j = 0; j< 4096; j++){ - // if (waveData2[j] != 0){ - // System.out.println("fucked at : " + j); - // } - - /* System.out.print(waveData2[j]+","); - if (0 == (j % 64)){System.out.println();}*/ - //} - //System.out.println(); - - for (byte b : waveData){ - this.fullWaveData1.add(b); - } - - for (byte b : waveData2){ - this.fullWaveData2.add(b); - } - - - for (int i = 0; i < channels.length; i++){ - AudioNode src = chanSrcs[i]; - if (src == null) - continue; - - int sourceId = channels[i]; - - // is the source bound to this channel - // if false, it's an instanced playback - boolean boundSource = i == src.getChannel(); - - // source's data is streaming - boolean streaming = src.getAudioData() instanceof AudioStream; - - // only buffered sources can be bound - assert (boundSource && streaming) || (!streaming); - - int state = alGetSourcei(sourceId, AL_SOURCE_STATE); - boolean wantPlaying = src.getStatus() == Status.Playing; - boolean stopped = state == AL_STOPPED; - - if (streaming && wantPlaying){ - AudioStream stream = (AudioStream) src.getAudioData(); - if (stream.isOpen()){ - fillStreamingSource(sourceId, stream); - if (stopped) - alSourcePlay(sourceId); - - }else{ - if (stopped){ - // became inactive - src.setStatus(Status.Stopped); - src.setChannel(-1); - clearChannel(i); - freeChannel(i); - - // And free the audio since it cannot be - // played again anyway. 
- deleteAudioData(stream); - } - } - }else if (!streaming){ - boolean paused = state == AL_PAUSED; - - // make sure OAL pause state & source state coincide - assert (src.getStatus() == Status.Paused && paused) || (!paused); - - if (stopped){ - if (boundSource){ - src.setStatus(Status.Stopped); - src.setChannel(-1); - } - clearChannel(i); - freeChannel(i); - } - } - } - } - - public void setListener(Listener listener) { - checkDead(); - synchronized (threadLock){ - while (!threadLock.get()){ - try { - threadLock.wait(); - } catch (InterruptedException ex) { - } - } - if (audioDisabled) - return; - - if (this.listener != null){ - // previous listener no longer associated with current - // renderer - this.listener.setRenderer(null); - } - - this.listener = listener; - this.listener.setRenderer(this); - setListenerParams(listener); - } - } - - public void playSourceInstance(AudioNode src){ - checkDead(); - synchronized (threadLock){ - while (!threadLock.get()){ - try { - threadLock.wait(); - } catch (InterruptedException ex) { - } - } - if (audioDisabled) - return; - - if (src.getAudioData() instanceof AudioStream) - throw new UnsupportedOperationException( - "Cannot play instances " + - "of audio streams. Use playSource() instead."); - - if (src.getAudioData().isUpdateNeeded()){ - updateAudioData(src.getAudioData()); - } - - // create a new index for an audio-channel - int index = newChannel(); - if (index == -1) - return; - - int sourceId = channels[index]; - - clearChannel(index); - - // set parameters, like position and max distance - setSourceParams(sourceId, src, true); - attachAudioToSource(sourceId, src.getAudioData()); - chanSrcs[index] = src; - - // play the channel - alSourcePlay(sourceId); - } - } - - - public void playSource(AudioNode src) { - checkDead(); - synchronized (threadLock){ - while (!threadLock.get()){ - try { - threadLock.wait(); - } catch (InterruptedException ex) { - } - } - if (audioDisabled) - return; - - //assert src.getStatus() == Status.Stopped || src.getChannel() == -1; - - if (src.getStatus() == Status.Playing){ - return; - }else if (src.getStatus() == Status.Stopped){ - - // allocate channel to this source - int index = newChannel(); - if (index == -1) { - logger.log(Level.WARNING, "No channel available to play {0}", src); - return; - } - clearChannel(index); - src.setChannel(index); - - AudioData data = src.getAudioData(); - if (data.isUpdateNeeded()) - updateAudioData(data); - - chanSrcs[index] = src; - setSourceParams(channels[index], src, false); - attachAudioToSource(channels[index], data); - } - - alSourcePlay(channels[src.getChannel()]); - src.setStatus(Status.Playing); - } - } - - - public void pauseSource(AudioNode src) { - checkDead(); - synchronized (threadLock){ - while (!threadLock.get()){ - try { - threadLock.wait(); - } catch (InterruptedException ex) { - } - } - if (audioDisabled) - return; - - if (src.getStatus() == Status.Playing){ - assert src.getChannel() != -1; - - alSourcePause(channels[src.getChannel()]); - src.setStatus(Status.Paused); - } - } - } - - - public void stopSource(AudioNode src) { - synchronized (threadLock){ - while (!threadLock.get()){ - try { - threadLock.wait(); - } catch (InterruptedException ex) { - } - } - if (audioDisabled) - return; - - if (src.getStatus() != Status.Stopped){ - int chan = src.getChannel(); - assert chan != -1; // if it's not stopped, must have id - - src.setStatus(Status.Stopped); - src.setChannel(-1); - clearChannel(chan); - freeChannel(chan); - - if (src.getAudioData() instanceof AudioStream) { - 
- AudioStream stream = (AudioStream)src.getAudioData(); - if (stream.isOpen()) { - stream.close(); - } - - // And free the audio since it cannot be - // played again anyway. - deleteAudioData(src.getAudioData()); - } - } - } - } - - private int convertFormat(AudioData ad){ - switch (ad.getBitsPerSample()){ - case 8: - if (ad.getChannels() == 1) - return AL_FORMAT_MONO8; - else if (ad.getChannels() == 2) - return AL_FORMAT_STEREO8; - - break; - case 16: - if (ad.getChannels() == 1) - return AL_FORMAT_MONO16; - else - return AL_FORMAT_STEREO16; - } - throw new UnsupportedOperationException("Unsupported channels/bits combination: "+ - "bits="+ad.getBitsPerSample()+", channels="+ad.getChannels()); - } - - private void updateAudioBuffer(AudioBuffer ab){ - int id = ab.getId(); - if (ab.getId() == -1){ - ib.position(0).limit(1); - alGenBuffers(ib); - id = ib.get(0); - ab.setId(id); - } - - ab.getData().clear(); - alBufferData(id, convertFormat(ab), ab.getData(), ab.getSampleRate()); - ab.clearUpdateNeeded(); - } - - private void updateAudioStream(AudioStream as){ - if (as.getIds() != null){ - deleteAudioData(as); - } - - int[] ids = new int[STREAMING_BUFFER_COUNT]; - ib.position(0).limit(STREAMING_BUFFER_COUNT); - alGenBuffers(ib); - ib.position(0).limit(STREAMING_BUFFER_COUNT); - ib.get(ids); - - as.setIds(ids); - as.clearUpdateNeeded(); - } - - private void updateAudioData(AudioData ad){ - if (ad instanceof AudioBuffer){ - updateAudioBuffer((AudioBuffer) ad); - }else if (ad instanceof AudioStream){ - updateAudioStream((AudioStream) ad); - } - } - - public void deleteAudioData(AudioData ad){ - synchronized (threadLock){ - while (!threadLock.get()){ - try { - threadLock.wait(); - } catch (InterruptedException ex) { - } - } - if (audioDisabled) - return; - - if (ad instanceof AudioBuffer){ - AudioBuffer ab = (AudioBuffer) ad; - int id = ab.getId(); - if (id != -1){ - ib.put(0,id); - ib.position(0).limit(1); - alDeleteBuffers(ib); - ab.resetObject(); - } - }else if (ad instanceof AudioStream){ - AudioStream as = (AudioStream) ad; - int[] ids = as.getIds(); - if (ids != null){ - ib.clear(); - ib.put(ids).flip(); - alDeleteBuffers(ib); - as.resetObject(); - } - } - } - } - -} diff -r 96298d83959c -r ed256a687dfe cleanup-message.txt --- a/cleanup-message.txt Tue Oct 25 13:16:27 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,100 +0,0 @@ -My name is Robert McIntyre. I am seeking help packaging some changes -I've made to open-al. - -* tl;dr how do I distribute changes to open-al which involve adding a - new device? - -* Background / Motivation - -I'm working on an AI simulation involving multiple listeners, where -each listener is a separate AI entity. Since each AI entity can move -independently, I needed to add multiple listener functionality to -open-al. Furthermore, my entire simulation allows time to dilate -depending on how hard the entities are collectively "thinking," so -that the entities can keep up with the simulation. Therefore, I -needed to have a system that renders sound on-demand instead of trying -to render in real time as open-al does. - -I don't need any of the more advanced effects, just 3D positioning, -and I'm only using open-al from jMonkeyEngine3 that uses the LWJGL -bindings that importantly only allow access to one and only one -context. - -Under these constraints, I made a new device which renders sound in -small, user-defined increments. It must be explicitly told to render -sound or it won't do anything. 
It maintains a separate "auxiliary -context" for every additional listener, and syncs the sources from the -LWJGL context whenever it is about to render samples. I've tested it -and it works quite well for my purposes. So far, I've gotten 1,000 -separate listeners to work in a single simulation easily. - -The code is here: -http://aurellem.localhost/audio-send/html/send.html -No criticism is too harsh! - -Note that the java JNI bindings that are part of the device code right -now would be moved to a separate file the the same way that LWJGL does -it. I left them there for now to show how the device might be used. - -Although I made this for my own AI work, it's ideal for recording -perfect audio from a video game to create trailers/demos, since the -computer doesn't have to try to record the sound in real time. This -device could be used to record audio in any system that wraps open-al -and only exposes one context, which is what many wrappers do. - - -* Actual Question - -My question is about packaging --- how do you recommend I distribute -my new device? I got it to work by just grafting it on the open-al's -primitive object system, but this requires quite a few changes to main -open-al source files, and as far as I can tell requires me to -recompile open-al against my new device. - -I also don't want the user to be able to hide my devices presence -using their ~/.alsoftrc file, since that gets in the way of easy -recording when the system is wrapped several layers deep, and they've -already implicitly requested my device anyway by using my code in the -first place. - -The options I have thought of so far are: - -1.) Create my own C-artifact, compiled against open-al, which somehow -"hooks in" my device to open-al and forces open-al to use it to the -exclusion of all other devices. This new artifact would have bindings -for java, etc. I don't know how to do this, since there doesn't seem -to be any way to access the list of devices in Alc/ALc.c for example. -In order to add a new device to open-al I had to modify 5 separate -files, documented here: - -http://aurellem.localhost/audio-send/html/add-new-device.html - -and there doesn't seem to be a way to get the same effect -programmatically. - -2.) Strip down open-al to a minimal version that only has my device -and deal with selecting the right open-al library at a higher level, -depending on whether the user wants to record sound or actually hear -it. I don't like this because I can't easily benefit from -improvements in the main open-al distribution. It also would involve -more significant modification to jMonkeyEngine3's logic which selects -the appropriate C-artifact at runtime. - -3.) Get this new device added to open-al, and provide a java wrapper -for it in a separate artifact. Problem here is that this device does -not have the same semantics as the other devices --- it must be told -to render sound, doesn't support multiple user-created contexts, and -it exposes extra functions for retrieving the rendered sounds. It also -might be too "niche" for open-al proper. - -4.) Maybe abandon the device metaphor and use something better suited -to my problem that /can/ be done as in (1)? - - -I'm sure someone here knows enough about open-al's devices to give me -a better solution than these 4! All help would be most appreciated. 
-
-sincerely,
---Robert McIntyre
-
-
diff -r 96298d83959c -r ed256a687dfe org/cleanup-message.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/org/cleanup-message.txt Tue Oct 25 13:17:37 2011 -0700
@@ -0,0 +1,100 @@
+My name is Robert McIntyre. I am seeking help packaging some changes
+I've made to open-al.
+
+* tl;dr how do I distribute changes to open-al which involve adding a
+  new device?
+
+* Background / Motivation
+
+I'm working on an AI simulation involving multiple listeners, where
+each listener is a separate AI entity. Since each AI entity can move
+independently, I needed to add multiple listener functionality to
+open-al. Furthermore, my entire simulation allows time to dilate
+depending on how hard the entities are collectively "thinking," so
+that the entities can keep up with the simulation. Therefore, I
+needed to have a system that renders sound on-demand instead of trying
+to render in real time as open-al does.
+
+I don't need any of the more advanced effects, just 3D positioning,
+and I'm only using open-al from jMonkeyEngine3, which uses the LWJGL
+bindings that importantly only allow access to one and only one
+context.
+
+Under these constraints, I made a new device which renders sound in
+small, user-defined increments. It must be explicitly told to render
+sound or it won't do anything. It maintains a separate "auxiliary
+context" for every additional listener, and syncs the sources from the
+LWJGL context whenever it is about to render samples. I've tested it
+and it works quite well for my purposes. So far, I've gotten 1,000
+separate listeners to work in a single simulation easily.
+
+The code is here:
+http://aurellem.localhost/audio-send/html/send.html
+No criticism is too harsh!
+
+Note that the java JNI bindings that are part of the device code right
+now would be moved to a separate file the same way that LWJGL does
+it. I left them there for now to show how the device might be used.
+
+Although I made this for my own AI work, it's ideal for recording
+perfect audio from a video game to create trailers/demos, since the
+computer doesn't have to try to record the sound in real time. This
+device could be used to record audio in any system that wraps open-al
+and only exposes one context, which is what many wrappers do.
+
+
+* Actual Question
+
+My question is about packaging --- how do you recommend I distribute
+my new device? I got it to work by just grafting it onto open-al's
+primitive object system, but this requires quite a few changes to the
+main open-al source files, and as far as I can tell requires me to
+recompile open-al against my new device.
+
+I also don't want the user to be able to hide my device's presence
+using their ~/.alsoftrc file, since that gets in the way of easy
+recording when the system is wrapped several layers deep, and they've
+already implicitly requested my device anyway by using my code in the
+first place.
+
+The options I have thought of so far are:
+
+1.) Create my own C-artifact, compiled against open-al, which somehow
+"hooks in" my device to open-al and forces open-al to use it to the
+exclusion of all other devices. This new artifact would have bindings
+for java, etc. I don't know how to do this, since there doesn't seem
+to be any way to access the list of devices in Alc/ALc.c for example.
+In order to add a new device to open-al I had to modify 5 separate
+files, documented here:
+
+http://aurellem.localhost/audio-send/html/add-new-device.html
+
+and there doesn't seem to be a way to get the same effect
+programmatically.
+
+2.) Strip down open-al to a minimal version that only has my device
+and deal with selecting the right open-al library at a higher level,
+depending on whether the user wants to record sound or actually hear
+it. I don't like this because I can't easily benefit from
+improvements in the main open-al distribution. It also would involve
+more significant modification to jMonkeyEngine3's logic, which selects
+the appropriate C-artifact at runtime.
+
+3.) Get this new device added to open-al, and provide a java wrapper
+for it in a separate artifact. The problem here is that this device
+does not have the same semantics as the other devices --- it must be
+told to render sound, doesn't support multiple user-created contexts,
+and it exposes extra functions for retrieving the rendered sounds. It
+also might be too "niche" for open-al proper.
+
+4.) Maybe abandon the device metaphor and use something better suited
+to my problem that /can/ be done as in (1)?
+
+
+I'm sure someone here knows enough about open-al's devices to give me
+a better solution than these 4! All help would be most appreciated.
+
+sincerely,
+--Robert McIntyre
+
+
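For reference, the deleted RecordAudioRenderer above is driven in a simple pull fashion: step() asks the device to render its next increment, and getMainSamples() / getAuxSamples() copy the freshly rendered bytes out of the device, one ByteBuffer per listener. The sketch below only illustrates that pattern. It reuses names that actually appear in the deleted file (step(), getMainSamples(), BufferUtils.createByteBuffer(), the 4096-byte read size, and the StdAudio.save() helper referenced in cleanup()), but the class name, step count, and output path are invented for the example, and it assumes the renderer has already been initialized so that its native OpenAL device handle is set.

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import com.jme3.util.BufferUtils;

// Assumes com.jme3.capture.RecordAudioRenderer and the StdAudio helper
// used by the deleted file are available on the classpath.
public class OfflineRenderSketch {

    // Renders a fixed number of increments and saves the main listener's
    // audio to a WAV file.  How much audio one step() produces is decided
    // by the native device, not by this loop.
    public static void renderAndSave(RecordAudioRenderer renderer, int steps) {
        List<Byte> mainWave = new ArrayList<Byte>();
        ByteBuffer chunk = BufferUtils.createByteBuffer(4096);
        byte[] bytes = new byte[4096];

        for (int i = 0; i < steps; i++) {
            renderer.step();                 // render the next increment on demand
            chunk.clear();
            renderer.getMainSamples(chunk);  // main listener's freshly rendered samples
            chunk.get(bytes, 0, 4096);
            for (byte b : bytes) {
                mainWave.add(b);             // accumulate, as updateInThread() does
            }
        }

        Byte[] data = mainWave.toArray(new Byte[mainWave.size()]);
        StdAudio.save("/tmp/main-listener.wav", data);  // output path is illustrative
    }
}

An additional listener's audio would be pulled the same way through getAuxSamples(), one buffer per auxiliary context.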