rlm@0: package com.jme3.capture; rlm@0: rlm@0: import static org.lwjgl.openal.AL10.AL_BUFFER; rlm@0: import static org.lwjgl.openal.AL10.AL_BUFFERS_PROCESSED; rlm@0: import static org.lwjgl.openal.AL10.AL_CONE_INNER_ANGLE; rlm@0: import static org.lwjgl.openal.AL10.AL_CONE_OUTER_ANGLE; rlm@0: import static org.lwjgl.openal.AL10.AL_CONE_OUTER_GAIN; rlm@0: import static org.lwjgl.openal.AL10.AL_DIRECTION; rlm@0: import static org.lwjgl.openal.AL10.AL_FALSE; rlm@0: import static org.lwjgl.openal.AL10.AL_FORMAT_MONO16; rlm@0: import static org.lwjgl.openal.AL10.AL_FORMAT_MONO8; rlm@0: import static org.lwjgl.openal.AL10.AL_FORMAT_STEREO16; rlm@0: import static org.lwjgl.openal.AL10.AL_FORMAT_STEREO8; rlm@0: import static org.lwjgl.openal.AL10.AL_GAIN; rlm@0: import static org.lwjgl.openal.AL10.AL_LOOPING; rlm@0: import static org.lwjgl.openal.AL10.AL_MAX_DISTANCE; rlm@0: import static org.lwjgl.openal.AL10.AL_ORIENTATION; rlm@0: import static org.lwjgl.openal.AL10.AL_PAUSED; rlm@0: import static org.lwjgl.openal.AL10.AL_PITCH; rlm@0: import static org.lwjgl.openal.AL10.AL_POSITION; rlm@0: import static org.lwjgl.openal.AL10.AL_REFERENCE_DISTANCE; rlm@0: import static org.lwjgl.openal.AL10.AL_RENDERER; rlm@0: import static org.lwjgl.openal.AL10.AL_SOURCE_RELATIVE; rlm@0: import static org.lwjgl.openal.AL10.AL_SOURCE_STATE; rlm@0: import static org.lwjgl.openal.AL10.AL_STOPPED; rlm@0: import static org.lwjgl.openal.AL10.AL_TRUE; rlm@0: import static org.lwjgl.openal.AL10.AL_VELOCITY; rlm@0: import static org.lwjgl.openal.AL10.AL_VENDOR; rlm@0: import static org.lwjgl.openal.AL10.AL_VERSION; rlm@0: import static org.lwjgl.openal.AL10.alBufferData; rlm@0: import static org.lwjgl.openal.AL10.alDeleteBuffers; rlm@0: import static org.lwjgl.openal.AL10.alDeleteSources; rlm@0: import static org.lwjgl.openal.AL10.alGenBuffers; rlm@0: import static org.lwjgl.openal.AL10.alGenSources; rlm@0: import static org.lwjgl.openal.AL10.alGetError; rlm@0: import static org.lwjgl.openal.AL10.alGetSourcei; rlm@0: import static org.lwjgl.openal.AL10.alGetString; rlm@0: import static org.lwjgl.openal.AL10.alListener; rlm@0: import static org.lwjgl.openal.AL10.alListener3f; rlm@0: import static org.lwjgl.openal.AL10.alListenerf; rlm@0: import static org.lwjgl.openal.AL10.alSource3f; rlm@0: import static org.lwjgl.openal.AL10.alSourcePause; rlm@0: import static org.lwjgl.openal.AL10.alSourcePlay; rlm@0: import static org.lwjgl.openal.AL10.alSourceQueueBuffers; rlm@0: import static org.lwjgl.openal.AL10.alSourceStop; rlm@0: import static org.lwjgl.openal.AL10.alSourceUnqueueBuffers; rlm@0: import static org.lwjgl.openal.AL10.alSourcef; rlm@0: import static org.lwjgl.openal.AL10.alSourcei; rlm@0: rlm@0: import java.lang.reflect.Field; rlm@0: import java.nio.ByteBuffer; rlm@0: import java.nio.FloatBuffer; rlm@0: import java.nio.IntBuffer; rlm@0: import java.util.ArrayList; rlm@0: import java.util.Vector; rlm@0: import java.util.concurrent.atomic.AtomicBoolean; rlm@0: import java.util.logging.Level; rlm@0: import java.util.logging.Logger; rlm@0: rlm@0: import org.lwjgl.LWJGLException; rlm@0: import org.lwjgl.openal.AL; rlm@0: import org.lwjgl.openal.AL11; rlm@0: import org.lwjgl.openal.ALC10; rlm@0: import org.lwjgl.openal.ALCdevice; rlm@0: import org.lwjgl.openal.EFX10; rlm@0: import org.lwjgl.openal.OpenALException; rlm@0: rlm@0: import com.jme3.audio.AudioBuffer; rlm@0: import com.jme3.audio.AudioData; rlm@0: import com.jme3.audio.AudioNode; rlm@0: import com.jme3.audio.AudioNode.Status; rlm@0: import com.jme3.audio.AudioParam; rlm@0: import com.jme3.audio.AudioRenderer; rlm@0: import com.jme3.audio.AudioStream; rlm@0: import com.jme3.audio.Environment; rlm@0: import com.jme3.audio.Filter; rlm@0: import com.jme3.audio.Listener; rlm@0: import com.jme3.audio.ListenerParam; rlm@0: import com.jme3.audio.LowPassFilter; rlm@0: import com.jme3.math.Vector3f; rlm@0: import com.jme3.util.BufferUtils; rlm@0: rlm@0: rlm@0: rlm@0: public class RecordAudioRenderer implements AudioRenderer, Runnable { rlm@0: rlm@0: rlm@0: rlm@0: public static void getMainSamples(){ rlm@0: rlm@0: } rlm@0: rlm@0: rlm@0: private static final Logger logger = Logger.getLogger(RecordAudioRenderer.class.getName()); rlm@0: rlm@0: // When multiplied by STREAMING_BUFFER_COUNT, will equal 44100 * 2 * 2 rlm@0: // which is exactly 1 second of audio. rlm@0: private static final int BUFFER_SIZE = 35280; rlm@0: private static final int STREAMING_BUFFER_COUNT = 5; rlm@0: rlm@0: private final static int MAX_NUM_CHANNELS = 2; rlm@0: private IntBuffer ib = BufferUtils.createIntBuffer(1); rlm@0: private final FloatBuffer fb = BufferUtils.createVector3Buffer(2); rlm@0: private final ByteBuffer nativeBuf = BufferUtils.createByteBuffer(BUFFER_SIZE); rlm@0: private final byte[] arrayBuf = new byte[BUFFER_SIZE]; rlm@0: rlm@0: private int[] channels; rlm@0: private AudioNode[] chanSrcs; rlm@0: private int nextChan = 0; rlm@0: private ArrayList freeChans = new ArrayList(); rlm@0: rlm@0: private Listener listener; rlm@0: private boolean audioDisabled = false; rlm@0: rlm@0: private boolean supportEfx = false; rlm@0: private int auxSends = 0; rlm@0: private int reverbFx = -1; rlm@0: private int reverbFxSlot = -1; rlm@0: rlm@0: // RLM: this is to call the native methods which require the OpenAL device ID. rlm@0: // currently it is obtained through reflection. rlm@0: private long deviceID; rlm@0: rlm@0: // Update audio 20 times per second rlm@0: private static final float UPDATE_RATE = 0.05f; rlm@0: rlm@0: private final Thread audioThread = new Thread(this, "jME3 Audio Thread"); rlm@0: private final AtomicBoolean threadLock = new AtomicBoolean(false); rlm@0: rlm@0: public RecordAudioRenderer(){ rlm@0: } rlm@0: rlm@0: public static native void helloEveryone(); rlm@0: rlm@0: rlm@0: public static native void nstep(long device); rlm@0: public void step(){ rlm@0: nstep(this.deviceID); rlm@0: } rlm@0: rlm@0: rlm@0: rlm@0: public void getMainSamples(ByteBuffer buffer){ rlm@0: ngetMainSamples(this.deviceID, buffer, buffer.position()); rlm@0: } rlm@0: public static native void ngetMainSamples(long device, ByteBuffer buffer, int position); rlm@0: rlm@0: rlm@0: public void getAuxSamples(ByteBuffer buffer){ rlm@0: ngetAuxSamples(this.deviceID, buffer, buffer.position()); rlm@0: } rlm@0: public static native void ngetAuxSamples(long device, ByteBuffer buffer, int position); rlm@0: rlm@0: rlm@0: rlm@0: public void initialize(){ rlm@0: if (!audioThread.isAlive()){ rlm@0: audioThread.setDaemon(true); rlm@0: audioThread.setPriority(Thread.NORM_PRIORITY+1); rlm@0: audioThread.start(); rlm@0: }else{ rlm@0: throw new IllegalStateException("Initialize already called"); rlm@0: } rlm@0: } rlm@0: rlm@0: private void checkDead(){ rlm@0: if (audioThread.getState() == Thread.State.TERMINATED) rlm@0: throw new IllegalStateException("Audio thread is terminated"); rlm@0: } rlm@0: rlm@0: public void run(){ rlm@0: initInThread(); rlm@0: synchronized (threadLock){ rlm@0: threadLock.set(true); rlm@0: threadLock.notifyAll(); rlm@0: } rlm@0: rlm@0: rlm@0: helloEveryone(); rlm@0: System.out.println("AudioRecorder: Trying to call native methods."); rlm@0: System.out.println("our device ID is : " + this.deviceID); rlm@0: rlm@0: rlm@0: rlm@0: rlm@0: long updateRateNanos = (long) (UPDATE_RATE * 1000000000); rlm@0: mainloop: while (true){ rlm@0: long startTime = System.nanoTime(); rlm@0: rlm@0: if (Thread.interrupted()) rlm@0: break; rlm@0: rlm@0: synchronized (threadLock){ rlm@0: updateInThread(UPDATE_RATE); rlm@0: } rlm@0: rlm@0: long endTime = System.nanoTime(); rlm@0: long diffTime = endTime - startTime; rlm@0: rlm@0: if (diffTime < updateRateNanos){ rlm@0: long desiredEndTime = startTime + updateRateNanos; rlm@0: while (System.nanoTime() < desiredEndTime){ rlm@0: try{ rlm@0: Thread.sleep(1); rlm@0: }catch (InterruptedException ex){ rlm@0: break mainloop; rlm@0: } rlm@0: } rlm@0: } rlm@0: } rlm@0: rlm@0: synchronized (threadLock){ rlm@0: cleanupInThread(); rlm@0: } rlm@0: } rlm@0: rlm@0: public void initInThread(){ rlm@0: try{ rlm@0: if (!AL.isCreated()){ rlm@0: AL.create("Aurellem", 44100, 15, false); rlm@0: } rlm@0: }catch (OpenALException ex){ rlm@0: logger.log(Level.SEVERE, "Failed to load audio library", ex); rlm@0: audioDisabled = true; rlm@0: return; rlm@0: }catch (LWJGLException ex){ rlm@0: logger.log(Level.SEVERE, "Failed to load audio library", ex); rlm@0: audioDisabled = true; rlm@0: return; rlm@0: } rlm@0: rlm@0: ALCdevice device = AL.getDevice(); rlm@0: rlm@0: // RLM: use reflection to grab the ID of our device for use later. rlm@0: try { rlm@0: Field deviceIDField; rlm@0: deviceIDField = ALCdevice.class.getDeclaredField("device"); rlm@0: deviceIDField.setAccessible(true); rlm@0: try {deviceID = (Long)deviceIDField.get(device);} rlm@0: catch (IllegalArgumentException e) {e.printStackTrace();} rlm@0: catch (IllegalAccessException e) {e.printStackTrace();} rlm@0: deviceIDField.setAccessible(false);} rlm@0: catch (SecurityException e) {e.printStackTrace();} rlm@0: catch (NoSuchFieldException e) {e.printStackTrace();} rlm@0: rlm@0: rlm@0: rlm@0: String deviceName = ALC10.alcGetString(device, ALC10.ALC_DEVICE_SPECIFIER); rlm@0: rlm@0: logger.log(Level.FINER, "Audio Device: {0}", deviceName); rlm@0: logger.log(Level.FINER, "Audio Vendor: {0}", alGetString(AL_VENDOR)); rlm@0: logger.log(Level.FINER, "Audio Renderer: {0}", alGetString(AL_RENDERER)); rlm@0: logger.log(Level.FINER, "Audio Version: {0}", alGetString(AL_VERSION)); rlm@0: rlm@0: // Find maximum # of sources supported by this implementation rlm@0: // RLM: this may not be wise -- exceeding the number of available channels rlm@0: // can crash some versions of OpenAL rlm@0: ArrayList channelList = new ArrayList(); rlm@0: for (int i = 0; i < MAX_NUM_CHANNELS; i++){ rlm@0: int chan = alGenSources(); rlm@0: if (alGetError() != 0){ rlm@0: break; rlm@0: }else{ rlm@0: channelList.add(chan); rlm@0: } rlm@0: } rlm@0: rlm@0: channels = new int[channelList.size()]; rlm@0: for (int i = 0; i < channels.length; i++){ rlm@0: channels[i] = channelList.get(i); rlm@0: } rlm@0: rlm@0: ib = BufferUtils.createIntBuffer(channels.length); rlm@0: chanSrcs = new AudioNode[channels.length]; rlm@0: rlm@0: logger.log(Level.INFO, "AudioRenderer supports {0} channels", channels.length); rlm@0: rlm@0: supportEfx = ALC10.alcIsExtensionPresent(device, "ALC_EXT_EFX"); rlm@0: // RLM: disable this for now. rlm@0: supportEfx = false; rlm@0: logger.log(Level.FINER, "Audio EFX support: {0}", supportEfx); rlm@0: rlm@0: if (supportEfx){ rlm@0: ib.position(0).limit(1); rlm@0: ALC10.alcGetInteger(device, EFX10.ALC_EFX_MAJOR_VERSION, ib); rlm@0: int major = ib.get(0); rlm@0: ib.position(0).limit(1); rlm@0: ALC10.alcGetInteger(device, EFX10.ALC_EFX_MINOR_VERSION, ib); rlm@0: int minor = ib.get(0); rlm@0: logger.log(Level.INFO, "Audio effect extension version: {0}.{1}", new Object[]{major, minor}); rlm@0: rlm@0: ALC10.alcGetInteger(device, EFX10.ALC_MAX_AUXILIARY_SENDS, ib); rlm@0: auxSends = ib.get(0); rlm@0: logger.log(Level.INFO, "Audio max auxilary sends: {0}", auxSends); rlm@0: rlm@0: // create slot rlm@0: ib.position(0).limit(1); rlm@0: EFX10.alGenAuxiliaryEffectSlots(ib); rlm@0: reverbFxSlot = ib.get(0); rlm@0: rlm@0: // create effect rlm@0: ib.position(0).limit(1); rlm@0: EFX10.alGenEffects(ib); rlm@0: reverbFx = ib.get(0); rlm@0: EFX10.alEffecti(reverbFx, EFX10.AL_EFFECT_TYPE, EFX10.AL_EFFECT_REVERB); rlm@0: rlm@0: // attach reverb effect to effect slot rlm@0: // EFX10.alAuxiliaryEffectSloti(reverbFxSlot, EFX10.AL_EFFECTSLOT_EFFECT, reverbFx); rlm@0: } rlm@0: } rlm@0: rlm@0: public void cleanupInThread(){ rlm@0: rlm@0: rlm@0: if (audioDisabled){ rlm@0: AL.destroy(); rlm@0: return; rlm@0: } rlm@0: rlm@0: // delete channel-based sources rlm@0: ib.clear(); rlm@0: ib.put(channels); rlm@0: ib.flip(); rlm@0: alDeleteSources(ib); rlm@0: rlm@0: if (supportEfx){ rlm@0: ib.position(0).limit(1); rlm@0: ib.put(0, reverbFx); rlm@0: EFX10.alDeleteEffects(ib); rlm@0: rlm@0: ib.position(0).limit(1); rlm@0: ib.put(0, reverbFxSlot); rlm@0: EFX10.alDeleteAuxiliaryEffectSlots(ib); rlm@0: } rlm@0: rlm@0: // XXX: Delete other buffers/sources rlm@0: AL.destroy(); rlm@0: } rlm@0: rlm@0: public void cleanup(){ rlm@0: // kill audio thread rlm@0: rlm@0: if (audioThread.isAlive()){ rlm@0: audioThread.interrupt(); rlm@0: } rlm@0: rlm@0: Byte[] data1 = new Byte[this.fullWaveData1.size()]; rlm@0: data1 = this.fullWaveData1.toArray(data1); rlm@0: rlm@0: Byte[] data2 = new Byte[this.fullWaveData2.size()]; rlm@0: data2 = this.fullWaveData2.toArray(data2); rlm@0: System.out.println(this.fullWaveData1.size()); rlm@0: System.out.println("Saving WAVE data!"); rlm@0: /*for (int i = 0; i < data1.length;i++){ rlm@0: System.out.print(data1[i]+","); rlm@0: if (i%32 ==0){System.out.println();} rlm@0: } rlm@0: */ rlm@0: rlm@0: rlm@0: StdAudio.save("/home/r/wave-output/data2.wav", data2); rlm@0: StdAudio.save("/home/r/wave-output/data1.wav", data1); rlm@0: } rlm@0: rlm@0: private void updateFilter(Filter f){ rlm@0: int id = f.getId(); rlm@0: if (id == -1){ rlm@0: ib.position(0).limit(1); rlm@0: EFX10.alGenFilters(ib); rlm@0: id = ib.get(0); rlm@0: f.setId(id); rlm@0: } rlm@0: rlm@0: if (f instanceof LowPassFilter){ rlm@0: LowPassFilter lpf = (LowPassFilter) f; rlm@0: EFX10.alFilteri(id, EFX10.AL_FILTER_TYPE, EFX10.AL_FILTER_LOWPASS); rlm@0: EFX10.alFilterf(id, EFX10.AL_LOWPASS_GAIN, lpf.getVolume()); rlm@0: EFX10.alFilterf(id, EFX10.AL_LOWPASS_GAINHF, lpf.getHighFreqVolume()); rlm@0: }else{ rlm@0: throw new UnsupportedOperationException("Filter type unsupported: "+ rlm@0: f.getClass().getName()); rlm@0: } rlm@0: rlm@0: f.clearUpdateNeeded(); rlm@0: } rlm@0: rlm@0: public void updateSourceParam(AudioNode src, AudioParam param){ rlm@0: checkDead(); rlm@0: synchronized (threadLock){ rlm@0: while (!threadLock.get()){ rlm@0: try { rlm@0: threadLock.wait(); rlm@0: } catch (InterruptedException ex) { rlm@0: } rlm@0: } rlm@0: if (audioDisabled) rlm@0: return; rlm@0: rlm@0: // There is a race condition in AudioNode that can rlm@0: // cause this to be called for a node that has been rlm@0: // detached from its channel. For example, setVolume() rlm@0: // called from the render thread may see that that AudioNode rlm@0: // still has a channel value but the audio thread may rlm@0: // clear that channel before setVolume() gets to call rlm@0: // updateSourceParam() (because the audio stopped playing rlm@0: // on its own right as the volume was set). In this case, rlm@0: // it should be safe to just ignore the update rlm@0: if (src.getChannel() < 0) rlm@0: return; rlm@0: rlm@0: assert src.getChannel() >= 0; rlm@0: rlm@0: int id = channels[src.getChannel()]; rlm@0: switch (param){ rlm@0: case Position: rlm@0: if (!src.isPositional()) rlm@0: return; rlm@0: rlm@0: Vector3f pos = src.getWorldTranslation(); rlm@0: alSource3f(id, AL_POSITION, pos.x, pos.y, pos.z); rlm@0: break; rlm@0: case Velocity: rlm@0: if (!src.isPositional()) rlm@0: return; rlm@0: rlm@0: Vector3f vel = src.getVelocity(); rlm@0: alSource3f(id, AL_VELOCITY, vel.x, vel.y, vel.z); rlm@0: break; rlm@0: case MaxDistance: rlm@0: if (!src.isPositional()) rlm@0: return; rlm@0: rlm@0: alSourcef(id, AL_MAX_DISTANCE, src.getMaxDistance()); rlm@0: break; rlm@0: case RefDistance: rlm@0: if (!src.isPositional()) rlm@0: return; rlm@0: rlm@0: alSourcef(id, AL_REFERENCE_DISTANCE, src.getRefDistance()); rlm@0: break; rlm@0: case ReverbFilter: rlm@0: if (!src.isPositional() || !src.isReverbEnabled()) rlm@0: return; rlm@0: rlm@0: int filter = EFX10.AL_FILTER_NULL; rlm@0: if (src.getReverbFilter() != null){ rlm@0: Filter f = src.getReverbFilter(); rlm@0: if (f.isUpdateNeeded()){ rlm@0: updateFilter(f); rlm@0: } rlm@0: filter = f.getId(); rlm@0: } rlm@0: AL11.alSource3i(id, EFX10.AL_AUXILIARY_SEND_FILTER, reverbFxSlot, 0, filter); rlm@0: break; rlm@0: case ReverbEnabled: rlm@0: if (!src.isPositional()) rlm@0: return; rlm@0: rlm@0: if (src.isReverbEnabled()){ rlm@0: updateSourceParam(src, AudioParam.ReverbFilter); rlm@0: }else{ rlm@0: AL11.alSource3i(id, EFX10.AL_AUXILIARY_SEND_FILTER, 0, 0, EFX10.AL_FILTER_NULL); rlm@0: } rlm@0: break; rlm@0: case IsPositional: rlm@0: if (!src.isPositional()){ rlm@0: // play in headspace rlm@0: alSourcei(id, AL_SOURCE_RELATIVE, AL_TRUE); rlm@0: alSource3f(id, AL_POSITION, 0,0,0); rlm@0: alSource3f(id, AL_VELOCITY, 0,0,0); rlm@0: }else{ rlm@0: alSourcei(id, AL_SOURCE_RELATIVE, AL_FALSE); rlm@0: updateSourceParam(src, AudioParam.Position); rlm@0: updateSourceParam(src, AudioParam.Velocity); rlm@0: updateSourceParam(src, AudioParam.MaxDistance); rlm@0: updateSourceParam(src, AudioParam.RefDistance); rlm@0: updateSourceParam(src, AudioParam.ReverbEnabled); rlm@0: } rlm@0: break; rlm@0: case Direction: rlm@0: if (!src.isDirectional()) rlm@0: return; rlm@0: rlm@0: Vector3f dir = src.getDirection(); rlm@0: alSource3f(id, AL_DIRECTION, dir.x, dir.y, dir.z); rlm@0: break; rlm@0: case InnerAngle: rlm@0: if (!src.isDirectional()) rlm@0: return; rlm@0: rlm@0: alSourcef(id, AL_CONE_INNER_ANGLE, src.getInnerAngle()); rlm@0: break; rlm@0: case OuterAngle: rlm@0: if (!src.isDirectional()) rlm@0: return; rlm@0: rlm@0: alSourcef(id, AL_CONE_OUTER_ANGLE, src.getOuterAngle()); rlm@0: break; rlm@0: case IsDirectional: rlm@0: if (src.isDirectional()){ rlm@0: updateSourceParam(src, AudioParam.Direction); rlm@0: updateSourceParam(src, AudioParam.InnerAngle); rlm@0: updateSourceParam(src, AudioParam.OuterAngle); rlm@0: alSourcef(id, AL_CONE_OUTER_GAIN, 0); rlm@0: }else{ rlm@0: alSourcef(id, AL_CONE_INNER_ANGLE, 360); rlm@0: alSourcef(id, AL_CONE_OUTER_ANGLE, 360); rlm@0: alSourcef(id, AL_CONE_OUTER_GAIN, 1f); rlm@0: } rlm@0: break; rlm@0: case DryFilter: rlm@0: if (src.getDryFilter() != null){ rlm@0: Filter f = src.getDryFilter(); rlm@0: if (f.isUpdateNeeded()){ rlm@0: updateFilter(f); rlm@0: rlm@0: // NOTE: must re-attach filter for changes to apply. rlm@0: alSourcei(id, EFX10.AL_DIRECT_FILTER, f.getId()); rlm@0: } rlm@0: }else{ rlm@0: alSourcei(id, EFX10.AL_DIRECT_FILTER, EFX10.AL_FILTER_NULL); rlm@0: } rlm@0: break; rlm@0: case Looping: rlm@0: if (src.isLooping()){ rlm@0: if (!(src.getAudioData() instanceof AudioStream)){ rlm@0: alSourcei(id, AL_LOOPING, AL_TRUE); rlm@0: } rlm@0: }else{ rlm@0: alSourcei(id, AL_LOOPING, AL_FALSE); rlm@0: } rlm@0: break; rlm@0: case Volume: rlm@0: alSourcef(id, AL_GAIN, src.getVolume()); rlm@0: break; rlm@0: case Pitch: rlm@0: alSourcef(id, AL_PITCH, src.getPitch()); rlm@0: break; rlm@0: } rlm@0: } rlm@0: } rlm@0: rlm@0: private void setSourceParams(int id, AudioNode src, boolean forceNonLoop){ rlm@0: if (src.isPositional()){ rlm@0: Vector3f pos = src.getWorldTranslation(); rlm@0: Vector3f vel = src.getVelocity(); rlm@0: alSource3f(id, AL_POSITION, pos.x, pos.y, pos.z); rlm@0: alSource3f(id, AL_VELOCITY, vel.x, vel.y, vel.z); rlm@0: alSourcef(id, AL_MAX_DISTANCE, src.getMaxDistance()); rlm@0: alSourcef(id, AL_REFERENCE_DISTANCE, src.getRefDistance()); rlm@0: alSourcei(id, AL_SOURCE_RELATIVE, AL_FALSE); rlm@0: rlm@0: if (src.isReverbEnabled()){ rlm@0: int filter = EFX10.AL_FILTER_NULL; rlm@0: if (src.getReverbFilter() != null){ rlm@0: Filter f = src.getReverbFilter(); rlm@0: if (f.isUpdateNeeded()){ rlm@0: updateFilter(f); rlm@0: } rlm@0: filter = f.getId(); rlm@0: } rlm@0: AL11.alSource3i(id, EFX10.AL_AUXILIARY_SEND_FILTER, reverbFxSlot, 0, filter); rlm@0: } rlm@0: }else{ rlm@0: // play in headspace rlm@0: alSourcei(id, AL_SOURCE_RELATIVE, AL_TRUE); rlm@0: alSource3f(id, AL_POSITION, 0,0,0); rlm@0: alSource3f(id, AL_VELOCITY, 0,0,0); rlm@0: } rlm@0: rlm@0: if (src.getDryFilter() != null){ rlm@0: Filter f = src.getDryFilter(); rlm@0: if (f.isUpdateNeeded()){ rlm@0: updateFilter(f); rlm@0: rlm@0: // NOTE: must re-attach filter for changes to apply. rlm@0: alSourcei(id, EFX10.AL_DIRECT_FILTER, f.getId()); rlm@0: } rlm@0: } rlm@0: rlm@0: if (forceNonLoop){ rlm@0: alSourcei(id, AL_LOOPING, AL_FALSE); rlm@0: }else{ rlm@0: alSourcei(id, AL_LOOPING, src.isLooping() ? AL_TRUE : AL_FALSE); rlm@0: } rlm@0: alSourcef(id, AL_GAIN, src.getVolume()); rlm@0: alSourcef(id, AL_PITCH, src.getPitch()); rlm@0: alSourcef(id, AL11.AL_SEC_OFFSET, src.getTimeOffset()); rlm@0: rlm@0: if (src.isDirectional()){ rlm@0: Vector3f dir = src.getDirection(); rlm@0: alSource3f(id, AL_DIRECTION, dir.x, dir.y, dir.z); rlm@0: alSourcef(id, AL_CONE_INNER_ANGLE, src.getInnerAngle()); rlm@0: alSourcef(id, AL_CONE_OUTER_ANGLE, src.getOuterAngle()); rlm@0: alSourcef(id, AL_CONE_OUTER_GAIN, 0); rlm@0: }else{ rlm@0: alSourcef(id, AL_CONE_INNER_ANGLE, 360); rlm@0: alSourcef(id, AL_CONE_OUTER_ANGLE, 360); rlm@0: alSourcef(id, AL_CONE_OUTER_GAIN, 1f); rlm@0: } rlm@0: } rlm@0: rlm@0: public void updateListenerParam(Listener listener, ListenerParam param){ rlm@0: checkDead(); rlm@0: synchronized (threadLock){ rlm@0: while (!threadLock.get()){ rlm@0: try { rlm@0: threadLock.wait(); rlm@0: } catch (InterruptedException ex) { rlm@0: } rlm@0: } rlm@0: if (audioDisabled) rlm@0: return; rlm@0: rlm@0: switch (param){ rlm@0: case Position: rlm@0: Vector3f pos = listener.getLocation(); rlm@0: alListener3f(AL_POSITION, pos.x, pos.y, pos.z); rlm@0: break; rlm@0: case Rotation: rlm@0: Vector3f dir = listener.getDirection(); rlm@0: Vector3f up = listener.getUp(); rlm@0: fb.rewind(); rlm@0: fb.put(dir.x).put(dir.y).put(dir.z); rlm@0: fb.put(up.x).put(up.y).put(up.z); rlm@0: fb.flip(); rlm@0: alListener(AL_ORIENTATION, fb); rlm@0: break; rlm@0: case Velocity: rlm@0: Vector3f vel = listener.getVelocity(); rlm@0: alListener3f(AL_VELOCITY, vel.x, vel.y, vel.z); rlm@0: break; rlm@0: case Volume: rlm@0: alListenerf(AL_GAIN, listener.getVolume()); rlm@0: break; rlm@0: } rlm@0: } rlm@0: } rlm@0: rlm@0: private void setListenerParams(Listener listener){ rlm@0: Vector3f pos = listener.getLocation(); rlm@0: Vector3f vel = listener.getVelocity(); rlm@0: Vector3f dir = listener.getDirection(); rlm@0: Vector3f up = listener.getUp(); rlm@0: rlm@0: alListener3f(AL_POSITION, pos.x, pos.y, pos.z); rlm@0: alListener3f(AL_VELOCITY, vel.x, vel.y, vel.z); rlm@0: fb.rewind(); rlm@0: fb.put(dir.x).put(dir.y).put(dir.z); rlm@0: fb.put(up.x).put(up.y).put(up.z); rlm@0: fb.flip(); rlm@0: alListener(AL_ORIENTATION, fb); rlm@0: alListenerf(AL_GAIN, listener.getVolume()); rlm@0: } rlm@0: rlm@0: private int newChannel(){ rlm@0: if (freeChans.size() > 0) rlm@0: return freeChans.remove(0); rlm@0: else if (nextChan < channels.length){ rlm@0: return nextChan++; rlm@0: }else{ rlm@0: return -1; rlm@0: } rlm@0: } rlm@0: rlm@0: private void freeChannel(int index){ rlm@0: if (index == nextChan-1){ rlm@0: nextChan--; rlm@0: } else{ rlm@0: freeChans.add(index); rlm@0: } rlm@0: } rlm@0: rlm@0: public void setEnvironment(Environment env){ rlm@0: checkDead(); rlm@0: synchronized (threadLock){ rlm@0: while (!threadLock.get()){ rlm@0: try { rlm@0: threadLock.wait(); rlm@0: } catch (InterruptedException ex) { rlm@0: } rlm@0: } rlm@0: if (audioDisabled) rlm@0: return; rlm@0: rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_DENSITY, env.getDensity()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_DIFFUSION, env.getDiffusion()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_GAIN, env.getGain()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_GAINHF, env.getGainHf()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_DECAY_TIME, env.getDecayTime()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_DECAY_HFRATIO, env.getDecayHFRatio()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_REFLECTIONS_GAIN, env.getReflectGain()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_REFLECTIONS_DELAY, env.getReflectDelay()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_LATE_REVERB_GAIN, env.getLateReverbGain()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_LATE_REVERB_DELAY, env.getLateReverbDelay()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_AIR_ABSORPTION_GAINHF, env.getAirAbsorbGainHf()); rlm@0: EFX10.alEffectf(reverbFx, EFX10.AL_REVERB_ROOM_ROLLOFF_FACTOR, env.getRoomRolloffFactor()); rlm@0: rlm@0: // attach effect to slot rlm@0: EFX10.alAuxiliaryEffectSloti(reverbFxSlot, EFX10.AL_EFFECTSLOT_EFFECT, reverbFx); rlm@0: } rlm@0: } rlm@0: rlm@0: private boolean fillBuffer(AudioStream stream, int id){ rlm@0: int size = 0; rlm@0: int result; rlm@0: rlm@0: while (size < arrayBuf.length){ rlm@0: result = stream.readSamples(arrayBuf, size, arrayBuf.length - size); rlm@0: rlm@0: if(result > 0){ rlm@0: size += result; rlm@0: }else{ rlm@0: break; rlm@0: } rlm@0: } rlm@0: rlm@0: if(size == 0) rlm@0: return false; rlm@0: rlm@0: nativeBuf.clear(); rlm@0: nativeBuf.put(arrayBuf, 0, size); rlm@0: nativeBuf.flip(); rlm@0: rlm@0: alBufferData(id, convertFormat(stream), nativeBuf, stream.getSampleRate()); rlm@0: rlm@0: return true; rlm@0: } rlm@0: rlm@0: private boolean fillStreamingSource(int sourceId, AudioStream stream){ rlm@0: if (!stream.isOpen()) rlm@0: return false; rlm@0: rlm@0: boolean active = true; rlm@0: int processed = alGetSourcei(sourceId, AL_BUFFERS_PROCESSED); rlm@0: rlm@0: // while((processed--) != 0){ rlm@0: if (processed > 0){ rlm@0: int buffer; rlm@0: rlm@0: ib.position(0).limit(1); rlm@0: alSourceUnqueueBuffers(sourceId, ib); rlm@0: buffer = ib.get(0); rlm@0: rlm@0: active = fillBuffer(stream, buffer); rlm@0: rlm@0: ib.position(0).limit(1); rlm@0: ib.put(0, buffer); rlm@0: alSourceQueueBuffers(sourceId, ib); rlm@0: } rlm@0: rlm@0: if (!active && stream.isOpen()) rlm@0: stream.close(); rlm@0: rlm@0: return active; rlm@0: } rlm@0: rlm@0: private boolean attachStreamToSource(int sourceId, AudioStream stream){ rlm@0: boolean active = true; rlm@0: for (int id : stream.getIds()){ rlm@0: active = fillBuffer(stream, id); rlm@0: ib.position(0).limit(1); rlm@0: ib.put(id).flip(); rlm@0: alSourceQueueBuffers(sourceId, ib); rlm@0: } rlm@0: return active; rlm@0: } rlm@0: rlm@0: private boolean attachBufferToSource(int sourceId, AudioBuffer buffer){ rlm@0: alSourcei(sourceId, AL_BUFFER, buffer.getId()); rlm@0: return true; rlm@0: } rlm@0: rlm@0: private boolean attachAudioToSource(int sourceId, AudioData data){ rlm@0: if (data instanceof AudioBuffer){ rlm@0: return attachBufferToSource(sourceId, (AudioBuffer) data); rlm@0: }else if (data instanceof AudioStream){ rlm@0: return attachStreamToSource(sourceId, (AudioStream) data); rlm@0: } rlm@0: throw new UnsupportedOperationException(); rlm@0: } rlm@0: rlm@0: private void clearChannel(int index){ rlm@0: // make room at this channel rlm@0: if (chanSrcs[index] != null){ rlm@0: AudioNode src = chanSrcs[index]; rlm@0: rlm@0: int sourceId = channels[index]; rlm@0: alSourceStop(sourceId); rlm@0: rlm@0: if (src.getAudioData() instanceof AudioStream){ rlm@0: AudioStream str = (AudioStream) src.getAudioData(); rlm@0: ib.position(0).limit(STREAMING_BUFFER_COUNT); rlm@0: ib.put(str.getIds()).flip(); rlm@0: alSourceUnqueueBuffers(sourceId, ib); rlm@0: }else if (src.getAudioData() instanceof AudioBuffer){ rlm@0: alSourcei(sourceId, AL_BUFFER, 0); rlm@0: } rlm@0: rlm@0: if (src.getDryFilter() != null){ rlm@0: // detach filter rlm@0: alSourcei(sourceId, EFX10.AL_DIRECT_FILTER, EFX10.AL_FILTER_NULL); rlm@0: } rlm@0: if (src.isPositional()){ rlm@0: AudioNode pas = (AudioNode) src; rlm@0: if (pas.isReverbEnabled()) { rlm@0: AL11.alSource3i(sourceId, EFX10.AL_AUXILIARY_SEND_FILTER, 0, 0, EFX10.AL_FILTER_NULL); rlm@0: } rlm@0: } rlm@0: rlm@0: chanSrcs[index] = null; rlm@0: } rlm@0: } rlm@0: rlm@0: public void update(float tpf){ rlm@0: //ByteBuffer test = BufferUtils.createByteBuffer(1); rlm@0: //AurellemTransport.getAuxSamples(AL.getDevice(), test); rlm@0: } rlm@0: rlm@0: Vector fullWaveData1 = new Vector(); rlm@0: Vector fullWaveData2 = new Vector(); rlm@0: rlm@0: public void updateInThread(float tpf){ rlm@0: if (audioDisabled) rlm@0: return; rlm@0: rlm@0: step(); rlm@0: ByteBuffer test = BufferUtils.createByteBuffer(4096); rlm@0: test.clear(); rlm@0: this.getMainSamples(test); rlm@0: byte[] waveData = new byte[4096]; rlm@0: test.get(waveData, 0, 4096); rlm@0: //System.out.println("J DATA:"); rlm@0: /*for (int j = 0; j < 1; j++){ rlm@0: for(int i = 64 * j; i < (64*j) + 64; i++){ rlm@0: System.out.print(waveData[i]); rlm@0: } rlm@0: System.out.println(); rlm@0: }*/ rlm@0: rlm@0: ByteBuffer test2 = BufferUtils.createByteBuffer(4096); rlm@0: test2.clear(); rlm@0: this.getAuxSamples(test2); rlm@0: byte[] waveData2 = new byte[4096]; rlm@0: test2.get(waveData2, 0, 4096); rlm@0: //System.out.print("wave1:"); rlm@0: //for (int j = 0; j< 32; j++){ rlm@0: // System.out.print(waveData[j]+","); rlm@0: // } rlm@0: //System.out.println(); rlm@0: //System.out.print("wave2:"); rlm@0: // for (int j = 0; j< 4096; j++){ rlm@0: // if (waveData2[j] != 0){ rlm@0: // System.out.println("fucked at : " + j); rlm@0: // } rlm@0: rlm@0: /* System.out.print(waveData2[j]+","); rlm@0: if (0 == (j % 64)){System.out.println();}*/ rlm@0: //} rlm@0: //System.out.println(); rlm@0: rlm@0: for (byte b : waveData){ rlm@0: this.fullWaveData1.add(b); rlm@0: } rlm@0: rlm@0: for (byte b : waveData2){ rlm@0: this.fullWaveData2.add(b); rlm@0: } rlm@0: rlm@0: rlm@0: for (int i = 0; i < channels.length; i++){ rlm@0: AudioNode src = chanSrcs[i]; rlm@0: if (src == null) rlm@0: continue; rlm@0: rlm@0: int sourceId = channels[i]; rlm@0: rlm@0: // is the source bound to this channel rlm@0: // if false, it's an instanced playback rlm@0: boolean boundSource = i == src.getChannel(); rlm@0: rlm@0: // source's data is streaming rlm@0: boolean streaming = src.getAudioData() instanceof AudioStream; rlm@0: rlm@0: // only buffered sources can be bound rlm@0: assert (boundSource && streaming) || (!streaming); rlm@0: rlm@0: int state = alGetSourcei(sourceId, AL_SOURCE_STATE); rlm@0: boolean wantPlaying = src.getStatus() == Status.Playing; rlm@0: boolean stopped = state == AL_STOPPED; rlm@0: rlm@0: if (streaming && wantPlaying){ rlm@0: AudioStream stream = (AudioStream) src.getAudioData(); rlm@0: if (stream.isOpen()){ rlm@0: fillStreamingSource(sourceId, stream); rlm@0: if (stopped) rlm@0: alSourcePlay(sourceId); rlm@0: rlm@0: }else{ rlm@0: if (stopped){ rlm@0: // became inactive rlm@0: src.setStatus(Status.Stopped); rlm@0: src.setChannel(-1); rlm@0: clearChannel(i); rlm@0: freeChannel(i); rlm@0: rlm@0: // And free the audio since it cannot be rlm@0: // played again anyway. rlm@0: deleteAudioData(stream); rlm@0: } rlm@0: } rlm@0: }else if (!streaming){ rlm@0: boolean paused = state == AL_PAUSED; rlm@0: rlm@0: // make sure OAL pause state & source state coincide rlm@0: assert (src.getStatus() == Status.Paused && paused) || (!paused); rlm@0: rlm@0: if (stopped){ rlm@0: if (boundSource){ rlm@0: src.setStatus(Status.Stopped); rlm@0: src.setChannel(-1); rlm@0: } rlm@0: clearChannel(i); rlm@0: freeChannel(i); rlm@0: } rlm@0: } rlm@0: } rlm@0: } rlm@0: rlm@0: public void setListener(Listener listener) { rlm@0: checkDead(); rlm@0: synchronized (threadLock){ rlm@0: while (!threadLock.get()){ rlm@0: try { rlm@0: threadLock.wait(); rlm@0: } catch (InterruptedException ex) { rlm@0: } rlm@0: } rlm@0: if (audioDisabled) rlm@0: return; rlm@0: rlm@0: if (this.listener != null){ rlm@0: // previous listener no longer associated with current rlm@0: // renderer rlm@0: this.listener.setRenderer(null); rlm@0: } rlm@0: rlm@0: this.listener = listener; rlm@0: this.listener.setRenderer(this); rlm@0: setListenerParams(listener); rlm@0: } rlm@0: } rlm@0: rlm@0: public void playSourceInstance(AudioNode src){ rlm@0: checkDead(); rlm@0: synchronized (threadLock){ rlm@0: while (!threadLock.get()){ rlm@0: try { rlm@0: threadLock.wait(); rlm@0: } catch (InterruptedException ex) { rlm@0: } rlm@0: } rlm@0: if (audioDisabled) rlm@0: return; rlm@0: rlm@0: if (src.getAudioData() instanceof AudioStream) rlm@0: throw new UnsupportedOperationException( rlm@0: "Cannot play instances " + rlm@0: "of audio streams. Use playSource() instead."); rlm@0: rlm@0: if (src.getAudioData().isUpdateNeeded()){ rlm@0: updateAudioData(src.getAudioData()); rlm@0: } rlm@0: rlm@0: // create a new index for an audio-channel rlm@0: int index = newChannel(); rlm@0: if (index == -1) rlm@0: return; rlm@0: rlm@0: int sourceId = channels[index]; rlm@0: rlm@0: clearChannel(index); rlm@0: rlm@0: // set parameters, like position and max distance rlm@0: setSourceParams(sourceId, src, true); rlm@0: attachAudioToSource(sourceId, src.getAudioData()); rlm@0: chanSrcs[index] = src; rlm@0: rlm@0: // play the channel rlm@0: alSourcePlay(sourceId); rlm@0: } rlm@0: } rlm@0: rlm@0: rlm@0: public void playSource(AudioNode src) { rlm@0: checkDead(); rlm@0: synchronized (threadLock){ rlm@0: while (!threadLock.get()){ rlm@0: try { rlm@0: threadLock.wait(); rlm@0: } catch (InterruptedException ex) { rlm@0: } rlm@0: } rlm@0: if (audioDisabled) rlm@0: return; rlm@0: rlm@0: //assert src.getStatus() == Status.Stopped || src.getChannel() == -1; rlm@0: rlm@0: if (src.getStatus() == Status.Playing){ rlm@0: return; rlm@0: }else if (src.getStatus() == Status.Stopped){ rlm@0: rlm@0: // allocate channel to this source rlm@0: int index = newChannel(); rlm@0: if (index == -1) { rlm@0: logger.log(Level.WARNING, "No channel available to play {0}", src); rlm@0: return; rlm@0: } rlm@0: clearChannel(index); rlm@0: src.setChannel(index); rlm@0: rlm@0: AudioData data = src.getAudioData(); rlm@0: if (data.isUpdateNeeded()) rlm@0: updateAudioData(data); rlm@0: rlm@0: chanSrcs[index] = src; rlm@0: setSourceParams(channels[index], src, false); rlm@0: attachAudioToSource(channels[index], data); rlm@0: } rlm@0: rlm@0: alSourcePlay(channels[src.getChannel()]); rlm@0: src.setStatus(Status.Playing); rlm@0: } rlm@0: } rlm@0: rlm@0: rlm@0: public void pauseSource(AudioNode src) { rlm@0: checkDead(); rlm@0: synchronized (threadLock){ rlm@0: while (!threadLock.get()){ rlm@0: try { rlm@0: threadLock.wait(); rlm@0: } catch (InterruptedException ex) { rlm@0: } rlm@0: } rlm@0: if (audioDisabled) rlm@0: return; rlm@0: rlm@0: if (src.getStatus() == Status.Playing){ rlm@0: assert src.getChannel() != -1; rlm@0: rlm@0: alSourcePause(channels[src.getChannel()]); rlm@0: src.setStatus(Status.Paused); rlm@0: } rlm@0: } rlm@0: } rlm@0: rlm@0: rlm@0: public void stopSource(AudioNode src) { rlm@0: synchronized (threadLock){ rlm@0: while (!threadLock.get()){ rlm@0: try { rlm@0: threadLock.wait(); rlm@0: } catch (InterruptedException ex) { rlm@0: } rlm@0: } rlm@0: if (audioDisabled) rlm@0: return; rlm@0: rlm@0: if (src.getStatus() != Status.Stopped){ rlm@0: int chan = src.getChannel(); rlm@0: assert chan != -1; // if it's not stopped, must have id rlm@0: rlm@0: src.setStatus(Status.Stopped); rlm@0: src.setChannel(-1); rlm@0: clearChannel(chan); rlm@0: freeChannel(chan); rlm@0: rlm@0: if (src.getAudioData() instanceof AudioStream) { rlm@0: rlm@0: AudioStream stream = (AudioStream)src.getAudioData(); rlm@0: if (stream.isOpen()) { rlm@0: stream.close(); rlm@0: } rlm@0: rlm@0: // And free the audio since it cannot be rlm@0: // played again anyway. rlm@0: deleteAudioData(src.getAudioData()); rlm@0: } rlm@0: } rlm@0: } rlm@0: } rlm@0: rlm@0: private int convertFormat(AudioData ad){ rlm@0: switch (ad.getBitsPerSample()){ rlm@0: case 8: rlm@0: if (ad.getChannels() == 1) rlm@0: return AL_FORMAT_MONO8; rlm@0: else if (ad.getChannels() == 2) rlm@0: return AL_FORMAT_STEREO8; rlm@0: rlm@0: break; rlm@0: case 16: rlm@0: if (ad.getChannels() == 1) rlm@0: return AL_FORMAT_MONO16; rlm@0: else rlm@0: return AL_FORMAT_STEREO16; rlm@0: } rlm@0: throw new UnsupportedOperationException("Unsupported channels/bits combination: "+ rlm@0: "bits="+ad.getBitsPerSample()+", channels="+ad.getChannels()); rlm@0: } rlm@0: rlm@0: private void updateAudioBuffer(AudioBuffer ab){ rlm@0: int id = ab.getId(); rlm@0: if (ab.getId() == -1){ rlm@0: ib.position(0).limit(1); rlm@0: alGenBuffers(ib); rlm@0: id = ib.get(0); rlm@0: ab.setId(id); rlm@0: } rlm@0: rlm@0: ab.getData().clear(); rlm@0: alBufferData(id, convertFormat(ab), ab.getData(), ab.getSampleRate()); rlm@0: ab.clearUpdateNeeded(); rlm@0: } rlm@0: rlm@0: private void updateAudioStream(AudioStream as){ rlm@0: if (as.getIds() != null){ rlm@0: deleteAudioData(as); rlm@0: } rlm@0: rlm@0: int[] ids = new int[STREAMING_BUFFER_COUNT]; rlm@0: ib.position(0).limit(STREAMING_BUFFER_COUNT); rlm@0: alGenBuffers(ib); rlm@0: ib.position(0).limit(STREAMING_BUFFER_COUNT); rlm@0: ib.get(ids); rlm@0: rlm@0: as.setIds(ids); rlm@0: as.clearUpdateNeeded(); rlm@0: } rlm@0: rlm@0: private void updateAudioData(AudioData ad){ rlm@0: if (ad instanceof AudioBuffer){ rlm@0: updateAudioBuffer((AudioBuffer) ad); rlm@0: }else if (ad instanceof AudioStream){ rlm@0: updateAudioStream((AudioStream) ad); rlm@0: } rlm@0: } rlm@0: rlm@0: public void deleteAudioData(AudioData ad){ rlm@0: synchronized (threadLock){ rlm@0: while (!threadLock.get()){ rlm@0: try { rlm@0: threadLock.wait(); rlm@0: } catch (InterruptedException ex) { rlm@0: } rlm@0: } rlm@0: if (audioDisabled) rlm@0: return; rlm@0: rlm@0: if (ad instanceof AudioBuffer){ rlm@0: AudioBuffer ab = (AudioBuffer) ad; rlm@0: int id = ab.getId(); rlm@0: if (id != -1){ rlm@0: ib.put(0,id); rlm@0: ib.position(0).limit(1); rlm@0: alDeleteBuffers(ib); rlm@0: ab.resetObject(); rlm@0: } rlm@0: }else if (ad instanceof AudioStream){ rlm@0: AudioStream as = (AudioStream) ad; rlm@0: int[] ids = as.getIds(); rlm@0: if (ids != null){ rlm@0: ib.clear(); rlm@0: ib.put(ids).flip(); rlm@0: alDeleteBuffers(ib); rlm@0: as.resetObject(); rlm@0: } rlm@0: } rlm@0: } rlm@0: } rlm@0: rlm@0: }