changeset 9:5dfc9e768816

moved files
author Robert McIntyre <rlm@mit.edu>
date Wed, 26 Oct 2011 08:54:12 -0700
parents dde12be02029
children 4c5fc53778c1
files generate_headers.sh src/com/aurellem/capture/AVIOutputStream.java src/com/aurellem/capture/AVIVideoRecorder.java src/com/aurellem/capture/AbstractVideoRecorder.java src/com/aurellem/capture/AudioSend.java src/com/aurellem/capture/Capture.java src/com/aurellem/capture/DataChunkOutputStream.java src/com/aurellem/capture/FileAudioRenderer.java src/com/aurellem/capture/IVideoRecorder.java src/com/aurellem/capture/ImageOutputStreamAdapter.java src/com/aurellem/capture/Main.java src/com/aurellem/capture/MicrosoftRLEEncoder.java src/com/aurellem/capture/MultiListener.java src/com/aurellem/capture/SeekableByteArrayOutputStream.java src/com/aurellem/capture/SoundProcessor.java src/com/aurellem/capture/WaveFileWriter.java src/com/aurellem/capture/XuggleVideoRecorder.java src/com/aurellem/capture/audio/AudioSend.java src/com/aurellem/capture/audio/MultiListener.java src/com/aurellem/capture/audio/SeekableByteArrayOutputStream.java src/com/aurellem/capture/audio/SoundProcessor.java src/com/aurellem/capture/audio/WaveFileWriter.java src/com/aurellem/capture/hello/HelloAudio.java src/com/aurellem/capture/hello/HelloVideo.java src/com/aurellem/capture/hello/TestWrite.java src/com/aurellem/capture/video/AVIOutputStream.java src/com/aurellem/capture/video/AVIVideoRecorder.java src/com/aurellem/capture/video/AbstractVideoRecorder.java src/com/aurellem/capture/video/DataChunkOutputStream.java src/com/aurellem/capture/video/IVideoRecorder.java src/com/aurellem/capture/video/ImageOutputStreamAdapter.java src/com/aurellem/capture/video/MicrosoftRLEEncoder.java src/com/aurellem/capture/video/XuggleVideoRecorder.java test.sh
diffstat 34 files changed, 3096 insertions(+), 3148 deletions(-) [+]
line wrap: on
line diff
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/generate_headers.sh	Wed Oct 26 08:54:12 2011 -0700
     1.3 @@ -0,0 +1,1 @@
     1.4 +javah -classpath bin -d /home/r/proj/audio-send/OpenAL32/Include/ com.aurellem.capture.AudioSend
     2.1 --- a/src/com/aurellem/capture/AVIOutputStream.java	Tue Oct 25 12:29:40 2011 -0700
     2.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     2.3 @@ -1,1548 +0,0 @@
     2.4 -/**
     2.5 - * @(#)AVIOutputStream.java  1.5.1  2011-01-17
     2.6 - *
     2.7 - * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
     2.8 - * All rights reserved.
     2.9 - *
    2.10 - * You may not use, copy or modify this file, except in compliance with the
    2.11 - * license agreement you entered into with Werner Randelshofer.
    2.12 - * For details see accompanying license terms.
    2.13 - */
    2.14 -package com.aurellem.capture;
    2.15 -
    2.16 -import java.awt.Dimension;
    2.17 -import java.awt.image.BufferedImage;
    2.18 -import java.awt.image.DataBufferByte;
    2.19 -import java.awt.image.IndexColorModel;
    2.20 -import java.awt.image.WritableRaster;
    2.21 -import java.io.File;
    2.22 -import java.io.FileInputStream;
    2.23 -import java.io.IOException;
    2.24 -import java.io.InputStream;
    2.25 -import java.io.OutputStream;
    2.26 -import java.util.Arrays;
    2.27 -import java.util.Date;
    2.28 -import java.util.LinkedList;
    2.29 -
    2.30 -import javax.imageio.IIOImage;
    2.31 -import javax.imageio.ImageIO;
    2.32 -import javax.imageio.ImageWriteParam;
    2.33 -import javax.imageio.ImageWriter;
    2.34 -import javax.imageio.stream.FileImageOutputStream;
    2.35 -import javax.imageio.stream.ImageOutputStream;
    2.36 -import javax.imageio.stream.MemoryCacheImageOutputStream;
    2.37 -
    2.38 -/**
    2.39 - * This class supports writing of images into an AVI 1.0 video file.
    2.40 - * <p>
    2.41 - * The images are written as video frames.
    2.42 - * <p>
    2.43 - * Video frames can be encoded with one of the following formats:
    2.44 - * <ul>
    2.45 - * <li>JPEG</li>
    2.46 - * <li>PNG</li>
    2.47 - * <li>RAW</li>
    2.48 - * <li>RLE</li>
    2.49 - * </ul>
    2.50 - * All frames must have the same format.
    2.51 - * When JPG is used each frame can have an individual encoding quality.
    2.52 - * <p>
    2.53 - * All frames in an AVI file must have the same duration. The duration can
    2.54 - * be set by setting an appropriate pair of values using methods
    2.55 - * {@link #setFrameRate} and {@link #setTimeScale}.
    2.56 - * <p>
    2.57 - * The length of an AVI 1.0 file is limited to 1 GB.
    2.58 - * This class supports lengths of up to 4 GB, but such files may not work on
    2.59 - * all players.
    2.60 - * <p>
    2.61 - * For detailed information about the AVI RIFF file format see:<br>
    2.62 - * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br>
    2.63 - * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br>
    2.64 - * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br>
    2.65 - *
    2.66 - * @author Werner Randelshofer
    2.67 - * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream..
    2.68 - * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format.
    2.69 - * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets
    2.70 - * in "idx1" chunk.
    2.71 - * <br>1.3.2 2010-12-27 File size limit is 1 GB.
    2.72 - * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets.
    2.73 - * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream.
    2.74 - * Added method getVideoDimension().
    2.75 - * <br>1.2 2009-08-29 Adds support for RAW video format.
    2.76 - * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih
    2.77 - * chunk. Changed the API to reflect that AVI works with frame rates instead of
    2.78 - * with frame durations.
    2.79 - * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG
    2.80 - * encoded video.
    2.81 - * <br>1.0 2008-08-11 Created.
    2.82 - */
    2.83 -public class AVIOutputStream {
    2.84 -
    2.85 -    /**
    2.86 -     * Underlying output stream.
    2.87 -     */
    2.88 -    private ImageOutputStream out;
    2.89 -    /** The offset of the QuickTime stream in the underlying ImageOutputStream.
    2.90 -     * Normally this is 0 unless the underlying stream already contained data
    2.91 -     * when it was passed to the constructor.
    2.92 -     */
    2.93 -    private long streamOffset;
    2.94 -    /** Previous frame for delta compression. */
    2.95 -    private Object previousData;
    2.96 -
    2.97 -    /**
    2.98 -     * Supported video encodings.
    2.99 -     */
   2.100 -    public static enum VideoFormat {
   2.101 -
   2.102 -        RAW, RLE, JPG, PNG;
   2.103 -    }
   2.104 -    /**
   2.105 -     * Current video formats.
   2.106 -     */
   2.107 -    private VideoFormat videoFormat;
   2.108 -    /**
   2.109 -     * Quality of JPEG encoded video frames.
   2.110 -     */
   2.111 -    private float quality = 0.9f;
   2.112 -    /**
   2.113 -     * Creation time of the movie output stream.
   2.114 -     */
   2.115 -    private Date creationTime;
   2.116 -    /**
   2.117 -     * Width of the video frames. All frames must have the same width.
   2.118 -     * The value -1 is used to mark unspecified width.
   2.119 -     */
   2.120 -    private int imgWidth = -1;
   2.121 -    /**
   2.122 -     * Height of the video frames. All frames must have the same height.
   2.123 -     * The value -1 is used to mark unspecified height.
   2.124 -     */
   2.125 -    private int imgHeight = -1;
   2.126 -    /** Number of bits per pixel. */
   2.127 -    private int imgDepth = 24;
   2.128 -    /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */
   2.129 -    private IndexColorModel palette;
   2.130 -    private IndexColorModel previousPalette;
   2.131 -    /** Video encoder. */
   2.132 -    
   2.133 -    /**
   2.134 -     * The timeScale of the movie.
   2.135 -     * <p>
   2.136 -     * Used with frameRate to specify the time scale that this stream will use.
   2.137 -     * Dividing frameRate by timeScale gives the number of samples per second.
   2.138 -     * For video streams, this is the frame rate. For audio streams, this rate
   2.139 -     * corresponds to the time needed to play nBlockAlign bytes of audio, which
   2.140 -     * for PCM audio is the just the sample rate.
   2.141 -     */
   2.142 -    private int timeScale = 1;
   2.143 -    /**
   2.144 -     * The frameRate of the movie in timeScale units.
   2.145 -     * <p>
   2.146 -     * @see timeScale
   2.147 -     */
   2.148 -    private int frameRate = 30;
   2.149 -    /** Interval between keyframes. */
   2.150 -    private int syncInterval = 30;
   2.151 -
   2.152 -    /**
   2.153 -     * The states of the movie output stream.
   2.154 -     */
   2.155 -    private static enum States {
   2.156 -
   2.157 -        STARTED, FINISHED, CLOSED;
   2.158 -    }
   2.159 -    /**
   2.160 -     * The current state of the movie output stream.
   2.161 -     */
   2.162 -    private States state = States.FINISHED;
   2.163 -
   2.164 -    /**
   2.165 -     * AVI stores media data in samples.
   2.166 -     * A sample is a single element in a sequence of time-ordered data.
   2.167 -     */
   2.168 -    private static class Sample {
   2.169 -
   2.170 -        String chunkType;
   2.171 -        /** Offset of the sample relative to the start of the AVI file.
   2.172 -         */
   2.173 -        long offset;
   2.174 -        /** Data length of the sample. */
   2.175 -        long length;
   2.176 -        /**
   2.177 -         * The duration of the sample in time scale units.
   2.178 -         */
   2.179 -        int duration;
   2.180 -        /** Whether the sample is a sync-sample. */
   2.181 -        boolean isSync;
   2.182 -
   2.183 -        /**
   2.184 -         * Creates a new sample.
   2.185 -         * @param duration
   2.186 -         * @param offset
   2.187 -         * @param length
   2.188 -         */
   2.189 -        public Sample(String chunkId, int duration, long offset, long length, boolean isSync) {
   2.190 -            this.chunkType = chunkId;
   2.191 -            this.duration = duration;
   2.192 -            this.offset = offset;
   2.193 -            this.length = length;
   2.194 -            this.isSync = isSync;
   2.195 -        }
   2.196 -    }
   2.197 -    /**
   2.198 -     * List of video frames.
   2.199 -     */
   2.200 -    private LinkedList<Sample> videoFrames;
   2.201 -    /**
   2.202 -     * This chunk holds the whole AVI content.
   2.203 -     */
   2.204 -    private CompositeChunk aviChunk;
   2.205 -    /**
   2.206 -     * This chunk holds the movie frames.
   2.207 -     */
   2.208 -    private CompositeChunk moviChunk;
   2.209 -    /**
   2.210 -     * This chunk holds the AVI Main Header.
   2.211 -     */
   2.212 -    FixedSizeDataChunk avihChunk;
   2.213 -    /**
   2.214 -     * This chunk holds the AVI Stream Header.
   2.215 -     */
   2.216 -    FixedSizeDataChunk strhChunk;
   2.217 -    /**
   2.218 -     * This chunk holds the AVI Stream Format Header.
   2.219 -     */
   2.220 -    FixedSizeDataChunk strfChunk;
   2.221 -
   2.222 -    /**
   2.223 -     * Chunk base class.
   2.224 -     */
   2.225 -    private abstract class Chunk {
   2.226 -
   2.227 -        /**
   2.228 -         * The chunkType of the chunk. A String with the length of 4 characters.
   2.229 -         */
   2.230 -        protected String chunkType;
   2.231 -        /**
   2.232 -         * The offset of the chunk relative to the start of the
   2.233 -         * ImageOutputStream.
   2.234 -         */
   2.235 -        protected long offset;
   2.236 -
   2.237 -        /**
   2.238 -         * Creates a new Chunk at the current position of the ImageOutputStream.
   2.239 -         * @param chunkType The chunkType of the chunk. A string with a length of 4 characters.
   2.240 -         */
   2.241 -        public Chunk(String chunkType) throws IOException {
   2.242 -            this.chunkType = chunkType;
   2.243 -            offset = getRelativeStreamPosition();
   2.244 -        }
   2.245 -
   2.246 -        /**
   2.247 -         * Writes the chunk to the ImageOutputStream and disposes it.
   2.248 -         */
   2.249 -        public abstract void finish() throws IOException;
   2.250 -
   2.251 -        /**
   2.252 -         * Returns the size of the chunk including the size of the chunk header.
   2.253 -         * @return The size of the chunk.
   2.254 -         */
   2.255 -        public abstract long size();
   2.256 -    }
   2.257 -
   2.258 -    /**
   2.259 -     * A CompositeChunk contains an ordered list of Chunks.
   2.260 -     */
   2.261 -    private class CompositeChunk extends Chunk {
   2.262 -
   2.263 -        /**
   2.264 -         * The type of the composite. A String with the length of 4 characters.
   2.265 -         */
   2.266 -        protected String compositeType;
   2.267 -        private LinkedList<Chunk> children;
   2.268 -        private boolean finished;
   2.269 -
   2.270 -        /**
   2.271 -         * Creates a new CompositeChunk at the current position of the
   2.272 -         * ImageOutputStream.
   2.273 -         * @param compositeType The type of the composite.
   2.274 -         * @param chunkType The type of the chunk.
   2.275 -         */
   2.276 -        public CompositeChunk(String compositeType, String chunkType) throws IOException {
   2.277 -            super(chunkType);
   2.278 -            this.compositeType = compositeType;
   2.279 -            //out.write
   2.280 -            out.writeLong(0); // make room for the chunk header
   2.281 -            out.writeInt(0); // make room for the chunk header
   2.282 -            children = new LinkedList<Chunk>();
   2.283 -        }
   2.284 -
   2.285 -        public void add(Chunk child) throws IOException {
   2.286 -            if (children.size() > 0) {
   2.287 -                children.getLast().finish();
   2.288 -            }
   2.289 -            children.add(child);
   2.290 -        }
   2.291 -
   2.292 -        /**
   2.293 -         * Writes the chunk and all its children to the ImageOutputStream
   2.294 -         * and disposes of all resources held by the chunk.
   2.295 -         * @throws java.io.IOException
   2.296 -         */
   2.297 -        @Override
   2.298 -        public void finish() throws IOException {
   2.299 -            if (!finished) {
   2.300 -                if (size() > 0xffffffffL) {
   2.301 -                    throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size());
   2.302 -                }
   2.303 -
   2.304 -                long pointer = getRelativeStreamPosition();
   2.305 -                seekRelative(offset);
   2.306 -
   2.307 -                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
   2.308 -                headerData.writeType(compositeType);
   2.309 -                headerData.writeUInt(size() - 8);
   2.310 -                headerData.writeType(chunkType);
   2.311 -                for (Chunk child : children) {
   2.312 -                    child.finish();
   2.313 -                }
   2.314 -                seekRelative(pointer);
   2.315 -                if (size() % 2 == 1) {
   2.316 -                    out.writeByte(0); // write pad byte
   2.317 -                }
   2.318 -                finished = true;
   2.319 -            }
   2.320 -        }
   2.321 -
   2.322 -        @Override
   2.323 -        public long size() {
   2.324 -            long length = 12;
   2.325 -            for (Chunk child : children) {
   2.326 -                length += child.size() + child.size() % 2;
   2.327 -            }
   2.328 -            return length;
   2.329 -        }
   2.330 -    }
   2.331 -
   2.332 -    /**
   2.333 -     * Data Chunk.
   2.334 -     */
   2.335 -    private class DataChunk extends Chunk {
   2.336 -
   2.337 -        private DataChunkOutputStream data;
   2.338 -        private boolean finished;
   2.339 -
   2.340 -        /**
   2.341 -         * Creates a new DataChunk at the current position of the
   2.342 -         * ImageOutputStream.
   2.343 -         * @param chunkType The chunkType of the chunk.
   2.344 -         */
   2.345 -        public DataChunk(String name) throws IOException {
   2.346 -            super(name);
   2.347 -            out.writeLong(0); // make room for the chunk header
   2.348 -            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
   2.349 -        }
   2.350 -
   2.351 -        public DataChunkOutputStream getOutputStream() {
   2.352 -            if (finished) {
   2.353 -                throw new IllegalStateException("DataChunk is finished");
   2.354 -            }
   2.355 -            return data;
   2.356 -        }
   2.357 -
   2.358 -        /**
   2.359 -         * Returns the offset of this chunk to the beginning of the random access file
   2.360 -         * @return
   2.361 -         */
   2.362 -        public long getOffset() {
   2.363 -            return offset;
   2.364 -        }
   2.365 -
   2.366 -        @Override
   2.367 -        public void finish() throws IOException {
   2.368 -            if (!finished) {
   2.369 -                long sizeBefore = size();
   2.370 -
   2.371 -                if (size() > 0xffffffffL) {
   2.372 -                    throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size());
   2.373 -                }
   2.374 -
   2.375 -                long pointer = getRelativeStreamPosition();
   2.376 -                seekRelative(offset);
   2.377 -
   2.378 -                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
   2.379 -                headerData.writeType(chunkType);
   2.380 -                headerData.writeUInt(size() - 8);
   2.381 -                seekRelative(pointer);
   2.382 -                if (size() % 2 == 1) {
   2.383 -                    out.writeByte(0); // write pad byte
   2.384 -                }
   2.385 -                finished = true;
   2.386 -                long sizeAfter = size();
   2.387 -                if (sizeBefore != sizeAfter) {
   2.388 -                    System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter);
   2.389 -                }
   2.390 -            }
   2.391 -        }
   2.392 -
   2.393 -        @Override
   2.394 -        public long size() {
   2.395 -            return 8 + data.size();
   2.396 -        }
   2.397 -    }
   2.398 -
   2.399 -    /**
   2.400 -     * A DataChunk with a fixed size.
   2.401 -     */
   2.402 -    private class FixedSizeDataChunk extends Chunk {
   2.403 -
   2.404 -        private DataChunkOutputStream data;
   2.405 -        private boolean finished;
   2.406 -        private long fixedSize;
   2.407 -
   2.408 -        /**
   2.409 -         * Creates a new DataChunk at the current position of the
   2.410 -         * ImageOutputStream.
   2.411 -         * @param chunkType The chunkType of the chunk.
   2.412 -         */
   2.413 -        public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException {
   2.414 -            super(chunkType);
   2.415 -            this.fixedSize = fixedSize;
   2.416 -            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
   2.417 -            data.writeType(chunkType);
   2.418 -            data.writeUInt(fixedSize);
   2.419 -            data.clearCount();
   2.420 -
   2.421 -            // Fill fixed size with nulls
   2.422 -            byte[] buf = new byte[(int) Math.min(512, fixedSize)];
   2.423 -            long written = 0;
   2.424 -            while (written < fixedSize) {
   2.425 -                data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written));
   2.426 -                written += Math.min(buf.length, fixedSize - written);
   2.427 -            }
   2.428 -            if (fixedSize % 2 == 1) {
   2.429 -                out.writeByte(0); // write pad byte
   2.430 -            }
   2.431 -            seekToStartOfData();
   2.432 -        }
   2.433 -
   2.434 -        public DataChunkOutputStream getOutputStream() {
   2.435 -            /*if (finished) {
   2.436 -            throw new IllegalStateException("DataChunk is finished");
   2.437 -            }*/
   2.438 -            return data;
   2.439 -        }
   2.440 -
   2.441 -        /**
   2.442 -         * Returns the offset of this chunk to the beginning of the random access file
   2.443 -         * @return
   2.444 -         */
   2.445 -        public long getOffset() {
   2.446 -            return offset;
   2.447 -        }
   2.448 -
   2.449 -        public void seekToStartOfData() throws IOException {
   2.450 -            seekRelative(offset + 8);
   2.451 -            data.clearCount();
   2.452 -        }
   2.453 -
   2.454 -        public void seekToEndOfChunk() throws IOException {
   2.455 -            seekRelative(offset + 8 + fixedSize + fixedSize % 2);
   2.456 -        }
   2.457 -
   2.458 -        @Override
   2.459 -        public void finish() throws IOException {
   2.460 -            if (!finished) {
   2.461 -                finished = true;
   2.462 -            }
   2.463 -        }
   2.464 -
   2.465 -        @Override
   2.466 -        public long size() {
   2.467 -            return 8 + fixedSize;
   2.468 -        }
   2.469 -    }
   2.470 -
   2.471 -    /**
   2.472 -     * Creates a new AVI file with the specified video format and
   2.473 -     * frame rate. The video has 24 bits per pixel.
   2.474 -     *
   2.475 -     * @param file the output file
   2.476 -     * @param format Selects an encoder for the video format.
   2.477 -     * @param bitsPerPixel the number of bits per pixel.
   2.478 -     * @exception IllegalArgumentException if videoFormat is null or if
   2.479 -     * frame rate is <= 0
   2.480 -     */
   2.481 -    public AVIOutputStream(File file, VideoFormat format) throws IOException {
   2.482 -        this(file,format,24);
   2.483 -    }
   2.484 -    /**
   2.485 -     * Creates a new AVI file with the specified video format and
   2.486 -     * frame rate.
   2.487 -     *
   2.488 -     * @param file the output file
   2.489 -     * @param format Selects an encoder for the video format.
   2.490 -     * @param bitsPerPixel the number of bits per pixel.
   2.491 -     * @exception IllegalArgumentException if videoFormat is null or if
   2.492 -     * frame rate is <= 0
   2.493 -     */
   2.494 -    public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException {
   2.495 -        if (format == null) {
   2.496 -            throw new IllegalArgumentException("format must not be null");
   2.497 -        }
   2.498 -
   2.499 -        if (file.exists()) {
   2.500 -            file.delete();
   2.501 -        }
   2.502 -        this.out = new FileImageOutputStream(file);
   2.503 -        this.streamOffset = 0;
   2.504 -        this.videoFormat = format;
   2.505 -        this.videoFrames = new LinkedList<Sample>();
   2.506 -        this.imgDepth = bitsPerPixel;
   2.507 -        if (imgDepth == 4) {
   2.508 -            byte[] gray = new byte[16];
   2.509 -            for (int i = 0; i < gray.length; i++) {
   2.510 -                gray[i] = (byte) ((i << 4) | i);
   2.511 -            }
   2.512 -            palette = new IndexColorModel(4, 16, gray, gray, gray);
   2.513 -        } else if (imgDepth == 8) {
   2.514 -            byte[] gray = new byte[256];
   2.515 -            for (int i = 0; i < gray.length; i++) {
   2.516 -                gray[i] = (byte) i;
   2.517 -            }
   2.518 -            palette = new IndexColorModel(8, 256, gray, gray, gray);
   2.519 -        }
   2.520 -
   2.521 -    }
   2.522 -
   2.523 -    /**
   2.524 -     * Creates a new AVI output stream with the specified video format and
   2.525 -     * framerate.
   2.526 -     *
   2.527 -     * @param out the underlying output stream
   2.528 -     * @param format Selects an encoder for the video format.
   2.529 -     * @exception IllegalArgumentException if videoFormat is null or if
   2.530 -     * framerate is <= 0
   2.531 -     */
   2.532 -    public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {
   2.533 -        if (format == null) {
   2.534 -            throw new IllegalArgumentException("format must not be null");
   2.535 -        }
   2.536 -        this.out = out;
   2.537 -        this.streamOffset = out.getStreamPosition();
   2.538 -        this.videoFormat = format;
   2.539 -        this.videoFrames = new LinkedList<Sample>();
   2.540 -    }
   2.541 -
   2.542 -    /**
   2.543 -     * Used with frameRate to specify the time scale that this stream will use.
   2.544 -     * Dividing frameRate by timeScale gives the number of samples per second.
   2.545 -     * For video streams, this is the frame rate. For audio streams, this rate
   2.546 -     * corresponds to the time needed to play nBlockAlign bytes of audio, which
   2.547 -     * for PCM audio is the just the sample rate.
   2.548 -     * <p>
   2.549 -     * The default value is 1.
   2.550 -     *
   2.551 -     * @param newValue
   2.552 -     */
   2.553 -    public void setTimeScale(int newValue) {
   2.554 -        if (newValue <= 0) {
   2.555 -            throw new IllegalArgumentException("timeScale must be greater 0");
   2.556 -        }
   2.557 -        this.timeScale = newValue;
   2.558 -    }
   2.559 -
   2.560 -    /**
   2.561 -     * Returns the time scale of this media.
   2.562 -     *
   2.563 -     * @return time scale
   2.564 -     */
   2.565 -    public int getTimeScale() {
   2.566 -        return timeScale;
   2.567 -    }
   2.568 -
   2.569 -    /**
   2.570 -     * Sets the rate of video frames in time scale units.
   2.571 -     * <p>
   2.572 -     * The default value is 30. Together with the default value 1 of timeScale
   2.573 -     * this results in 30 frames pers second.
   2.574 -     *
   2.575 -     * @param newValue
   2.576 -     */
   2.577 -    public void setFrameRate(int newValue) {
   2.578 -        if (newValue <= 0) {
   2.579 -            throw new IllegalArgumentException("frameDuration must be greater 0");
   2.580 -        }
   2.581 -        if (state == States.STARTED) {
   2.582 -            throw new IllegalStateException("frameDuration must be set before the first frame is written");
   2.583 -        }
   2.584 -        this.frameRate = newValue;
   2.585 -    }
   2.586 -
   2.587 -    /**
   2.588 -     * Returns the frame rate of this media.
   2.589 -     *
   2.590 -     * @return frame rate
   2.591 -     */
   2.592 -    public int getFrameRate() {
   2.593 -        return frameRate;
   2.594 -    }
   2.595 -
   2.596 -    /** Sets the global color palette. */
   2.597 -    public void setPalette(IndexColorModel palette) {
   2.598 -        this.palette = palette;
   2.599 -    }
   2.600 -
   2.601 -    /**
   2.602 -     * Sets the compression quality of the video track.
   2.603 -     * A value of 0 stands for "high compression is important" a value of
   2.604 -     * 1 for "high image quality is important".
   2.605 -     * <p>
   2.606 -     * Changing this value affects frames which are subsequently written
   2.607 -     * to the AVIOutputStream. Frames which have already been written
   2.608 -     * are not changed.
   2.609 -     * <p>
   2.610 -     * This value has only effect on videos encoded with JPG format.
   2.611 -     * <p>
   2.612 -     * The default value is 0.9.
   2.613 -     *
   2.614 -     * @param newValue
   2.615 -     */
   2.616 -    public void setVideoCompressionQuality(float newValue) {
   2.617 -        this.quality = newValue;
   2.618 -    }
   2.619 -
   2.620 -    /**
   2.621 -     * Returns the video compression quality.
   2.622 -     *
   2.623 -     * @return video compression quality
   2.624 -     */
   2.625 -    public float getVideoCompressionQuality() {
   2.626 -        return quality;
   2.627 -    }
   2.628 -
   2.629 -    /**
   2.630 -     * Sets the dimension of the video track.
   2.631 -     * <p>
   2.632 -     * You need to explicitly set the dimension, if you add all frames from
   2.633 -     * files or input streams.
   2.634 -     * <p>
   2.635 -     * If you add frames from buffered images, then AVIOutputStream
   2.636 -     * can determine the video dimension from the image width and height.
   2.637 -     *
   2.638 -     * @param width Must be greater than 0.
   2.639 -     * @param height Must be greater than 0.
   2.640 -     */
   2.641 -    public void setVideoDimension(int width, int height) {
   2.642 -        if (width < 1 || height < 1) {
   2.643 -            throw new IllegalArgumentException("width and height must be greater zero.");
   2.644 -        }
   2.645 -        this.imgWidth = width;
   2.646 -        this.imgHeight = height;
   2.647 -    }
   2.648 -
   2.649 -    /**
   2.650 -     * Gets the dimension of the video track.
   2.651 -     * <p>
   2.652 -     * Returns null if the dimension is not known.
   2.653 -     */
   2.654 -    public Dimension getVideoDimension() {
   2.655 -        if (imgWidth < 1 || imgHeight < 1) {
   2.656 -            return null;
   2.657 -        }
   2.658 -        return new Dimension(imgWidth, imgHeight);
   2.659 -    }
   2.660 -
   2.661 -    /**
   2.662 -     * Sets the state of the QuickTimeOutpuStream to started.
   2.663 -     * <p>
   2.664 -     * If the state is changed by this method, the prolog is
   2.665 -     * written.
   2.666 -     */
   2.667 -    private void ensureStarted() throws IOException {
   2.668 -        if (state != States.STARTED) {
   2.669 -            creationTime = new Date();
   2.670 -            writeProlog();
   2.671 -            state = States.STARTED;
   2.672 -        }
   2.673 -    }
   2.674 -
   2.675 -    /**
   2.676 -     * Writes a frame to the video track.
   2.677 -     * <p>
   2.678 -     * If the dimension of the video track has not been specified yet, it
   2.679 -     * is derived from the first buffered image added to the AVIOutputStream.
   2.680 -     *
   2.681 -     * @param image The frame image.
   2.682 -     *
   2.683 -     * @throws IllegalArgumentException if the duration is less than 1, or
   2.684 -     * if the dimension of the frame does not match the dimension of the video
   2.685 -     * track.
   2.686 -     * @throws IOException if writing the image failed.
   2.687 -     */
   2.688 -    public void writeFrame(BufferedImage image) throws IOException {
   2.689 -        ensureOpen();
   2.690 -        ensureStarted();
   2.691 -
   2.692 -        // Get the dimensions of the first image
   2.693 -        if (imgWidth == -1) {
   2.694 -            imgWidth = image.getWidth();
   2.695 -            imgHeight = image.getHeight();
   2.696 -        } else {
   2.697 -            // The dimension of the image must match the dimension of the video track
   2.698 -            if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) {
   2.699 -                throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size()
   2.700 -                        + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
   2.701 -                        + ") differs from image[0] (width="
   2.702 -                        + imgWidth + ", height=" + imgHeight);
   2.703 -            }
   2.704 -        }
   2.705 -
   2.706 -        DataChunk videoFrameChunk;
   2.707 -        long offset = getRelativeStreamPosition();
   2.708 -        boolean isSync = true;
   2.709 -        switch (videoFormat) {
   2.710 -            case RAW: {
   2.711 -                switch (imgDepth) {
   2.712 -                    case 4: {
   2.713 -                        IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
   2.714 -                        int[] imgRGBs = new int[16];
   2.715 -                        imgPalette.getRGBs(imgRGBs);
   2.716 -                        int[] previousRGBs = new int[16];
   2.717 -                        if (previousPalette == null) {
   2.718 -                            previousPalette = palette;
   2.719 -                        }
   2.720 -                        previousPalette.getRGBs(previousRGBs);
   2.721 -                        if (!Arrays.equals(imgRGBs, previousRGBs)) {
   2.722 -                            previousPalette = imgPalette;
   2.723 -                            DataChunk paletteChangeChunk = new DataChunk("00pc");
   2.724 -                            /*
   2.725 -                            int first = imgPalette.getMapSize();
   2.726 -                            int last = -1;
   2.727 -                            for (int i = 0; i < 16; i++) {
   2.728 -                            if (previousRGBs[i] != imgRGBs[i] && i < first) {
   2.729 -                            first = i;
   2.730 -                            }
   2.731 -                            if (previousRGBs[i] != imgRGBs[i] && i > last) {
   2.732 -                            last = i;
   2.733 -                            }
   2.734 -                            }*/
   2.735 -                            int first = 0;
   2.736 -                            int last = imgPalette.getMapSize() - 1;
   2.737 -                            /*
   2.738 -                             * typedef struct {
   2.739 -                            BYTE         bFirstEntry;
   2.740 -                            BYTE         bNumEntries;
   2.741 -                            WORD         wFlags;
   2.742 -                            PALETTEENTRY peNew[];
   2.743 -                            } AVIPALCHANGE;
   2.744 -                             *
   2.745 -                             * typedef struct tagPALETTEENTRY {
   2.746 -                            BYTE peRed;
   2.747 -                            BYTE peGreen;
   2.748 -                            BYTE peBlue;
   2.749 -                            BYTE peFlags;
   2.750 -                            } PALETTEENTRY;
   2.751 -                             */
   2.752 -                            DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
   2.753 -                            pOut.writeByte(first);//bFirstEntry
   2.754 -                            pOut.writeByte(last - first + 1);//bNumEntries
   2.755 -                            pOut.writeShort(0);//wFlags
   2.756 -
   2.757 -                            for (int i = first; i <= last; i++) {
   2.758 -                                pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
   2.759 -                                pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
   2.760 -                                pOut.writeByte(imgRGBs[i] & 0xff); // blue
   2.761 -                                pOut.writeByte(0); // reserved*/
   2.762 -                            }
   2.763 -
   2.764 -                            moviChunk.add(paletteChangeChunk);
   2.765 -                            paletteChangeChunk.finish();
   2.766 -                            long length = getRelativeStreamPosition() - offset;
   2.767 -                            videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
   2.768 -                            offset = getRelativeStreamPosition();
   2.769 -                        }
   2.770 -
   2.771 -                        videoFrameChunk = new DataChunk("00db");
   2.772 -                        byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
   2.773 -                        byte[] rgb4 = new byte[imgWidth / 2];
   2.774 -                        for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
   2.775 -                            for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) {
   2.776 -                                rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf));
   2.777 -                            }
   2.778 -                            videoFrameChunk.getOutputStream().write(rgb4);
   2.779 -                        }
   2.780 -                        break;
   2.781 -                    }
   2.782 -                    case 8: {
   2.783 -                        IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
   2.784 -                        int[] imgRGBs = new int[256];
   2.785 -                        imgPalette.getRGBs(imgRGBs);
   2.786 -                        int[] previousRGBs = new int[256];
   2.787 -                        if (previousPalette == null) {
   2.788 -                            previousPalette = palette;
   2.789 -                        }
   2.790 -                        previousPalette.getRGBs(previousRGBs);
   2.791 -                        if (!Arrays.equals(imgRGBs, previousRGBs)) {
   2.792 -                            previousPalette = imgPalette;
   2.793 -                            DataChunk paletteChangeChunk = new DataChunk("00pc");
   2.794 -                            /*
   2.795 -                            int first = imgPalette.getMapSize();
   2.796 -                            int last = -1;
   2.797 -                            for (int i = 0; i < 16; i++) {
   2.798 -                            if (previousRGBs[i] != imgRGBs[i] && i < first) {
   2.799 -                            first = i;
   2.800 -                            }
   2.801 -                            if (previousRGBs[i] != imgRGBs[i] && i > last) {
   2.802 -                            last = i;
   2.803 -                            }
   2.804 -                            }*/
   2.805 -                            int first = 0;
   2.806 -                            int last = imgPalette.getMapSize() - 1;
   2.807 -                            /*
   2.808 -                             * typedef struct {
   2.809 -                            BYTE         bFirstEntry;
   2.810 -                            BYTE         bNumEntries;
   2.811 -                            WORD         wFlags;
   2.812 -                            PALETTEENTRY peNew[];
   2.813 -                            } AVIPALCHANGE;
   2.814 -                             *
   2.815 -                             * typedef struct tagPALETTEENTRY {
   2.816 -                            BYTE peRed;
   2.817 -                            BYTE peGreen;
   2.818 -                            BYTE peBlue;
   2.819 -                            BYTE peFlags;
   2.820 -                            } PALETTEENTRY;
   2.821 -                             */
   2.822 -                            DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
   2.823 -                            pOut.writeByte(first);//bFirstEntry
   2.824 -                            pOut.writeByte(last - first + 1);//bNumEntries
   2.825 -                            pOut.writeShort(0);//wFlags
   2.826 -
   2.827 -                            for (int i = first; i <= last; i++) {
   2.828 -                                pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
   2.829 -                                pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
   2.830 -                                pOut.writeByte(imgRGBs[i] & 0xff); // blue
   2.831 -                                pOut.writeByte(0); // reserved*/
   2.832 -                            }
   2.833 -
   2.834 -                            moviChunk.add(paletteChangeChunk);
   2.835 -                            paletteChangeChunk.finish();
   2.836 -                            long length = getRelativeStreamPosition() - offset;
   2.837 -                            videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
   2.838 -                            offset = getRelativeStreamPosition();
   2.839 -                        }
   2.840 -
   2.841 -                        videoFrameChunk = new DataChunk("00db");
   2.842 -                        byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
   2.843 -                        for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
   2.844 -                            videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth);
   2.845 -                        }
   2.846 -                        break;
   2.847 -                    }
   2.848 -                    default: {
   2.849 -                        videoFrameChunk = new DataChunk("00db");
   2.850 -                        WritableRaster raster = image.getRaster();
   2.851 -                        int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data
   2.852 -                        byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data
   2.853 -                        for (int y = imgHeight - 1; y >= 0; --y) { // Upside down
   2.854 -                            raster.getPixels(0, y, imgWidth, 1, raw);
   2.855 -                            for (int x = 0, n = imgWidth * 3; x < n; x += 3) {
   2.856 -                                bytes[x + 2] = (byte) raw[x]; // Blue
   2.857 -                                bytes[x + 1] = (byte) raw[x + 1]; // Green
   2.858 -                                bytes[x] = (byte) raw[x + 2]; // Red
   2.859 -                            }
   2.860 -                            videoFrameChunk.getOutputStream().write(bytes);
   2.861 -                        }
   2.862 -                        break;
   2.863 -                    }
   2.864 -                }
   2.865 -                break;
   2.866 -            }
   2.867 -            
   2.868 -            case JPG: {
   2.869 -                videoFrameChunk = new DataChunk("00dc");
   2.870 -                ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next();
   2.871 -                ImageWriteParam iwParam = iw.getDefaultWriteParam();
   2.872 -                iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
   2.873 -                iwParam.setCompressionQuality(quality);
   2.874 -                MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
   2.875 -                iw.setOutput(imgOut);
   2.876 -                IIOImage img = new IIOImage(image, null, null);
   2.877 -                iw.write(null, img, iwParam);
   2.878 -                iw.dispose();
   2.879 -                break;
   2.880 -            }
   2.881 -            case PNG:
   2.882 -            default: {
   2.883 -                videoFrameChunk = new DataChunk("00dc");
   2.884 -                ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next();
   2.885 -                ImageWriteParam iwParam = iw.getDefaultWriteParam();
   2.886 -                MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
   2.887 -                iw.setOutput(imgOut);
   2.888 -                IIOImage img = new IIOImage(image, null, null);
   2.889 -                iw.write(null, img, iwParam);
   2.890 -                iw.dispose();
   2.891 -                break;
   2.892 -            }
   2.893 -        }
   2.894 -        long length = getRelativeStreamPosition() - offset;
   2.895 -        moviChunk.add(videoFrameChunk);
   2.896 -        videoFrameChunk.finish();
   2.897 -
   2.898 -        videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync));
   2.899 -        if (getRelativeStreamPosition() > 1L << 32) {
   2.900 -            throw new IOException("AVI file is larger than 4 GB");
   2.901 -        }
   2.902 -    }
   2.903 -
   2.904 -    /**
   2.905 -     * Writes a frame from a file to the video track.
   2.906 -     * <p>
   2.907 -     * This method does not inspect the contents of the file.
   2.908 -     * For example, Its your responsibility to only add JPG files if you have
   2.909 -     * chosen the JPEG video format.
   2.910 -     * <p>
   2.911 -     * If you add all frames from files or from input streams, then you
   2.912 -     * have to explicitly set the dimension of the video track before you
   2.913 -     * call finish() or close().
   2.914 -     *
   2.915 -     * @param file The file which holds the image data.
   2.916 -     *
   2.917 -     * @throws IllegalStateException if the duration is less than 1.
   2.918 -     * @throws IOException if writing the image failed.
   2.919 -     */
   2.920 -    public void writeFrame(File file) throws IOException {
   2.921 -        FileInputStream in = null;
   2.922 -        try {
   2.923 -            in = new FileInputStream(file);
   2.924 -            writeFrame(in);
   2.925 -        } finally {
   2.926 -            if (in != null) {
   2.927 -                in.close();
   2.928 -            }
   2.929 -        }
   2.930 -    }
   2.931 -
   2.932 -    /**
   2.933 -     * Writes a frame to the video track.
   2.934 -     * <p>
   2.935 -     * This method does not inspect the contents of the file.
   2.936 -     * For example, its your responsibility to only add JPG files if you have
   2.937 -     * chosen the JPEG video format.
   2.938 -     * <p>
   2.939 -     * If you add all frames from files or from input streams, then you
   2.940 -     * have to explicitly set the dimension of the video track before you
   2.941 -     * call finish() or close().
   2.942 -     *
   2.943 -     * @param in The input stream which holds the image data.
   2.944 -     *
   2.945 -     * @throws IllegalArgumentException if the duration is less than 1.
   2.946 -     * @throws IOException if writing the image failed.
   2.947 -     */
   2.948 -    public void writeFrame(InputStream in) throws IOException {
   2.949 -        ensureOpen();
   2.950 -        ensureStarted();
   2.951 -
   2.952 -        DataChunk videoFrameChunk = new DataChunk(
   2.953 -                videoFormat == VideoFormat.RAW ? "00db" : "00dc");
   2.954 -        moviChunk.add(videoFrameChunk);
   2.955 -        OutputStream mdatOut = videoFrameChunk.getOutputStream();
   2.956 -        long offset = getRelativeStreamPosition();
   2.957 -        byte[] buf = new byte[512];
   2.958 -        int len;
   2.959 -        while ((len = in.read(buf)) != -1) {
   2.960 -            mdatOut.write(buf, 0, len);
   2.961 -        }
   2.962 -        long length = getRelativeStreamPosition() - offset;
   2.963 -        videoFrameChunk.finish();
   2.964 -        videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true));
   2.965 -        if (getRelativeStreamPosition() > 1L << 32) {
   2.966 -            throw new IOException("AVI file is larger than 4 GB");
   2.967 -        }
   2.968 -    }
   2.969 -
   2.970 -    /**
   2.971 -     * Closes the movie file as well as the stream being filtered.
   2.972 -     *
   2.973 -     * @exception IOException if an I/O error has occurred
   2.974 -     */
   2.975 -    public void close() throws IOException {
   2.976 -        if (state == States.STARTED) {
   2.977 -            finish();
   2.978 -        }
   2.979 -        if (state != States.CLOSED) {
   2.980 -            out.close();
   2.981 -            state = States.CLOSED;
   2.982 -        }
   2.983 -    }
   2.984 -
   2.985 -    /**
   2.986 -     * Finishes writing the contents of the AVI output stream without closing
   2.987 -     * the underlying stream. Use this method when applying multiple filters
   2.988 -     * in succession to the same output stream.
   2.989 -     *
   2.990 -     * @exception IllegalStateException if the dimension of the video track
   2.991 -     * has not been specified or determined yet.
   2.992 -     * @exception IOException if an I/O exception has occurred
   2.993 -     */
   2.994 -    public void finish() throws IOException {
   2.995 -        ensureOpen();
   2.996 -        if (state != States.FINISHED) {
   2.997 -            if (imgWidth == -1 || imgHeight == -1) {
   2.998 -                throw new IllegalStateException("image width and height must be specified");
   2.999 -            }
  2.1000 -
  2.1001 -            moviChunk.finish();
  2.1002 -            writeEpilog();
  2.1003 -            state = States.FINISHED;
  2.1004 -            imgWidth = imgHeight = -1;
  2.1005 -        }
  2.1006 -    }
  2.1007 -
  2.1008 -    /**
  2.1009 -     * Check to make sure that this stream has not been closed
  2.1010 -     */
  2.1011 -    private void ensureOpen() throws IOException {
  2.1012 -        if (state == States.CLOSED) {
  2.1013 -            throw new IOException("Stream closed");
  2.1014 -        }
  2.1015 -    }
  2.1016 -
  2.1017 -    /** Gets the position relative to the beginning of the QuickTime stream.
  2.1018 -     * <p>
  2.1019 -     * Usually this value is equal to the stream position of the underlying
  2.1020 -     * ImageOutputStream, but can be larger if the underlying stream already
  2.1021 -     * contained data.
  2.1022 -     *
  2.1023 -     * @return The relative stream position.
  2.1024 -     * @throws IOException
  2.1025 -     */
  2.1026 -    private long getRelativeStreamPosition() throws IOException {
  2.1027 -        return out.getStreamPosition() - streamOffset;
  2.1028 -    }
  2.1029 -
  2.1030 -    /** Seeks relative to the beginning of the QuickTime stream.
  2.1031 -     * <p>
  2.1032 -     * Usually this equal to seeking in the underlying ImageOutputStream, but
  2.1033 -     * can be different if the underlying stream already contained data.
  2.1034 -     *
  2.1035 -     */
  2.1036 -    private void seekRelative(long newPosition) throws IOException {
  2.1037 -        out.seek(newPosition + streamOffset);
  2.1038 -    }
  2.1039 -
  2.1040 -    private void writeProlog() throws IOException {
  2.1041 -        // The file has the following structure:
  2.1042 -        //
  2.1043 -        // .RIFF AVI
  2.1044 -        // ..avih (AVI Header Chunk)
  2.1045 -        // ..LIST strl
  2.1046 -        // ...strh (Stream Header Chunk)
  2.1047 -        // ...strf (Stream Format Chunk)
  2.1048 -        // ..LIST movi
  2.1049 -        // ...00dc (Compressed video data chunk in Track 00, repeated for each frame)
  2.1050 -        // ..idx1 (List of video data chunks and their location in the file)
  2.1051 -
  2.1052 -        // The RIFF AVI Chunk holds the complete movie
  2.1053 -        aviChunk = new CompositeChunk("RIFF", "AVI ");
  2.1054 -        CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl");
  2.1055 -
  2.1056 -        // Write empty AVI Main Header Chunk - we fill the data in later
  2.1057 -        aviChunk.add(hdrlChunk);
  2.1058 -        avihChunk = new FixedSizeDataChunk("avih", 56);
  2.1059 -        avihChunk.seekToEndOfChunk();
  2.1060 -        hdrlChunk.add(avihChunk);
  2.1061 -
  2.1062 -        CompositeChunk strlChunk = new CompositeChunk("LIST", "strl");
  2.1063 -        hdrlChunk.add(strlChunk);
  2.1064 -
  2.1065 -        // Write empty AVI Stream Header Chunk - we fill the data in later
  2.1066 -        strhChunk = new FixedSizeDataChunk("strh", 56);
  2.1067 -        strhChunk.seekToEndOfChunk();
  2.1068 -        strlChunk.add(strhChunk);
  2.1069 -        strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4);
  2.1070 -        strfChunk.seekToEndOfChunk();
  2.1071 -        strlChunk.add(strfChunk);
  2.1072 -
  2.1073 -        moviChunk = new CompositeChunk("LIST", "movi");
  2.1074 -        aviChunk.add(moviChunk);
  2.1075 -
  2.1076 -
  2.1077 -    }
  2.1078 -
  2.1079 -    private void writeEpilog() throws IOException {
  2.1080 -        // Compute values
  2.1081 -        int duration = 0;
  2.1082 -        for (Sample s : videoFrames) {
  2.1083 -            duration += s.duration;
  2.1084 -        }
  2.1085 -        long bufferSize = 0;
  2.1086 -        for (Sample s : videoFrames) {
  2.1087 -            if (s.length > bufferSize) {
  2.1088 -                bufferSize = s.length;
  2.1089 -            }
  2.1090 -        }
  2.1091 -
  2.1092 -
  2.1093 -        DataChunkOutputStream d;
  2.1094 -
  2.1095 -        /* Create Idx1 Chunk and write data
  2.1096 -         * -------------
  2.1097 -        typedef struct _avioldindex {
  2.1098 -        FOURCC  fcc;
  2.1099 -        DWORD   cb;
  2.1100 -        struct _avioldindex_entry {
  2.1101 -        DWORD   dwChunkId;
  2.1102 -        DWORD   dwFlags;
  2.1103 -        DWORD   dwOffset;
  2.1104 -        DWORD   dwSize;
  2.1105 -        } aIndex[];
  2.1106 -        } AVIOLDINDEX;
  2.1107 -         */
  2.1108 -        DataChunk idx1Chunk = new DataChunk("idx1");
  2.1109 -        aviChunk.add(idx1Chunk);
  2.1110 -        d = idx1Chunk.getOutputStream();
  2.1111 -        long moviListOffset = moviChunk.offset + 8;
  2.1112 -        //moviListOffset = 0;
  2.1113 -        for (Sample f : videoFrames) {
  2.1114 -
  2.1115 -            d.writeType(f.chunkType); // dwChunkId
  2.1116 -            // Specifies a FOURCC that identifies a stream in the AVI file. The
  2.1117 -            // FOURCC must have the form 'xxyy' where xx is the stream number and yy
  2.1118 -            // is a two-character code that identifies the contents of the stream:
  2.1119 -            //
  2.1120 -            // Two-character code   Description
  2.1121 -            //  db                  Uncompressed video frame
  2.1122 -            //  dc                  Compressed video frame
  2.1123 -            //  pc                  Palette change
  2.1124 -            //  wb                  Audio data
  2.1125 -
  2.1126 -            d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)//
  2.1127 -                    | (f.isSync ? 0x10 : 0x0)); // dwFlags
  2.1128 -            // Specifies a bitwise combination of zero or more of the following
  2.1129 -            // flags:
  2.1130 -            //
  2.1131 -            // Value    Name            Description
  2.1132 -            // 0x10     AVIIF_KEYFRAME  The data chunk is a key frame.
  2.1133 -            // 0x1      AVIIF_LIST      The data chunk is a 'rec ' list.
  2.1134 -            // 0x100    AVIIF_NO_TIME   The data chunk does not affect the timing of the
  2.1135 -            //                          stream. For example, this flag should be set for
  2.1136 -            //                          palette changes.
  2.1137 -
  2.1138 -            d.writeUInt(f.offset - moviListOffset); // dwOffset
  2.1139 -            // Specifies the location of the data chunk in the file. The value
  2.1140 -            // should be specified as an offset, in bytes, from the start of the
  2.1141 -            // 'movi' list; however, in some AVI files it is given as an offset from
  2.1142 -            // the start of the file.
  2.1143 -
  2.1144 -            d.writeUInt(f.length); // dwSize
  2.1145 -            // Specifies the size of the data chunk, in bytes.
  2.1146 -        }
  2.1147 -        idx1Chunk.finish();
  2.1148 -
  2.1149 -        /* Write Data into AVI Main Header Chunk
  2.1150 -         * -------------
  2.1151 -         * The AVIMAINHEADER structure defines global information in an AVI file.
  2.1152 -         * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx
  2.1153 -        typedef struct _avimainheader {
  2.1154 -        FOURCC fcc;
  2.1155 -        DWORD  cb;
  2.1156 -        DWORD  dwMicroSecPerFrame;
  2.1157 -        DWORD  dwMaxBytesPerSec;
  2.1158 -        DWORD  dwPaddingGranularity;
  2.1159 -        DWORD  dwFlags;
  2.1160 -        DWORD  dwTotalFrames;
  2.1161 -        DWORD  dwInitialFrames;
  2.1162 -        DWORD  dwStreams;
  2.1163 -        DWORD  dwSuggestedBufferSize;
  2.1164 -        DWORD  dwWidth;
  2.1165 -        DWORD  dwHeight;
  2.1166 -        DWORD  dwReserved[4];
  2.1167 -        } AVIMAINHEADER; */
  2.1168 -        avihChunk.seekToStartOfData();
  2.1169 -        d = avihChunk.getOutputStream();
  2.1170 -
  2.1171 -        d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame
  2.1172 -        // Specifies the number of microseconds between frames.
  2.1173 -        // This value indicates the overall timing for the file.
  2.1174 -
  2.1175 -        d.writeUInt(0); // dwMaxBytesPerSec
  2.1176 -        // Specifies the approximate maximum data rate of the file.
  2.1177 -        // This value indicates the number of bytes per second the system
  2.1178 -        // must handle to present an AVI sequence as specified by the other
  2.1179 -        // parameters contained in the main header and stream header chunks.
  2.1180 -
  2.1181 -        d.writeUInt(0); // dwPaddingGranularity
  2.1182 -        // Specifies the alignment for data, in bytes. Pad the data to multiples
  2.1183 -        // of this value.
  2.1184 -
  2.1185 -        d.writeUInt(0x10); // dwFlags (0x10 == hasIndex)
  2.1186 -        // Contains a bitwise combination of zero or more of the following
  2.1187 -        // flags:
  2.1188 -        //
  2.1189 -        // Value   Name         Description
  2.1190 -        // 0x10    AVIF_HASINDEX Indicates the AVI file has an index.
  2.1191 -        // 0x20    AVIF_MUSTUSEINDEX Indicates that application should use the
  2.1192 -        //                      index, rather than the physical ordering of the
  2.1193 -        //                      chunks in the file, to determine the order of
  2.1194 -        //                      presentation of the data. For example, this flag
  2.1195 -        //                      could be used to create a list of frames for
  2.1196 -        //                      editing.
  2.1197 -        // 0x100   AVIF_ISINTERLEAVED Indicates the AVI file is interleaved.
  2.1198 -        // 0x1000  AVIF_WASCAPTUREFILE Indicates the AVI file is a specially
  2.1199 -        //                      allocated file used for capturing real-time
  2.1200 -        //                      video. Applications should warn the user before
  2.1201 -        //                      writing over a file with this flag set because
  2.1202 -        //                      the user probably defragmented this file.
  2.1203 -        // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted
  2.1204 -        //                      data and software. When this flag is used,
  2.1205 -        //                      software should not permit the data to be
  2.1206 -        //                      duplicated.
  2.1207 -
  2.1208 -        d.writeUInt(videoFrames.size()); // dwTotalFrames
  2.1209 -        // Specifies the total number of frames of data in the file.
  2.1210 -
  2.1211 -        d.writeUInt(0); // dwInitialFrames
  2.1212 -        // Specifies the initial frame for interleaved files. Noninterleaved
  2.1213 -        // files should specify zero. If you are creating interleaved files,
  2.1214 -        // specify the number of frames in the file prior to the initial frame
  2.1215 -        // of the AVI sequence in this member.
  2.1216 -        // To give the audio driver enough audio to work with, the audio data in
  2.1217 -        // an interleaved file must be skewed from the video data. Typically,
  2.1218 -        // the audio data should be moved forward enough frames to allow
  2.1219 -        // approximately 0.75 seconds of audio data to be preloaded. The
  2.1220 -        // dwInitialRecords member should be set to the number of frames the
  2.1221 -        // audio is skewed. Also set the same value for the dwInitialFrames
  2.1222 -        // member of the AVISTREAMHEADER structure in the audio stream header
  2.1223 -
  2.1224 -        d.writeUInt(1); // dwStreams
  2.1225 -        // Specifies the number of streams in the file. For example, a file with
  2.1226 -        // audio and video has two streams.
  2.1227 -
  2.1228 -        d.writeUInt(bufferSize); // dwSuggestedBufferSize
  2.1229 -        // Specifies the suggested buffer size for reading the file. Generally,
  2.1230 -        // this size should be large enough to contain the largest chunk in the
  2.1231 -        // file. If set to zero, or if it is too small, the playback software
  2.1232 -        // will have to reallocate memory during playback, which will reduce
  2.1233 -        // performance. For an interleaved file, the buffer size should be large
  2.1234 -        // enough to read an entire record, and not just a chunk.
  2.1235 -
  2.1236 -
  2.1237 -        d.writeUInt(imgWidth); // dwWidth
  2.1238 -        // Specifies the width of the AVI file in pixels.
  2.1239 -
  2.1240 -        d.writeUInt(imgHeight); // dwHeight
  2.1241 -        // Specifies the height of the AVI file in pixels.
  2.1242 -
  2.1243 -        d.writeUInt(0); // dwReserved[0]
  2.1244 -        d.writeUInt(0); // dwReserved[1]
  2.1245 -        d.writeUInt(0); // dwReserved[2]
  2.1246 -        d.writeUInt(0); // dwReserved[3]
  2.1247 -        // Reserved. Set this array to zero.
  2.1248 -
  2.1249 -        /* Write Data into AVI Stream Header Chunk
  2.1250 -         * -------------
  2.1251 -         * The AVISTREAMHEADER structure contains information about one stream
  2.1252 -         * in an AVI file.
  2.1253 -         * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx
  2.1254 -        typedef struct _avistreamheader {
  2.1255 -        FOURCC fcc;
  2.1256 -        DWORD  cb;
  2.1257 -        FOURCC fccType;
  2.1258 -        FOURCC fccHandler;
  2.1259 -        DWORD  dwFlags;
  2.1260 -        WORD   wPriority;
  2.1261 -        WORD   wLanguage;
  2.1262 -        DWORD  dwInitialFrames;
  2.1263 -        DWORD  dwScale;
  2.1264 -        DWORD  dwRate;
  2.1265 -        DWORD  dwStart;
  2.1266 -        DWORD  dwLength;
  2.1267 -        DWORD  dwSuggestedBufferSize;
  2.1268 -        DWORD  dwQuality;
  2.1269 -        DWORD  dwSampleSize;
  2.1270 -        struct {
  2.1271 -        short int left;
  2.1272 -        short int top;
  2.1273 -        short int right;
  2.1274 -        short int bottom;
  2.1275 -        }  rcFrame;
  2.1276 -        } AVISTREAMHEADER;
  2.1277 -         */
  2.1278 -        strhChunk.seekToStartOfData();
  2.1279 -        d = strhChunk.getOutputStream();
  2.1280 -        d.writeType("vids"); // fccType - vids for video stream
  2.1281 -        // Contains a FOURCC that specifies the type of the data contained in
  2.1282 -        // the stream. The following standard AVI values for video and audio are
  2.1283 -        // defined:
  2.1284 -        //
  2.1285 -        // FOURCC   Description
  2.1286 -        // 'auds'   Audio stream
  2.1287 -        // 'mids'   MIDI stream
  2.1288 -        // 'txts'   Text stream
  2.1289 -        // 'vids'   Video stream
  2.1290 -
  2.1291 -        switch (videoFormat) {
  2.1292 -            case RAW:
  2.1293 -                d.writeType("DIB "); // fccHandler - DIB for Raw RGB
  2.1294 -                break;
  2.1295 -            case RLE:
  2.1296 -                d.writeType("RLE "); // fccHandler - Microsoft RLE
  2.1297 -                break;
  2.1298 -            case JPG:
  2.1299 -                d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG
  2.1300 -                break;
  2.1301 -            case PNG:
  2.1302 -            default:
  2.1303 -                d.writeType("png "); // fccHandler - png for PNG
  2.1304 -                break;
  2.1305 -        }
  2.1306 -        // Optionally, contains a FOURCC that identifies a specific data
  2.1307 -        // handler. The data handler is the preferred handler for the stream.
  2.1308 -        // For audio and video streams, this specifies the codec for decoding
  2.1309 -        // the stream.
  2.1310 -
  2.1311 -        if (imgDepth <= 8) {
  2.1312 -            d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES
  2.1313 -        } else {
  2.1314 -            d.writeUInt(0); // dwFlags
  2.1315 -        }
  2.1316 -
  2.1317 -        // Contains any flags for the data stream. The bits in the high-order
  2.1318 -        // word of these flags are specific to the type of data contained in the
  2.1319 -        // stream. The following standard flags are defined:
  2.1320 -        //
  2.1321 -        // Value    Name        Description
  2.1322 -        //          AVISF_DISABLED 0x00000001 Indicates this stream should not
  2.1323 -        //                      be enabled by default.
  2.1324 -        //          AVISF_VIDEO_PALCHANGES 0x00010000
  2.1325 -        //                      Indicates this video stream contains
  2.1326 -        //                      palette changes. This flag warns the playback
  2.1327 -        //                      software that it will need to animate the
  2.1328 -        //                      palette.
  2.1329 -
  2.1330 -        d.writeUShort(0); // wPriority
  2.1331 -        // Specifies priority of a stream type. For example, in a file with
  2.1332 -        // multiple audio streams, the one with the highest priority might be
  2.1333 -        // the default stream.
  2.1334 -
  2.1335 -        d.writeUShort(0); // wLanguage
  2.1336 -        // Language tag.
  2.1337 -
  2.1338 -        d.writeUInt(0); // dwInitialFrames
  2.1339 -        // Specifies how far audio data is skewed ahead of the video frames in
  2.1340 -        // interleaved files. Typically, this is about 0.75 seconds. If you are
  2.1341 -        // creating interleaved files, specify the number of frames in the file
  2.1342 -        // prior to the initial frame of the AVI sequence in this member. For
  2.1343 -        // more information, see the remarks for the dwInitialFrames member of
  2.1344 -        // the AVIMAINHEADER structure.
  2.1345 -
  2.1346 -        d.writeUInt(timeScale); // dwScale
  2.1347 -        // Used with dwRate to specify the time scale that this stream will use.
  2.1348 -        // Dividing dwRate by dwScale gives the number of samples per second.
  2.1349 -        // For video streams, this is the frame rate. For audio streams, this
  2.1350 -        // rate corresponds to the time needed to play nBlockAlign bytes of
  2.1351 -        // audio, which for PCM audio is the just the sample rate.
  2.1352 -
  2.1353 -        d.writeUInt(frameRate); // dwRate
  2.1354 -        // See dwScale.
  2.1355 -
  2.1356 -        d.writeUInt(0); // dwStart
  2.1357 -        // Specifies the starting time for this stream. The units are defined by
  2.1358 -        // the dwRate and dwScale members in the main file header. Usually, this
  2.1359 -        // is zero, but it can specify a delay time for a stream that does not
  2.1360 -        // start concurrently with the file.
  2.1361 -
  2.1362 -        d.writeUInt(videoFrames.size()); // dwLength
  2.1363 -        // Specifies the length of this stream. The units are defined by the
  2.1364 -        // dwRate and dwScale members of the stream's header.
  2.1365 -
  2.1366 -        d.writeUInt(bufferSize); // dwSuggestedBufferSize
  2.1367 -        // Specifies how large a buffer should be used to read this stream.
  2.1368 -        // Typically, this contains a value corresponding to the largest chunk
  2.1369 -        // present in the stream. Using the correct buffer size makes playback
  2.1370 -        // more efficient. Use zero if you do not know the correct buffer size.
  2.1371 -
  2.1372 -        d.writeInt(-1); // dwQuality
  2.1373 -        // Specifies an indicator of the quality of the data in the stream.
  2.1374 -        // Quality is represented as a number between 0 and 10,000.
  2.1375 -        // For compressed data, this typically represents the value of the
  2.1376 -        // quality parameter passed to the compression software. If set to –1,
  2.1377 -        // drivers use the default quality value.
  2.1378 -
  2.1379 -        d.writeUInt(0); // dwSampleSize
  2.1380 -        // Specifies the size of a single sample of data. This is set to zero
  2.1381 -        // if the samples can vary in size. If this number is nonzero, then
  2.1382 -        // multiple samples of data can be grouped into a single chunk within
  2.1383 -        // the file. If it is zero, each sample of data (such as a video frame)
  2.1384 -        // must be in a separate chunk. For video streams, this number is
  2.1385 -        // typically zero, although it can be nonzero if all video frames are
  2.1386 -        // the same size. For audio streams, this number should be the same as
  2.1387 -        // the nBlockAlign member of the WAVEFORMATEX structure describing the
  2.1388 -        // audio.
  2.1389 -
  2.1390 -        d.writeUShort(0); // rcFrame.left
  2.1391 -        d.writeUShort(0); // rcFrame.top
  2.1392 -        d.writeUShort(imgWidth); // rcFrame.right
  2.1393 -        d.writeUShort(imgHeight); // rcFrame.bottom
  2.1394 -        // Specifies the destination rectangle for a text or video stream within
  2.1395 -        // the movie rectangle specified by the dwWidth and dwHeight members of
  2.1396 -        // the AVI main header structure. The rcFrame member is typically used
  2.1397 -        // in support of multiple video streams. Set this rectangle to the
  2.1398 -        // coordinates corresponding to the movie rectangle to update the whole
  2.1399 -        // movie rectangle. Units for this member are pixels. The upper-left
  2.1400 -        // corner of the destination rectangle is relative to the upper-left
  2.1401 -        // corner of the movie rectangle.
  2.1402 -
  2.1403 -        /* Write BITMAPINFOHEADR Data into AVI Stream Format Chunk
  2.1404 -        /* -------------
  2.1405 -         * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
  2.1406 -        typedef struct tagBITMAPINFOHEADER {
  2.1407 -        DWORD  biSize;
  2.1408 -        LONG   biWidth;
  2.1409 -        LONG   biHeight;
  2.1410 -        WORD   biPlanes;
  2.1411 -        WORD   biBitCount;
  2.1412 -        DWORD  biCompression;
  2.1413 -        DWORD  biSizeImage;
  2.1414 -        LONG   biXPelsPerMeter;
  2.1415 -        LONG   biYPelsPerMeter;
  2.1416 -        DWORD  biClrUsed;
  2.1417 -        DWORD  biClrImportant;
  2.1418 -        } BITMAPINFOHEADER;
  2.1419 -         */
  2.1420 -        strfChunk.seekToStartOfData();
  2.1421 -        d = strfChunk.getOutputStream();
  2.1422 -        d.writeUInt(40); // biSize
  2.1423 -        // Specifies the number of bytes required by the structure. This value
  2.1424 -        // does not include the size of the color table or the size of the color
  2.1425 -        // masks, if they are appended to the end of structure.
  2.1426 -
  2.1427 -        d.writeInt(imgWidth); // biWidth
  2.1428 -        // Specifies the width of the bitmap, in pixels.
  2.1429 -
  2.1430 -        d.writeInt(imgHeight); // biHeight
  2.1431 -        // Specifies the height of the bitmap, in pixels.
  2.1432 -        //
  2.1433 -        // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
  2.1434 -        // a bottom-up DIB with the origin at the lower left corner. If biHeight
  2.1435 -        // is negative, the bitmap is a top-down DIB with the origin at the
  2.1436 -        // upper left corner.
  2.1437 -        // For YUV bitmaps, the bitmap is always top-down, regardless of the
  2.1438 -        // sign of biHeight. Decoders should offer YUV formats with postive
  2.1439 -        // biHeight, but for backward compatibility they should accept YUV
  2.1440 -        // formats with either positive or negative biHeight.
  2.1441 -        // For compressed formats, biHeight must be positive, regardless of
  2.1442 -        // image orientation.
  2.1443 -
  2.1444 -        d.writeShort(1); // biPlanes
  2.1445 -        // Specifies the number of planes for the target device. This value must
  2.1446 -        // be set to 1.
  2.1447 -
  2.1448 -        d.writeShort(imgDepth); // biBitCount
  2.1449 -        // Specifies the number of bits per pixel (bpp).  For uncompressed
  2.1450 -        // formats, this value is the average number of bits per pixel. For
  2.1451 -        // compressed formats, this value is the implied bit depth of the
  2.1452 -        // uncompressed image, after the image has been decoded.
  2.1453 -
  2.1454 -        switch (videoFormat) {
  2.1455 -            case RAW:
  2.1456 -            default:
  2.1457 -                d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
  2.1458 -                break;
  2.1459 -            case RLE:
  2.1460 -                if (imgDepth == 8) {
  2.1461 -                    d.writeInt(1); // biCompression - BI_RLE8
  2.1462 -                } else if (imgDepth == 4) {
  2.1463 -                    d.writeInt(2); // biCompression - BI_RLE4
  2.1464 -                } else {
  2.1465 -                    throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
  2.1466 -                }
  2.1467 -                break;
  2.1468 -            case JPG:
  2.1469 -                d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
  2.1470 -                break;
  2.1471 -            case PNG:
  2.1472 -                d.writeType("png "); // biCompression - png for PNG
  2.1473 -                break;
  2.1474 -        }
  2.1475 -        // For compressed video and YUV formats, this member is a FOURCC code,
  2.1476 -        // specified as a DWORD in little-endian order. For example, YUYV video
  2.1477 -        // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
  2.1478 -        // Codes.
  2.1479 -        //
  2.1480 -        // For uncompressed RGB formats, the following values are possible:
  2.1481 -        //
  2.1482 -        // Value        Description
  2.1483 -        // BI_RGB       0x00000000 Uncompressed RGB.
  2.1484 -        // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks.
  2.1485 -        //                         Valid for 16-bpp and 32-bpp bitmaps.
  2.1486 -        //
  2.1487 -        // Note that BI_JPG and BI_PNG are not valid video formats.
  2.1488 -        //
  2.1489 -        // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
  2.1490 -        // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
  2.1491 -        // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
  2.1492 -        // structure to determine the specific RGB type.
  2.1493 -
  2.1494 -        switch (videoFormat) {
  2.1495 -            case RAW:
  2.1496 -                d.writeInt(0); // biSizeImage
  2.1497 -                break;
  2.1498 -            case RLE:
  2.1499 -            case JPG:
  2.1500 -            case PNG:
  2.1501 -            default:
  2.1502 -                if (imgDepth == 4) {
  2.1503 -                    d.writeInt(imgWidth * imgHeight / 2); // biSizeImage
  2.1504 -                } else {
  2.1505 -                    int bytesPerPixel = Math.max(1, imgDepth / 8);
  2.1506 -                    d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage
  2.1507 -                }
  2.1508 -                break;
  2.1509 -        }
  2.1510 -        // Specifies the size, in bytes, of the image. This can be set to 0 for
  2.1511 -        // uncompressed RGB bitmaps.
  2.1512 -
  2.1513 -        d.writeInt(0); // biXPelsPerMeter
  2.1514 -        // Specifies the horizontal resolution, in pixels per meter, of the
  2.1515 -        // target device for the bitmap.
  2.1516 -
  2.1517 -        d.writeInt(0); // biYPelsPerMeter
  2.1518 -        // Specifies the vertical resolution, in pixels per meter, of the target
  2.1519 -        // device for the bitmap.
  2.1520 -
  2.1521 -        d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed
  2.1522 -        // Specifies the number of color indices in the color table that are
  2.1523 -        // actually used by the bitmap.
  2.1524 -
  2.1525 -        d.writeInt(0); // biClrImportant
  2.1526 -        // Specifies the number of color indices that are considered important
  2.1527 -        // for displaying the bitmap. If this value is zero, all colors are
  2.1528 -        // important.
  2.1529 -
  2.1530 -        if (palette != null) {
  2.1531 -            for (int i = 0, n = palette.getMapSize(); i < n; ++i) {
  2.1532 -                /*
  2.1533 -                 * typedef struct tagRGBQUAD {
  2.1534 -                BYTE rgbBlue;
  2.1535 -                BYTE rgbGreen;
  2.1536 -                BYTE rgbRed;
  2.1537 -                BYTE rgbReserved; // This member is reserved and must be zero.
  2.1538 -                } RGBQUAD;
  2.1539 -                 */
  2.1540 -                d.write(palette.getBlue(i));
  2.1541 -                d.write(palette.getGreen(i));
  2.1542 -                d.write(palette.getRed(i));
  2.1543 -                d.write(0);
  2.1544 -            }
  2.1545 -        }
  2.1546 -
  2.1547 -
  2.1548 -        // -----------------
  2.1549 -        aviChunk.finish();
  2.1550 -    }
  2.1551 -}
     3.1 --- a/src/com/aurellem/capture/AVIVideoRecorder.java	Tue Oct 25 12:29:40 2011 -0700
     3.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     3.3 @@ -1,46 +0,0 @@
     3.4 -package com.aurellem.capture;
     3.5 -
     3.6 -import java.awt.image.BufferedImage;
     3.7 -import java.io.File;
     3.8 -import java.io.IOException;
     3.9 -
    3.10 -
    3.11 -public class AVIVideoRecorder extends AbstractVideoRecorder{
    3.12 -
    3.13 -	AVIOutputStream out = null;
    3.14 -	boolean videoReady = false;
    3.15 -	BufferedImage frame;
    3.16 -	
    3.17 -	public AVIVideoRecorder(File output) throws IOException {
    3.18 -		super(output);
    3.19 -		this.out = new AVIOutputStream(output, AVIOutputStream.VideoFormat.PNG, 24);
    3.20 -		this.out.setVideoCompressionQuality(1.0f);
    3.21 -	}
    3.22 -
    3.23 -	
    3.24 -	public void initVideo (){
    3.25 -		frame = new BufferedImage(
    3.26 -				width, height,
    3.27 -				BufferedImage.TYPE_INT_RGB);
    3.28 -		out.setFrameRate((int) Math.round(this.fps));
    3.29 -		out.setTimeScale(1);
    3.30 -		out.setVideoDimension(width, height);
    3.31 -		this.videoReady = true;
    3.32 -	}
    3.33 -	
    3.34 -	public void record(BufferedImage rawFrame) {
    3.35 -		if (!videoReady){initVideo();}
    3.36 -		this.frame.getGraphics().drawImage(rawFrame, 0, 0, null);
    3.37 -		try {out.writeFrame(frame);}
    3.38 -		catch (IOException e){e.printStackTrace();}
    3.39 -	}
    3.40 -	
    3.41 -	public void finish() {
    3.42 -		System.out.println("I'm finished! <3");
    3.43 -		try {out.close();} 
    3.44 -		catch (IOException e) {e.printStackTrace();}
    3.45 -	}
    3.46 -
    3.47 -	
    3.48 -
    3.49 -}
     4.1 --- a/src/com/aurellem/capture/AbstractVideoRecorder.java	Tue Oct 25 12:29:40 2011 -0700
     4.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     4.3 @@ -1,143 +0,0 @@
     4.4 -package com.aurellem.capture;
     4.5 -
     4.6 -import java.awt.image.BufferedImage;
     4.7 -import java.io.File;
     4.8 -import java.io.IOException;
     4.9 -import java.nio.ByteBuffer;
    4.10 -
    4.11 -import com.jme3.app.Application;
    4.12 -import com.jme3.app.state.AppState;
    4.13 -import com.jme3.app.state.AppStateManager;
    4.14 -import com.jme3.post.SceneProcessor;
    4.15 -import com.jme3.renderer.Camera;
    4.16 -import com.jme3.renderer.RenderManager;
    4.17 -import com.jme3.renderer.ViewPort;
    4.18 -import com.jme3.renderer.queue.RenderQueue;
    4.19 -import com.jme3.texture.FrameBuffer;
    4.20 -import com.jme3.util.BufferUtils;
    4.21 -import com.jme3.util.Screenshots;
    4.22 -
    4.23 -/**
    4.24 - * <code>VideoProcessor</code> copies the frames it receives to video. 
    4.25 - * To ensure smooth video at a constant framerate, you should set your 
    4.26 - * application's timer to a new {@link IsoTimer}.  This class will 
    4.27 - * auto-determine the framerate of the video based on the time difference 
    4.28 - * between the first two frames it receives, although you can manually set 
    4.29 - * the framerate by calling <code>setFps(newFramerate)</code>.  Be sure to 
    4.30 - * place this processor *after* any other processors whose effects you want 
    4.31 - * to be included in the output video. You can attach multiple 
    4.32 - * <code>VideoProcessor</code>s to the same <code>ViewPort</code>.
    4.33 - * 
    4.34 - * For example,
    4.35 - * <code>
    4.36 - * someViewPort.addProcessor(new VideoProcessor(file1));
    4.37 - * someViewPort.addProcessor(someShadowRenderer);
    4.38 - * someViewPort.addProcessor(new VideoProcessor(file2));
    4.39 - * </code>
    4.40 - * 
    4.41 - * will output a video without shadows to <code>file1</code> and a video 
    4.42 - * with shadows to <code>file2</code>
    4.43 - * 
    4.44 - * @author Robert McIntyre
    4.45 - *
    4.46 - */
    4.47 -
    4.48 -public abstract class AbstractVideoRecorder 
    4.49 -	implements SceneProcessor, IVideoRecorder, AppState{
    4.50 -
    4.51 -	final File output;
    4.52 -	Camera camera;
    4.53 -	int width;
    4.54 -	int height;
    4.55 -	String targetFileName;
    4.56 -	FrameBuffer frameBuffer;
    4.57 -	Double fps = null;
    4.58 -	RenderManager renderManager;
    4.59 -	ByteBuffer byteBuffer;
    4.60 -	BufferedImage rawFrame;
    4.61 -	boolean isInitilized = false;
    4.62 -	boolean paused = false;
    4.63 -	
    4.64 -	public AbstractVideoRecorder(File output) throws IOException {
    4.65 -		this.output = output;
    4.66 -		this.targetFileName = this.output.getCanonicalPath();	
    4.67 -	}
    4.68 -	
    4.69 -		
    4.70 -	public double getFps() {return this.fps;}
    4.71 -	
    4.72 -	public AbstractVideoRecorder setFps(double fps) {
    4.73 -		this.fps = fps;
    4.74 -		return this;
    4.75 -	}
    4.76 -	
    4.77 -	public void initialize(RenderManager rm, ViewPort viewPort) {
    4.78 -		Camera camera = viewPort.getCamera();
    4.79 -		this.width = camera.getWidth();
    4.80 -		this.height = camera.getHeight();
    4.81 -				
    4.82 -		rawFrame = new BufferedImage(width, height, 
    4.83 -				BufferedImage.TYPE_4BYTE_ABGR);		
    4.84 -		byteBuffer = BufferUtils.createByteBuffer(width * height * 4 );
    4.85 -		this.renderManager = rm;
    4.86 -		this.isInitilized = true;
    4.87 -	}
    4.88 -
    4.89 -	public void reshape(ViewPort vp, int w, int h) {}
    4.90 -	
    4.91 -	public boolean isInitialized() {return this.isInitilized;}
    4.92 -
    4.93 -	public void preFrame(float tpf) {
    4.94 -		if (null == this.fps){
    4.95 -			this.setFps(1.0 / tpf);}
    4.96 -	}	
    4.97 -	
    4.98 -	public void postQueue(RenderQueue rq) {}
    4.99 -
   4.100 -	public void postFrame(FrameBuffer out) {
   4.101 -		if (!this.paused){
   4.102 -			byteBuffer.clear();
   4.103 -			renderManager.getRenderer().readFrameBuffer(out, byteBuffer);
   4.104 -			Screenshots.convertScreenShot(byteBuffer, rawFrame);
   4.105 -			record(rawFrame);
   4.106 -		}
   4.107 -	}
   4.108 -			
   4.109 -	public void cleanup(){
   4.110 -		this.pause();
   4.111 -		this.finish();
   4.112 -	};
   4.113 -	
   4.114 -	public void pause(){
   4.115 -		this.paused = true;
   4.116 -	}
   4.117 -	
   4.118 -	public void start(){
   4.119 -		this.paused = false;
   4.120 -	}
   4.121 -
   4.122 -	// methods from AppState
   4.123 -	public void initialize(AppStateManager stateManager, Application app) {}
   4.124 -
   4.125 -	public void setEnabled(boolean active) {
   4.126 -		if (active) {this.start();}
   4.127 -		else {this.pause();}
   4.128 -	}
   4.129 -
   4.130 -	public boolean isEnabled() {
   4.131 -		return this.paused;
   4.132 -	}
   4.133 -
   4.134 -	public void stateAttached(AppStateManager stateManager) {}
   4.135 -
   4.136 -
   4.137 -	public void stateDetached(AppStateManager stateManager) {
   4.138 -		this.pause();
   4.139 -		this.finish();
   4.140 -	}
   4.141 -
   4.142 -	public void update(float tpf) {}	
   4.143 -	public void render(RenderManager rm) {}
   4.144 -	public void postRender() {}
   4.145 -	
   4.146 -}
     5.1 --- a/src/com/aurellem/capture/AudioSend.java	Tue Oct 25 12:29:40 2011 -0700
     5.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     5.3 @@ -1,260 +0,0 @@
     5.4 -package com.aurellem.capture;
     5.5 -
     5.6 -import java.lang.reflect.Field;
     5.7 -import java.nio.ByteBuffer;
     5.8 -import java.util.HashMap;
     5.9 -import java.util.Vector;
    5.10 -import java.util.concurrent.CountDownLatch;
    5.11 -import java.util.logging.Level;
    5.12 -import java.util.logging.Logger;
    5.13 -
    5.14 -import org.lwjgl.LWJGLException;
    5.15 -import org.lwjgl.openal.AL;
    5.16 -import org.lwjgl.openal.AL10;
    5.17 -import org.lwjgl.openal.ALCdevice;
    5.18 -import org.lwjgl.openal.OpenALException;
    5.19 -
    5.20 -import com.jme3.audio.Listener;
    5.21 -import com.jme3.audio.lwjgl.LwjglAudioRenderer;
    5.22 -import com.jme3.math.Vector3f;
    5.23 -import com.jme3.util.BufferUtils;
    5.24 -
    5.25 -public class AudioSend 
    5.26 -	extends LwjglAudioRenderer implements MultiListener {
    5.27 -
    5.28 -	/**
    5.29 -	 * Keeps track of all the listeners which have been registered so far.
    5.30 -	 * The first element is <code>null</code>, which represents the zeroth 
    5.31 -	 * LWJGL listener which is created automatically.
    5.32 -	 */
    5.33 -	public Vector<Listener> listeners = new Vector<Listener>();
    5.34 -	
    5.35 -	public void initialize(){
    5.36 -		super.initialize();
    5.37 -		listeners.add(null);
    5.38 -	}
    5.39 -	
    5.40 -	/**
    5.41 -	 * This is to call the native methods which require the OpenAL device ID.
    5.42 -	 * currently it is obtained through reflection.
    5.43 -	 */
    5.44 -	private long deviceID;
    5.45 -	
    5.46 -	/**
    5.47 -	 * To ensure that <code>deviceID<code> and <code>listeners<code> are 
    5.48 -	 * properly initialized before any additional listeners are added.
    5.49 -	 */
    5.50 -	private CountDownLatch latch  = new CountDownLatch(1);
    5.51 -	
    5.52 -	private void waitForInit(){
    5.53 -		try {latch.await();} 
    5.54 -		catch (InterruptedException e) {e.printStackTrace();}
    5.55 -	}
    5.56 -	
    5.57 -	/**
    5.58 -	 * Each listener (including the main LWJGL listener) can be registered
    5.59 -	 * with a <code>SoundProcessor</code>, which this Renderer will call whenever 
    5.60 -	 * there is new audio data to be processed.
    5.61 -	 */
    5.62 -	public HashMap<Listener, SoundProcessor> soundProcessorMap =
    5.63 -		new HashMap<Listener, SoundProcessor>();
    5.64 -	
    5.65 -		
    5.66 -	/**
    5.67 -	 * Create a new slave context on the recorder device which will render all the 
    5.68 -	 * sounds in the main LWJGL context with respect to this listener.
    5.69 -	 */
    5.70 -	public void addListener(Listener l) {
    5.71 -		try {this.latch.await();} 
    5.72 -		catch (InterruptedException e) {e.printStackTrace();}
    5.73 -		this.addListener();
    5.74 -		this.listeners.add(l);
    5.75 -	}
    5.76 -	
    5.77 -	/**
    5.78 -	 * Whenever new data is rendered in the perspective of this listener, 
    5.79 -	 * this Renderer will send that data to the SoundProcessor of your choosing.
    5.80 -	 */
    5.81 -	public void registerSoundProcessor(Listener l, SoundProcessor sp) {
    5.82 -		this.soundProcessorMap.put(l, sp);
    5.83 -	}
    5.84 -	
    5.85 -	/**
    5.86 -	 * Registers a SoundProcessor for the main LWJGL context. IF all you want to 
    5.87 -	 * do is record the sound you would normally hear in your application, then 
    5.88 -	 * this is the only method you have to worry about.
    5.89 -	 */
    5.90 -	public void registerSoundProcessor(SoundProcessor sp){
    5.91 -		// register a sound processor for the default listener.
    5.92 -		this.soundProcessorMap.put(null, sp);		
    5.93 -	}
    5.94 -		
    5.95 -	private static final Logger logger = 
    5.96 -		Logger.getLogger(AudioSend.class.getName());
    5.97 -
    5.98 -	
    5.99 -	////////////   Native Methods
   5.100 -	
   5.101 -	/** This establishes the LWJGL context as the context which will be copies to all 
   5.102 -	 *  other contexts.  It must be called before any calls to <code>addListener();</code>
   5.103 -	 */
   5.104 -	public void initDevice(){
   5.105 -		ninitDevice(this.deviceID);}
   5.106 -	public static native void ninitDevice(long device);
   5.107 -	
   5.108 -	/**
   5.109 -	 * The send device does not automatically process sound.  This step function will cause 
   5.110 -	 * the desired number of samples to be processed for each listener.  The results will then 
   5.111 -	 * be available via calls to <code>getSamples()</code> for each listener.
   5.112 -	 * @param samples
   5.113 -	 */
   5.114 -	public void step(int samples){
   5.115 -		nstep(this.deviceID, samples);}
   5.116 -	public static native void nstep(long device, int samples);
   5.117 -
   5.118 -	/**
   5.119 -	 * Retrieve the final rendered sound for a particular listener.  <code>contextNum == 0</code>
   5.120 -	 * is the main LWJGL context.
   5.121 -	 * @param buffer
   5.122 -	 * @param samples
   5.123 -	 * @param contextNum
   5.124 -	 */
   5.125 -	public void getSamples(ByteBuffer buffer, int samples, int contextNum){
   5.126 -		ngetSamples(this.deviceID, buffer, buffer.position(), samples, contextNum);}
   5.127 -	public static native void ngetSamples(
   5.128 -			long device, ByteBuffer buffer, int position, int samples, int contextNum);
   5.129 -	
   5.130 -	/**
   5.131 -	 * Create an additional listener on the recorder device.  The device itself will manage 
   5.132 -	 * this listener and synchronize it with the main LWJGL context. Processed sound samples
   5.133 -	 * for this listener will be available via a call to <code>getSamples()</code> with 
   5.134 -	 * <code>contextNum</code> equal to the number of times this method has been called. 
   5.135 -	 */
   5.136 -	public void addListener(){naddListener(this.deviceID);}
   5.137 -	public static native void naddListener(long device);
   5.138 -	
   5.139 -	/**
   5.140 -	 * This will internally call <code>alListener3f<code> in the appropriate slave context and update
   5.141 -	 * that context's listener's parameters. Calling this for a number greater than the current 
   5.142 -	 * number of slave contexts will have no effect.
   5.143 -	 * @param pname
   5.144 -	 * @param v1
   5.145 -	 * @param v2
   5.146 -	 * @param v3
   5.147 -	 * @param contextNum
   5.148 -	 */
   5.149 -	public void setNthListener3f(int pname, float v1, float v2, float v3, int contextNum){
   5.150 -		nsetNthListener3f(pname, v1, v2, v3, this.deviceID, contextNum);}
   5.151 -	public static native void 
   5.152 -	nsetNthListener3f(int pname, float v1, float v2, float v3, long device, int contextNum);
   5.153 -	
   5.154 -	/**
   5.155 -	 * This will internally call <code>alListenerf<code> in the appropriate slave context and update
   5.156 -	 * that context's listener's parameters. Calling this for a number greater than the current 
   5.157 -	 * number of slave contexts will have no effect.
   5.158 -	 * @param pname
   5.159 -	 * @param v1
   5.160 -	 * @param contextNum
   5.161 -	 */
   5.162 -	public void setNthListenerf(int pname, float v1, int contextNum){
   5.163 -		nsetNthListenerf(pname, v1, this.deviceID, contextNum);}
   5.164 -	public static native void nsetNthListenerf(int pname, float v1, long device, int contextNum);
   5.165 -	
   5.166 -	/**
   5.167 -	 * Instead of taking whatever device is available on the system, this call 
   5.168 -	 * creates the "Multiple Audio Send" device, which supports multiple listeners in a limited
   5.169 -	 * capacity.  For each listener, the device renders it not to the sound device, but
   5.170 -	 * instead to buffers which it makes available via JNI.
   5.171 -	 */
   5.172 -	public void initInThread(){
   5.173 -		try{
   5.174 -            if (!AL.isCreated()){
   5.175 -                AL.create("Multiple Audio Send", 44100, 60, false);
   5.176 -            }
   5.177 -        }catch (OpenALException ex){
   5.178 -            logger.log(Level.SEVERE, "Failed to load audio library", ex);
   5.179 -            System.exit(1);
   5.180 -            return;
   5.181 -        }catch (LWJGLException ex){
   5.182 -            logger.log(Level.SEVERE, "Failed to load audio library", ex);
   5.183 -            System.exit(1);
   5.184 -            return;
   5.185 -        }
   5.186 -		super.initInThread();
   5.187 -
   5.188 -		ALCdevice device = AL.getDevice();
   5.189 -
   5.190 -		// RLM: use reflection to grab the ID of our device for use later.
   5.191 -		try {
   5.192 -			Field deviceIDField;
   5.193 -			deviceIDField = ALCdevice.class.getDeclaredField("device");
   5.194 -			deviceIDField.setAccessible(true);
   5.195 -			try {deviceID = (Long)deviceIDField.get(device);} 
   5.196 -			catch (IllegalArgumentException e) {e.printStackTrace();} 
   5.197 -			catch (IllegalAccessException e) {e.printStackTrace();}
   5.198 -			deviceIDField.setAccessible(false);} 
   5.199 -		catch (SecurityException e) {e.printStackTrace();} 
   5.200 -		catch (NoSuchFieldException e) {e.printStackTrace();}
   5.201 -		
   5.202 -		// the LWJGL context must be established as the master context before 
   5.203 -		// any other listeners can be created on this device.
   5.204 -		initDevice();
   5.205 -		// Now, everything is initialized, and it is safe to add more listeners.
   5.206 -		latch.countDown();
   5.207 -	}
   5.208 -
   5.209 -	
   5.210 -	public void cleanup(){
   5.211 -		for(SoundProcessor sp : this.soundProcessorMap.values()){
   5.212 -			sp.cleanup();
   5.213 -		}
   5.214 -		super.cleanup();
   5.215 -	}
   5.216 -	
   5.217 -	public void updateAllListeners(){
   5.218 -		for (int i = 0; i < this.listeners.size(); i++){
   5.219 -			Listener lis = this.listeners.get(i);
   5.220 -			if (null != lis){
   5.221 -				Vector3f location = lis.getLocation();
   5.222 -				Vector3f velocity = lis.getVelocity();
   5.223 -				Vector3f orientation = lis.getUp();
   5.224 -				float gain = lis.getVolume();
   5.225 -				setNthListener3f(AL10.AL_POSITION, 
   5.226 -						location.x, location.y, location.z, i);
   5.227 -				setNthListener3f(AL10.AL_VELOCITY, 
   5.228 -						velocity.x, velocity.y, velocity.z, i);
   5.229 -				setNthListener3f(AL10.AL_ORIENTATION,
   5.230 -						orientation.x, orientation.y, orientation.z, i);
   5.231 -				setNthListenerf(AL10.AL_GAIN, gain, i);
   5.232 -			}
   5.233 -		}
   5.234 -	}
   5.235 -	
   5.236 -	
   5.237 -	public final static int BYTES_PER_SAMPLE = 4;
   5.238 -	private ByteBuffer buffer = BufferUtils.createByteBuffer(4096); 
   5.239 -	
   5.240 -	public void dispatchAudio(float tpf){
   5.241 -		int samplesToGet = (int) (tpf * 44100);
   5.242 -		try {latch.await();} 
   5.243 -		catch (InterruptedException e) {e.printStackTrace();}
   5.244 -		step(samplesToGet);
   5.245 -		updateAllListeners();
   5.246 -		
   5.247 -		for (int i = 0; i < this.listeners.size(); i++){		
   5.248 -			buffer.clear();
   5.249 -			this.getSamples(buffer, samplesToGet, i);
   5.250 -			SoundProcessor sp = 
   5.251 -			this.soundProcessorMap.get(this.listeners.get(i));
   5.252 -			if (null != sp){sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);}
   5.253 -		}
   5.254 -		
   5.255 -	}
   5.256 -		
   5.257 -	public void update(float tpf){
   5.258 -		super.update(tpf);
   5.259 -        dispatchAudio(tpf);
   5.260 -	}
   5.261 -	
   5.262 -}
   5.263 -
     6.1 --- a/src/com/aurellem/capture/Capture.java	Tue Oct 25 12:29:40 2011 -0700
     6.2 +++ b/src/com/aurellem/capture/Capture.java	Wed Oct 26 08:54:12 2011 -0700
     6.3 @@ -3,6 +3,7 @@
     6.4  import java.io.File;
     6.5  import java.io.IOException;
     6.6  
     6.7 +import com.aurellem.capture.video.AVIVideoRecorder;
     6.8  import com.jme3.app.Application;
     6.9  import com.jme3.math.ColorRGBA;
    6.10  
     7.1 --- a/src/com/aurellem/capture/DataChunkOutputStream.java	Tue Oct 25 12:29:40 2011 -0700
     7.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     7.3 @@ -1,217 +0,0 @@
     7.4 -/**
     7.5 - * @(#)DataChunkOutputStream.java  1.1  2011-01-17
     7.6 - *
     7.7 - * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
     7.8 - * All rights reserved.
     7.9 - *
    7.10 - * You may not use, copy or modify this file, except in compliance with the
    7.11 - * license agreement you entered into with Werner Randelshofer.
    7.12 - * For details see accompanying license terms.
    7.13 - */
    7.14 -package com.aurellem.capture;
    7.15 -
    7.16 -import java.io.*;
    7.17 -
    7.18 -/**
    7.19 - * This output stream filter supports common data types used inside
    7.20 - * of AVI RIFF Data Chunks.
    7.21 - *
    7.22 - * @author Werner Randelshofer
    7.23 - * @version 1.1 2011-01-17 Adds functionality for blocking flush and close.
    7.24 - * <br>1.0.1 2010-04-05 Removed unused constants.
    7.25 - * <br>1.0  2008-08-11 Created.
    7.26 - */
    7.27 -public class DataChunkOutputStream extends FilterOutputStream {
    7.28 -
    7.29 -    /**
    7.30 -     * The number of bytes written to the data output stream so far. 
    7.31 -     * If this counter overflows, it will be wrapped to Integer.MAX_VALUE.
    7.32 -     */
    7.33 -    protected long written;
    7.34 -
    7.35 -    /** Whether flush and close request shall be forwarded to underlying stream.*/
    7.36 -    private boolean forwardFlushAndClose;
    7.37 -
    7.38 -    public DataChunkOutputStream(OutputStream out) {
    7.39 -        this(out,true);
    7.40 -    }
    7.41 -    public DataChunkOutputStream(OutputStream out, boolean forwardFlushAndClose) {
    7.42 -        super(out);
    7.43 -        this.forwardFlushAndClose=forwardFlushAndClose;
    7.44 -    }
    7.45 -
    7.46 -    /**
    7.47 -     * Writes an chunk type identifier (4 bytes).
    7.48 -     * @param s A string with a length of 4 characters.
    7.49 -     */
    7.50 -    public void writeType(String s) throws IOException {
    7.51 -        if (s.length() != 4) {
    7.52 -            throw new IllegalArgumentException("type string must have 4 characters");
    7.53 -        }
    7.54 -
    7.55 -        try {
    7.56 -            out.write(s.getBytes("ASCII"), 0, 4);
    7.57 -            incCount(4);
    7.58 -        } catch (UnsupportedEncodingException e) {
    7.59 -            throw new InternalError(e.toString());
    7.60 -        }
    7.61 -    }
    7.62 -
    7.63 -    /**
    7.64 -     * Writes out a <code>byte</code> to the underlying output stream as 
    7.65 -     * a 1-byte value. If no exception is thrown, the counter 
    7.66 -     * <code>written</code> is incremented by <code>1</code>.
    7.67 -     *
    7.68 -     * @param      v   a <code>byte</code> value to be written.
    7.69 -     * @exception  IOException  if an I/O error occurs.
    7.70 -     * @see        java.io.FilterOutputStream#out
    7.71 -     */
    7.72 -    public final void writeByte(int v) throws IOException {
    7.73 -        out.write(v);
    7.74 -        incCount(1);
    7.75 -    }
    7.76 -
    7.77 -    /**
    7.78 -     * Writes <code>len</code> bytes from the specified byte array 
    7.79 -     * starting at offset <code>off</code> to the underlying output stream. 
    7.80 -     * If no exception is thrown, the counter <code>written</code> is 
    7.81 -     * incremented by <code>len</code>.
    7.82 -     *
    7.83 -     * @param      b     the data.
    7.84 -     * @param      off   the start offset in the data.
    7.85 -     * @param      len   the number of bytes to write.
    7.86 -     * @exception  IOException  if an I/O error occurs.
    7.87 -     * @see        java.io.FilterOutputStream#out
    7.88 -     */
    7.89 -    @Override
    7.90 -    public synchronized void write(byte b[], int off, int len)
    7.91 -            throws IOException {
    7.92 -        out.write(b, off, len);
    7.93 -        incCount(len);
    7.94 -    }
    7.95 -
    7.96 -    /**
    7.97 -     * Writes the specified byte (the low eight bits of the argument 
    7.98 -     * <code>b</code>) to the underlying output stream. If no exception 
    7.99 -     * is thrown, the counter <code>written</code> is incremented by 
   7.100 -     * <code>1</code>.
   7.101 -     * <p>
   7.102 -     * Implements the <code>write</code> method of <code>OutputStream</code>.
   7.103 -     *
   7.104 -     * @param      b   the <code>byte</code> to be written.
   7.105 -     * @exception  IOException  if an I/O error occurs.
   7.106 -     * @see        java.io.FilterOutputStream#out
   7.107 -     */
   7.108 -    @Override
   7.109 -    public synchronized void write(int b) throws IOException {
   7.110 -        out.write(b);
   7.111 -        incCount(1);
   7.112 -    }
   7.113 -
   7.114 -    /**
   7.115 -     * Writes an <code>int</code> to the underlying output stream as four
   7.116 -     * bytes, high byte first. If no exception is thrown, the counter 
   7.117 -     * <code>written</code> is incremented by <code>4</code>.
   7.118 -     *
   7.119 -     * @param      v   an <code>int</code> to be written.
   7.120 -     * @exception  IOException  if an I/O error occurs.
   7.121 -     * @see        java.io.FilterOutputStream#out
   7.122 -     */
   7.123 -    public void writeInt(int v) throws IOException {
   7.124 -        out.write((v >>> 0) & 0xff);
   7.125 -        out.write((v >>> 8) & 0xff);
   7.126 -        out.write((v >>> 16) & 0xff);
   7.127 -        out.write((v >>> 24) & 0xff);
   7.128 -        incCount(4);
   7.129 -    }
   7.130 -
   7.131 -    /**
   7.132 -     * Writes an unsigned 32 bit integer value.
   7.133 -     * 
   7.134 -     * @param v The value
   7.135 -     * @throws java.io.IOException
   7.136 -     */
   7.137 -    public void writeUInt(long v) throws IOException {
   7.138 -        out.write((int) ((v >>> 0) & 0xff));
   7.139 -        out.write((int) ((v >>> 8) & 0xff));
   7.140 -        out.write((int) ((v >>> 16) & 0xff));
   7.141 -        out.write((int) ((v >>> 24) & 0xff));
   7.142 -        incCount(4);
   7.143 -    }
   7.144 -
   7.145 -    /**
   7.146 -     * Writes a signed 16 bit integer value.
   7.147 -     * 
   7.148 -     * @param v The value
   7.149 -     * @throws java.io.IOException
   7.150 -     */
   7.151 -    public void writeShort(int v) throws IOException {
   7.152 -        out.write((int) ((v >>> 0) & 0xff));
   7.153 -        out.write((int) ((v >> 8) & 0xff));
   7.154 -        incCount(2);
   7.155 -    }
   7.156 -
   7.157 -    public void writeLong(long v) throws IOException {
   7.158 -        out.write((int) (v >>> 0) & 0xff);
   7.159 -        out.write((int) (v >>> 8) & 0xff);
   7.160 -        out.write((int) (v >>> 16) & 0xff);
   7.161 -        out.write((int) (v >>> 24) & 0xff);
   7.162 -        out.write((int) (v >>> 32) & 0xff);
   7.163 -        out.write((int) (v >>> 40) & 0xff);
   7.164 -        out.write((int) (v >>> 48) & 0xff);
   7.165 -        out.write((int) (v >>> 56) & 0xff);
   7.166 -        incCount(8);
   7.167 -    }
   7.168 -
   7.169 -    public void writeUShort(int v) throws IOException {
   7.170 -        out.write((int) ((v >>> 0) & 0xff));
   7.171 -        out.write((int) ((v >> 8) & 0xff));
   7.172 -        incCount(2);
   7.173 -    }
   7.174 -
   7.175 -    /**
   7.176 -     * Increases the written counter by the specified value
   7.177 -     * until it reaches Long.MAX_VALUE.
   7.178 -     */
   7.179 -    protected void incCount(int value) {
   7.180 -        long temp = written + value;
   7.181 -        if (temp < 0) {
   7.182 -            temp = Long.MAX_VALUE;
   7.183 -        }
   7.184 -        written = temp;
   7.185 -    }
   7.186 -
   7.187 -    /**
   7.188 -     * Returns the current value of the counter <code>written</code>, 
   7.189 -     * the number of bytes written to this data output stream so far.
   7.190 -     * If the counter overflows, it will be wrapped to Integer.MAX_VALUE.
   7.191 -     *
   7.192 -     * @return  the value of the <code>written</code> field.
   7.193 -     * @see     java.io.DataOutputStream#written
   7.194 -     */
   7.195 -    public final long size() {
   7.196 -        return written;
   7.197 -    }
   7.198 -    
   7.199 -    /**
   7.200 -     * Sets the value of the counter <code>written</code> to 0.
   7.201 -     */
   7.202 -    public void clearCount() {
   7.203 -        written = 0;
   7.204 -    }
   7.205 -
   7.206 -    @Override
   7.207 -    public void close() throws IOException {
   7.208 -        if (forwardFlushAndClose) {
   7.209 -        super.close();
   7.210 -        }
   7.211 -    }
   7.212 -    
   7.213 -    @Override
   7.214 -    public void flush() throws IOException {
   7.215 -        if (forwardFlushAndClose) {
   7.216 -        super.flush();
   7.217 -        }
   7.218 -    }
   7.219 -
   7.220 -}
     8.1 --- a/src/com/aurellem/capture/FileAudioRenderer.java	Tue Oct 25 12:29:40 2011 -0700
     8.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     8.3 @@ -1,85 +0,0 @@
     8.4 -package com.aurellem.capture;
     8.5 -
     8.6 -import com.jme3.audio.AudioData;
     8.7 -import com.jme3.audio.AudioNode;
     8.8 -import com.jme3.audio.AudioParam;
     8.9 -import com.jme3.audio.AudioRenderer;
    8.10 -import com.jme3.audio.Environment;
    8.11 -import com.jme3.audio.Listener;
    8.12 -import com.jme3.audio.ListenerParam;
    8.13 -
    8.14 -public class FileAudioRenderer implements AudioRenderer{
    8.15 -
    8.16 -
    8.17 -	public void setListener(Listener listener) {
    8.18 -		// TODO Auto-generated method stub
    8.19 -		
    8.20 -	}
    8.21 -
    8.22 -
    8.23 -	public void setEnvironment(Environment env) {
    8.24 -		// TODO Auto-generated method stub
    8.25 -		
    8.26 -	}
    8.27 -
    8.28 -	@Override
    8.29 -	public void playSourceInstance(AudioNode src) {
    8.30 -		// TODO Auto-generated method stub
    8.31 -		
    8.32 -	}
    8.33 -
    8.34 -	@Override
    8.35 -	public void playSource(AudioNode src) {
    8.36 -		// TODO Auto-generated method stub
    8.37 -		
    8.38 -	}
    8.39 -
    8.40 -	@Override
    8.41 -	public void pauseSource(AudioNode src) {
    8.42 -		// TODO Auto-generated method stub
    8.43 -		
    8.44 -	}
    8.45 -
    8.46 -	@Override
    8.47 -	public void stopSource(AudioNode src) {
    8.48 -		// TODO Auto-generated method stub
    8.49 -		
    8.50 -	}
    8.51 -
    8.52 -	@Override
    8.53 -	public void updateSourceParam(AudioNode src, AudioParam param) {
    8.54 -		// TODO Auto-generated method stub
    8.55 -		
    8.56 -	}
    8.57 -
    8.58 -	@Override
    8.59 -	public void updateListenerParam(Listener listener, ListenerParam param) {
    8.60 -		// TODO Auto-generated method stub
    8.61 -		
    8.62 -	}
    8.63 -
    8.64 -	@Override
    8.65 -	public void deleteAudioData(AudioData ad) {
    8.66 -		// TODO Auto-generated method stub
    8.67 -		
    8.68 -	}
    8.69 -
    8.70 -	@Override
    8.71 -	public void initialize() {
    8.72 -		// TODO Auto-generated method stub
    8.73 -		
    8.74 -	}
    8.75 -
    8.76 -	@Override
    8.77 -	public void update(float tpf) {
    8.78 -		// TODO Auto-generated method stub
    8.79 -		
    8.80 -	}
    8.81 -
    8.82 -	@Override
    8.83 -	public void cleanup() {
    8.84 -		// TODO Auto-generated method stub
    8.85 -		
    8.86 -	}
    8.87 -
    8.88 -}
     9.1 --- a/src/com/aurellem/capture/IVideoRecorder.java	Tue Oct 25 12:29:40 2011 -0700
     9.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     9.3 @@ -1,21 +0,0 @@
     9.4 -package com.aurellem.capture;
     9.5 -
     9.6 -import java.awt.image.BufferedImage;
     9.7 -
     9.8 -public interface IVideoRecorder{
     9.9 -
    9.10 -	void record(BufferedImage image);
    9.11 -	
    9.12 -	void pause();
    9.13 -	
    9.14 -	void start();
    9.15 -	
    9.16 -	/**
    9.17 -	 * closes the video file, writing appropriate headers, trailers, etc.
    9.18 -	 * After this is called, no more recording can be done.
    9.19 -	 */
    9.20 -	void finish();
    9.21 -	
    9.22 -}
    9.23 -
    9.24 -
    10.1 --- a/src/com/aurellem/capture/ImageOutputStreamAdapter.java	Tue Oct 25 12:29:40 2011 -0700
    10.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
    10.3 @@ -1,144 +0,0 @@
    10.4 -/*
    10.5 - * @(#)ImageOutputStreamAdapter.java  1.1  2011-01-07
    10.6 - *
    10.7 - * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland.
    10.8 - * All rights reserved.
    10.9 - *
   10.10 - * You may not use, copy or modify this file, except in compliance with the
   10.11 - * license agreement you entered into with Werner Randelshofer.
   10.12 - * For details see accompanying license terms.
   10.13 - */
   10.14 -package com.aurellem.capture;
   10.15 -
   10.16 -import java.io.FilterOutputStream;
   10.17 -import java.io.IOException;
   10.18 -import java.io.OutputStream;
   10.19 -import javax.imageio.stream.ImageOutputStream;
   10.20 -
   10.21 -/**
   10.22 - * Adapts an {@code ImageOutputStream} for classes requiring an
   10.23 - * {@code OutputStream}.
   10.24 - *
   10.25 - * @author Werner Randelshofer
   10.26 - * @version 1.1 2011-01-07 Fixes performance.
   10.27 - * <br>1.0 2010-12-26 Created.
   10.28 - */
   10.29 -public class ImageOutputStreamAdapter extends OutputStream {
   10.30 -
   10.31 -    /**
   10.32 -     * The underlying output stream to be filtered.
   10.33 -     */
   10.34 -    protected ImageOutputStream out;
   10.35 -
   10.36 -    /**
   10.37 -     * Creates an output stream filter built on top of the specified
   10.38 -     * underlying output stream.
   10.39 -     *
   10.40 -     * @param   out   the underlying output stream to be assigned to
   10.41 -     *                the field <tt>this.out</tt> for later use, or
   10.42 -     *                <code>null</code> if this instance is to be
   10.43 -     *                created without an underlying stream.
   10.44 -     */
   10.45 -    public ImageOutputStreamAdapter(ImageOutputStream out) {
   10.46 -        this.out = out;
   10.47 -    }
   10.48 -
   10.49 -    /**
   10.50 -     * Writes the specified <code>byte</code> to this output stream.
   10.51 -     * <p>
   10.52 -     * The <code>write</code> method of <code>FilterOutputStream</code>
   10.53 -     * calls the <code>write</code> method of its underlying output stream,
   10.54 -     * that is, it performs <tt>out.write(b)</tt>.
   10.55 -     * <p>
   10.56 -     * Implements the abstract <tt>write</tt> method of <tt>OutputStream</tt>.
   10.57 -     *
   10.58 -     * @param      b   the <code>byte</code>.
   10.59 -     * @exception  IOException  if an I/O error occurs.
   10.60 -     */
   10.61 -    @Override
   10.62 -    public void write(int b) throws IOException {
   10.63 -        out.write(b);
   10.64 -    }
   10.65 -
   10.66 -    /**
   10.67 -     * Writes <code>b.length</code> bytes to this output stream.
   10.68 -     * <p>
   10.69 -     * The <code>write</code> method of <code>FilterOutputStream</code>
   10.70 -     * calls its <code>write</code> method of three arguments with the
   10.71 -     * arguments <code>b</code>, <code>0</code>, and
   10.72 -     * <code>b.length</code>.
   10.73 -     * <p>
   10.74 -     * Note that this method does not call the one-argument
   10.75 -     * <code>write</code> method of its underlying stream with the single
   10.76 -     * argument <code>b</code>.
   10.77 -     *
   10.78 -     * @param      b   the data to be written.
   10.79 -     * @exception  IOException  if an I/O error occurs.
   10.80 -     * @see        java.io.FilterOutputStream#write(byte[], int, int)
   10.81 -     */
   10.82 -    @Override
   10.83 -    public void write(byte b[]) throws IOException {
   10.84 -        write(b, 0, b.length);
   10.85 -    }
   10.86 -
   10.87 -    /**
   10.88 -     * Writes <code>len</code> bytes from the specified
   10.89 -     * <code>byte</code> array starting at offset <code>off</code> to
   10.90 -     * this output stream.
   10.91 -     * <p>
   10.92 -     * The <code>write</code> method of <code>FilterOutputStream</code>
   10.93 -     * calls the <code>write</code> method of one argument on each
   10.94 -     * <code>byte</code> to output.
   10.95 -     * <p>
   10.96 -     * Note that this method does not call the <code>write</code> method
   10.97 -     * of its underlying input stream with the same arguments. Subclasses
   10.98 -     * of <code>FilterOutputStream</code> should provide a more efficient
   10.99 -     * implementation of this method.
  10.100 -     *
  10.101 -     * @param      b     the data.
  10.102 -     * @param      off   the start offset in the data.
  10.103 -     * @param      len   the number of bytes to write.
  10.104 -     * @exception  IOException  if an I/O error occurs.
  10.105 -     * @see        java.io.FilterOutputStream#write(int)
  10.106 -     */
  10.107 -    @Override
  10.108 -    public void write(byte b[], int off, int len) throws IOException {
  10.109 -        out.write(b,off,len);
  10.110 -    }
  10.111 -
  10.112 -    /**
  10.113 -     * Flushes this output stream and forces any buffered output bytes
  10.114 -     * to be written out to the stream.
  10.115 -     * <p>
  10.116 -     * The <code>flush</code> method of <code>FilterOutputStream</code>
  10.117 -     * calls the <code>flush</code> method of its underlying output stream.
  10.118 -     *
  10.119 -     * @exception  IOException  if an I/O error occurs.
  10.120 -     * @see        java.io.FilterOutputStream#out
  10.121 -     */
  10.122 -    @Override
  10.123 -    public void flush() throws IOException {
  10.124 -        out.flush();
  10.125 -    }
  10.126 -
  10.127 -    /**
  10.128 -     * Closes this output stream and releases any system resources
  10.129 -     * associated with the stream.
  10.130 -     * <p>
  10.131 -     * The <code>close</code> method of <code>FilterOutputStream</code>
  10.132 -     * calls its <code>flush</code> method, and then calls the
  10.133 -     * <code>close</code> method of its underlying output stream.
  10.134 -     *
  10.135 -     * @exception  IOException  if an I/O error occurs.
  10.136 -     * @see        java.io.FilterOutputStream#flush()
  10.137 -     * @see        java.io.FilterOutputStream#out
  10.138 -     */
  10.139 -    @Override
  10.140 -    public void close() throws IOException {
  10.141 -        try {
  10.142 -            flush();
  10.143 -        } finally {
  10.144 -            out.close();
  10.145 -        }
  10.146 -    }
  10.147 -}
    11.1 --- a/src/com/aurellem/capture/Main.java	Tue Oct 25 12:29:40 2011 -0700
    11.2 +++ b/src/com/aurellem/capture/Main.java	Wed Oct 26 08:54:12 2011 -0700
    11.3 @@ -16,6 +16,8 @@
    11.4  import java.io.*;
    11.5  import java.util.Random;
    11.6  
    11.7 +import com.aurellem.capture.video.AVIOutputStream;
    11.8 +
    11.9  
   11.10  /**
   11.11   * Main.
    12.1 --- a/src/com/aurellem/capture/MicrosoftRLEEncoder.java	Tue Oct 25 12:29:40 2011 -0700
    12.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
    12.3 @@ -1,400 +0,0 @@
    12.4 -/*
    12.5 - * @(#)AppleRLEEncoder.java  1.1.1  2011-01-17
    12.6 - *
    12.7 - * Copyright © 2011 Werner Randelshofer, Immensee, Switzerland.
    12.8 - * All rights reserved.
    12.9 - *
   12.10 - * You may not use, copy or modify this file, except in compliance with the
   12.11 - * license agreement you entered into with Werner Randelshofer.
   12.12 - * For details see accompanying license terms.
   12.13 - */
   12.14 -package com.aurellem.capture;
   12.15 -
   12.16 -import java.io.ByteArrayOutputStream;
   12.17 -import java.io.IOException;
   12.18 -import java.io.OutputStream;
   12.19 -import java.util.Arrays;
   12.20 -
   12.21 -/**
   12.22 - * Implements the run length encoding of the Microsoft RLE format.
   12.23 - * <p>
   12.24 - * Each line of a frame is compressed individually. A line consists of two-byte
   12.25 - * op-codes optionally followed by data. The end of the line is marked with
   12.26 - * the EOL op-code.
   12.27 - * <p>
   12.28 - * The following op-codes are supported:
   12.29 - * <ul>
   12.30 - * <li>{@code 0x00 0x00}
   12.31 - * <br>Marks the end of a line.</li>
   12.32 - *
   12.33 - * <li>{@code  0x00 0x01}
   12.34 - * <br>Marks the end of the bitmap.</li>
   12.35 - *
   12.36 - * <li>{@code 0x00 0x02 x y}
   12.37 - * <br> Marks a delta (skip). {@code x} and {@code y}
   12.38 - * indicate the horizontal and vertical offset from the current position.
   12.39 - * {@code x} and {@code y} are unsigned 8-bit values.</li>
   12.40 - *
   12.41 - * <li>{@code 0x00 n data{n} 0x00?}
   12.42 - * <br> Marks a literal run. {@code n}
   12.43 - * gives the number of data bytes that follow. {@code n} must be between 3 and
   12.44 - * 255. If n is odd, a pad byte with the value 0x00 must be added.
   12.45 - * </li>
   12.46 - * <li>{@code n data}
   12.47 - * <br> Marks a repetition. {@code n}
   12.48 - * gives the number of times the data byte is repeated. {@code n} must be
   12.49 - * between 1 and 255.
   12.50 - * </li>
   12.51 - * </ul>
   12.52 - * Example:
   12.53 - * <pre>
   12.54 - * Compressed data         Expanded data
   12.55 - *
   12.56 - * 03 04                   04 04 04
   12.57 - * 05 06                   06 06 06 06 06
   12.58 - * 00 03 45 56 67 00       45 56 67
   12.59 - * 02 78                   78 78
   12.60 - * 00 02 05 01             Move 5 right and 1 down
   12.61 - * 02 78                   78 78
   12.62 - * 00 00                   End of line
   12.63 - * 09 1E                   1E 1E 1E 1E 1E 1E 1E 1E 1E
   12.64 - * 00 01                   End of RLE bitmap
   12.65 - * </pre>
   12.66 - *
   12.67 - * References:<br/>
   12.68 - * <a href="http://wiki.multimedia.cx/index.php?title=Microsoft_RLE">http://wiki.multimedia.cx/index.php?title=Microsoft_RLE</a><br>
   12.69 - *
   12.70 - * @author Werner Randelshofer
   12.71 - * @version 1.1.1 2011-01-17 Removes unused imports.
   12.72 - * <br>1.1 2011-01-07 Improves performance.
   12.73 - * <br>1.0 2011-01-05 Created.
   12.74 - */
   12.75 -public class MicrosoftRLEEncoder {
   12.76 -
   12.77 -    private SeekableByteArrayOutputStream tempSeek=new SeekableByteArrayOutputStream();
   12.78 -    private DataChunkOutputStream temp=new DataChunkOutputStream(tempSeek);
   12.79 -
   12.80 -    /** Encodes a 8-bit key frame.
   12.81 -     *
   12.82 -     * @param temp The output stream. Must be set to Big-Endian.
   12.83 -     * @param data The image data.
   12.84 -     * @param offset The offset to the first pixel in the data array.
   12.85 -     * @param length The width of the image in data elements.
   12.86 -     * @param step The number to add to offset to get to the next scanline.
   12.87 -     */
   12.88 -    public void writeKey8(OutputStream out, byte[] data, int offset, int length, int step, int height)
   12.89 -            throws IOException {
   12.90 -        tempSeek.reset();
   12.91 -        int ymax = offset + height * step;
   12.92 -        int upsideDown = ymax-step+offset;
   12.93 -
   12.94 -        // Encode each scanline separately
   12.95 -        for (int y = offset; y < ymax; y += step) {
   12.96 -            int xy = upsideDown-y;
   12.97 -            int xymax = xy + length;
   12.98 -
   12.99 -            int literalCount = 0;
  12.100 -            int repeatCount = 0;
  12.101 -            for (; xy < xymax; ++xy) {
  12.102 -                // determine repeat count
  12.103 -                byte v = data[xy];
  12.104 -                for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) {
  12.105 -                    if (data[xy] != v) {
  12.106 -                        break;
  12.107 -                    }
  12.108 -                }
  12.109 -                xy -= repeatCount;
  12.110 -                if (repeatCount < 3) {
  12.111 -                    literalCount++;
  12.112 -                    if (literalCount == 254) {
  12.113 -                        temp.write(0);temp.write(literalCount); // Literal OP-code
  12.114 -                        temp.write(data, xy - literalCount + 1, literalCount);
  12.115 -                        literalCount = 0;
  12.116 -                    }
  12.117 -                } else {
  12.118 -                    if (literalCount > 0) {
  12.119 -                        if (literalCount < 3) {
  12.120 -                            for (; literalCount > 0; --literalCount) {
  12.121 -                                temp.write(1); // Repeat OP-code
  12.122 -                                temp.write(data[xy - literalCount]);
  12.123 -                            }
  12.124 -                        } else {
  12.125 -                            temp.write(0);temp.write(literalCount); // Literal OP-code
  12.126 -                            temp.write(data, xy - literalCount, literalCount);
  12.127 -                            if (literalCount % 2 == 1) {
  12.128 -                                temp.write(0); // pad byte
  12.129 -                            }
  12.130 -                            literalCount = 0;
  12.131 -                        }
  12.132 -                    }
  12.133 -                    temp.write(repeatCount); // Repeat OP-code
  12.134 -                    temp.write(v);
  12.135 -                    xy += repeatCount - 1;
  12.136 -                }
  12.137 -            }
  12.138 -
  12.139 -            // flush literal run
  12.140 -            if (literalCount > 0) {
  12.141 -                if (literalCount < 3) {
  12.142 -                    for (; literalCount > 0; --literalCount) {
  12.143 -                        temp.write(1); // Repeat OP-code
  12.144 -                        temp.write(data[xy - literalCount]);
  12.145 -                    }
  12.146 -                } else {
  12.147 -                    temp.write(0);temp.write(literalCount);
  12.148 -                    temp.write(data, xy - literalCount, literalCount);
  12.149 -                    if (literalCount % 2 == 1) {
  12.150 -                        temp.write(0); // pad byte
  12.151 -                    }
  12.152 -                }
  12.153 -                literalCount = 0;
  12.154 -            }
  12.155 -
  12.156 -            temp.write(0);temp.write(0x0000);// End of line
  12.157 -        }
  12.158 -        temp.write(0);temp.write(0x0001);// End of bitmap
  12.159 -        tempSeek.toOutputStream(out);
  12.160 -    }
  12.161 -
  12.162 -    /** Encodes a 8-bit delta frame.
  12.163 -     *
  12.164 -     * @param temp The output stream. Must be set to Big-Endian.
  12.165 -     * @param data The image data.
  12.166 -     * @param prev The image data of the previous frame.
  12.167 -     * @param offset The offset to the first pixel in the data array.
  12.168 -     * @param length The width of the image in data elements.
  12.169 -     * @param step The number to add to offset to get to the next scanline.
  12.170 -     */
  12.171 -    public void writeDelta8(OutputStream out, byte[] data, byte[] prev, int offset, int length, int step, int height)
  12.172 -            throws IOException {
  12.173 -
  12.174 -tempSeek.reset();
  12.175 -        // Determine whether we can skip lines at the beginning
  12.176 -        int ymin;
  12.177 -        int ymax = offset + height * step;
  12.178 -        int upsideDown = ymax-step+offset;
  12.179 -        scanline:
  12.180 -        for (ymin = offset; ymin < ymax; ymin += step) {
  12.181 -            int xy = upsideDown-ymin;
  12.182 -            int xymax = xy + length;
  12.183 -            for (; xy < xymax; ++xy) {
  12.184 -                if (data[xy] != prev[xy]) {
  12.185 -                    break scanline;
  12.186 -                }
  12.187 -            }
  12.188 -        }
  12.189 -
  12.190 -        if (ymin == ymax) {
  12.191 -            // => Frame is identical to previous one
  12.192 -            temp.write(0);temp.write(0x0001); // end of bitmap
  12.193 -            return;
  12.194 -        }
  12.195 -
  12.196 -        if (ymin > offset) {
  12.197 -            int verticalOffset = ymin / step;
  12.198 -            while (verticalOffset > 255) {
  12.199 -                temp.write(0);temp.write(0x0002); // Skip OP-code
  12.200 -                temp.write(0); // horizontal offset
  12.201 -                temp.write(255); // vertical offset
  12.202 -                verticalOffset -= 255;
  12.203 -            }
  12.204 -            if (verticalOffset == 1) {
  12.205 -                temp.write(0);temp.write(0x0000); // End of line OP-code
  12.206 -            } else {
  12.207 -                temp.write(0);temp.write(0x0002); // Skip OP-code
  12.208 -                temp.write(0); // horizontal offset
  12.209 -                temp.write(verticalOffset); // vertical offset
  12.210 -            }
  12.211 -        }
  12.212 -
  12.213 -
  12.214 -        // Determine whether we can skip lines at the end
  12.215 -        scanline:
  12.216 -        for (; ymax > ymin; ymax -= step) {
  12.217 -            int xy = upsideDown-ymax+step;
  12.218 -            int xymax = xy + length;
  12.219 -            for (; xy < xymax; ++xy) {
  12.220 -                if (data[xy] != prev[xy]) {
  12.221 -                    break scanline;
  12.222 -                }
  12.223 -            }
  12.224 -        }
  12.225 -        //System.out.println("MicrosoftRLEEncoder ymin:" + ymin / step + " ymax" + ymax / step);
  12.226 -
  12.227 -
  12.228 -        // Encode each scanline
  12.229 -        int verticalOffset = 0;
  12.230 -        for (int y = ymin; y < ymax; y += step) {
  12.231 -            int xy = upsideDown-y;
  12.232 -            int xymax = xy + length;
  12.233 -
  12.234 -            // determine skip count
  12.235 -            int skipCount = 0;
  12.236 -            for (; xy < xymax; ++xy, ++skipCount) {
  12.237 -                if (data[xy] != prev[xy]) {
  12.238 -                    break;
  12.239 -                }
  12.240 -            }
  12.241 -            if (skipCount == length) {
  12.242 -                // => the entire line can be skipped
  12.243 -                ++verticalOffset;
  12.244 -                if (verticalOffset == 255) {
  12.245 -                    temp.write(0);temp.write(0x0002); // Skip OP-code
  12.246 -                    temp.write(0); // horizontal offset
  12.247 -                    temp.write(255); // vertical offset
  12.248 -                    verticalOffset = 0;
  12.249 -                }
  12.250 -                continue;
  12.251 -            }
  12.252 -
  12.253 -            if (verticalOffset > 0 || skipCount > 0) {
  12.254 -                if (verticalOffset == 1 && skipCount == 0) {
  12.255 -                    temp.write(0);temp.write(0x0000); // End of line OP-code
  12.256 -                } else {
  12.257 -                    temp.write(0);temp.write(0x0002); // Skip OP-code
  12.258 -                    temp.write(Math.min(255, skipCount)); // horizontal offset
  12.259 -                    skipCount -= 255;
  12.260 -                    temp.write(verticalOffset); // vertical offset
  12.261 -                }
  12.262 -                verticalOffset = 0;
  12.263 -            }
  12.264 -            while (skipCount > 0) {
  12.265 -                temp.write(0);temp.write(0x0002); // Skip OP-code
  12.266 -                temp.write(Math.min(255, skipCount)); // horizontal offset
  12.267 -                temp.write(0); // vertical offset
  12.268 -                skipCount -= 255;
  12.269 -            }
  12.270 -
  12.271 -            int literalCount = 0;
  12.272 -            int repeatCount = 0;
  12.273 -            for (; xy < xymax; ++xy) {
  12.274 -                // determine skip count
  12.275 -                for (skipCount = 0; xy < xymax; ++xy, ++skipCount) {
  12.276 -                    if (data[xy] != prev[xy]) {
  12.277 -                        break;
  12.278 -                    }
  12.279 -                }
  12.280 -                xy -= skipCount;
  12.281 -
  12.282 -                // determine repeat count
  12.283 -                byte v = data[xy];
  12.284 -                for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) {
  12.285 -                    if (data[xy] != v) {
  12.286 -                        break;
  12.287 -                    }
  12.288 -                }
  12.289 -                xy -= repeatCount;
  12.290 -
  12.291 -                if (skipCount < 4 && xy + skipCount < xymax && repeatCount < 3) {
  12.292 -                    literalCount++;
  12.293 -                    if (literalCount == 254) {
  12.294 -                        temp.write(0);temp.write(literalCount); // Literal OP-code
  12.295 -                        temp.write(data, xy - literalCount + 1, literalCount);
  12.296 -                        literalCount = 0;
  12.297 -                    }
  12.298 -                } else {
  12.299 -                    if (literalCount > 0) {
  12.300 -                        if (literalCount < 3) {
  12.301 -                            for (; literalCount > 0; --literalCount) {
  12.302 -                                temp.write(1); // Repeat OP-code
  12.303 -                                temp.write(data[xy - literalCount]);
  12.304 -                            }
  12.305 -                        } else {
  12.306 -                            temp.write(0);temp.write(literalCount);
  12.307 -                            temp.write(data, xy - literalCount, literalCount);
  12.308 -                            if (literalCount % 2 == 1) {
  12.309 -                                temp.write(0); // pad byte
  12.310 -                            }
  12.311 -                        }
  12.312 -                        literalCount = 0;
  12.313 -                    }
  12.314 -                    if (xy + skipCount == xymax) {
  12.315 -                        // => we can skip until the end of the line without
  12.316 -                        //    having to write an op-code
  12.317 -                        xy += skipCount - 1;
  12.318 -                    } else if (skipCount >= repeatCount) {
  12.319 -                        while (skipCount > 255) {
  12.320 -                            temp.write(0);temp.write(0x0002); // Skip OP-code
  12.321 -                            temp.write(255);
  12.322 -                            temp.write(0);
  12.323 -                            xy += 255;
  12.324 -                            skipCount -= 255;
  12.325 -                        }
  12.326 -                        temp.write(0);temp.write(0x0002); // Skip OP-code
  12.327 -                        temp.write(skipCount);
  12.328 -                        temp.write(0);
  12.329 -                        xy += skipCount - 1;
  12.330 -                    } else {
  12.331 -                        temp.write(repeatCount); // Repeat OP-code
  12.332 -                        temp.write(v);
  12.333 -                        xy += repeatCount - 1;
  12.334 -                    }
  12.335 -                }
  12.336 -            }
  12.337 -
  12.338 -            // flush literal run
  12.339 -            if (literalCount > 0) {
  12.340 -                if (literalCount < 3) {
  12.341 -                    for (; literalCount > 0; --literalCount) {
  12.342 -                        temp.write(1); // Repeat OP-code
  12.343 -                        temp.write(data[xy - literalCount]);
  12.344 -                    }
  12.345 -                } else {
  12.346 -                    temp.write(0);temp.write(literalCount);
  12.347 -                    temp.write(data, xy - literalCount, literalCount);
  12.348 -                    if (literalCount % 2 == 1) {
  12.349 -                        temp.write(0); // pad byte
  12.350 -                    }
  12.351 -                }
  12.352 -            }
  12.353 -
  12.354 -            temp.write(0);temp.write(0x0000); // End of line OP-code
  12.355 -        }
  12.356 -
  12.357 -        temp.write(0);temp.write(0x0001);// End of bitmap
  12.358 -        tempSeek.toOutputStream(out);
  12.359 -    }
  12.360 -
  12.361 -    public static void main(String[] args) {
  12.362 -        byte[] data = {//
  12.363 -            8, 2, 3, 4, 4, 3,7,7,7, 8,//
  12.364 -            8, 1, 1, 1, 1, 2,7,7,7, 8,//
  12.365 -            8, 0, 2, 0, 0, 0,7,7,7, 8,//
  12.366 -            8, 2, 2, 3, 4, 4,7,7,7, 8,//
  12.367 -            8, 1, 4, 4, 4, 5,7,7,7, 8};
  12.368 -
  12.369 -
  12.370 -        byte[] prev = {//
  12.371 -            8, 3, 3, 3, 3, 3,7,7,7, 8,//
  12.372 -            8, 1, 1, 1, 1, 1,7,7,7, 8, //
  12.373 -            8, 5, 5, 5, 5, 0,7,7,7, 8,//
  12.374 -            8, 2, 2, 0, 0, 0,7,7,7, 8,//
  12.375 -            8, 2, 0, 0, 0, 5,7,7,7, 8};
  12.376 -        ByteArrayOutputStream buf = new ByteArrayOutputStream();
  12.377 -        DataChunkOutputStream out = new DataChunkOutputStream(buf);
  12.378 -        MicrosoftRLEEncoder enc = new MicrosoftRLEEncoder();
  12.379 -
  12.380 -        try {
  12.381 -            enc.writeDelta8(out, data, prev, 1, 8, 10, 5);
  12.382 -            //enc.writeKey8(out, data, 1, 8, 10,5);
  12.383 -            out.close();
  12.384 -
  12.385 -            byte[] result = buf.toByteArray();
  12.386 -            System.out.println("size:" + result.length);
  12.387 -            System.out.println(Arrays.toString(result));
  12.388 -            System.out.print("0x [");
  12.389 -
  12.390 -            for (int i = 0; i < result.length; i++) {
  12.391 -                if (i != 0) {
  12.392 -                    System.out.print(',');
  12.393 -                }
  12.394 -                String hex = "00" + Integer.toHexString(result[i]);
  12.395 -                System.out.print(hex.substring(hex.length() - 2));
  12.396 -            }
  12.397 -            System.out.println(']');
  12.398 -
  12.399 -        } catch (IOException ex) {
  12.400 -            ex.printStackTrace();
  12.401 -        }
  12.402 -    }
  12.403 -}
    13.1 --- a/src/com/aurellem/capture/MultiListener.java	Tue Oct 25 12:29:40 2011 -0700
    13.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
    13.3 @@ -1,11 +0,0 @@
    13.4 -package com.aurellem.capture;
    13.5 -
    13.6 -import com.jme3.audio.Listener;
    13.7 -
    13.8 -public interface MultiListener {
    13.9 -
   13.10 -	void addListener(Listener l);
   13.11 -	void registerSoundProcessor(Listener l, SoundProcessor sp);
   13.12 -	void registerSoundProcessor(SoundProcessor sp);
   13.13 -	
   13.14 -}
    14.1 --- a/src/com/aurellem/capture/SeekableByteArrayOutputStream.java	Tue Oct 25 12:29:40 2011 -0700
    14.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
    14.3 @@ -1,153 +0,0 @@
    14.4 -/*
    14.5 - * @(#)SeekableByteArrayOutputStream.java  1.0  2010-12-27
    14.6 - * 
    14.7 - * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland.
    14.8 - * All rights reserved.
    14.9 - * 
   14.10 - * You may not use, copy or modify this file, except in compliance with the
   14.11 - * license agreement you entered into with Werner Randelshofer.
   14.12 - * For details see accompanying license terms.
   14.13 - */
   14.14 -
   14.15 -package com.aurellem.capture;
   14.16 -
   14.17 -import java.io.ByteArrayOutputStream;
   14.18 -import java.io.IOException;
   14.19 -import java.io.OutputStream;
   14.20 -import java.util.Arrays;
   14.21 -import static java.lang.Math.*;
   14.22 -/**
   14.23 - * {@code SeekableByteArrayOutputStream}.
   14.24 - *
   14.25 - * @author Werner Randelshofer
   14.26 - * @version 1.0 2010-12-27 Created.
   14.27 - */
   14.28 -public class SeekableByteArrayOutputStream extends ByteArrayOutputStream {
   14.29 -
   14.30 -    /**
   14.31 -     * The current stream position.
   14.32 -     */
   14.33 -    private int pos;
   14.34 -
   14.35 -    /**
   14.36 -     * Creates a new byte array output stream. The buffer capacity is
   14.37 -     * initially 32 bytes, though its size increases if necessary.
   14.38 -     */
   14.39 -    public SeekableByteArrayOutputStream() {
   14.40 -	this(32);
   14.41 -    }
   14.42 -
   14.43 -    /**
   14.44 -     * Creates a new byte array output stream, with a buffer capacity of
   14.45 -     * the specified size, in bytes.
   14.46 -     *
   14.47 -     * @param   size   the initial size.
   14.48 -     * @exception  IllegalArgumentException if size is negative.
   14.49 -     */
   14.50 -    public SeekableByteArrayOutputStream(int size) {
   14.51 -        if (size < 0) {
   14.52 -            throw new IllegalArgumentException("Negative initial size: "
   14.53 -                                               + size);
   14.54 -        }
   14.55 -	buf = new byte[size];
   14.56 -    }
   14.57 -
   14.58 -    /**
   14.59 -     * Writes the specified byte to this byte array output stream.
   14.60 -     *
   14.61 -     * @param   b   the byte to be written.
   14.62 -     */
   14.63 -    @Override
   14.64 -    public synchronized void write(int b) {
   14.65 -	int newcount = max(pos + 1, count);
   14.66 -	if (newcount > buf.length) {
   14.67 -            buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount));
   14.68 -	}
   14.69 -	buf[pos++] = (byte)b;
   14.70 -	count = newcount;
   14.71 -    }
   14.72 -
   14.73 -    /**
   14.74 -     * Writes <code>len</code> bytes from the specified byte array
   14.75 -     * starting at offset <code>off</code> to this byte array output stream.
   14.76 -     *
   14.77 -     * @param   b     the data.
   14.78 -     * @param   off   the start offset in the data.
   14.79 -     * @param   len   the number of bytes to write.
   14.80 -     */
   14.81 -    @Override
   14.82 -    public synchronized void write(byte b[], int off, int len) {
   14.83 -	if ((off < 0) || (off > b.length) || (len < 0) ||
   14.84 -            ((off + len) > b.length) || ((off + len) < 0)) {
   14.85 -	    throw new IndexOutOfBoundsException();
   14.86 -	} else if (len == 0) {
   14.87 -	    return;
   14.88 -	}
   14.89 -        int newcount = max(pos+len,count);
   14.90 -        if (newcount > buf.length) {
   14.91 -            buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount));
   14.92 -        }
   14.93 -        System.arraycopy(b, off, buf, pos, len);
   14.94 -        pos+=len;
   14.95 -        count = newcount;
   14.96 -    }
   14.97 -
   14.98 -    /**
   14.99 -     * Resets the <code>count</code> field of this byte array output
  14.100 -     * stream to zero, so that all currently accumulated output in the
  14.101 -     * output stream is discarded. The output stream can be used again,
  14.102 -     * reusing the already allocated buffer space.
  14.103 -     *
  14.104 -     * @see     java.io.ByteArrayInputStream#count
  14.105 -     */
  14.106 -    @Override
  14.107 -    public synchronized void reset() {
  14.108 -	count = 0;
  14.109 -        pos=0;
  14.110 -    }
  14.111 -
  14.112 -    /**
  14.113 -     * Sets the current stream position to the desired location.  The
  14.114 -     * next read will occur at this location.  The bit offset is set
  14.115 -     * to 0.
  14.116 -     *
  14.117 -     * <p> An <code>IndexOutOfBoundsException</code> will be thrown if
  14.118 -     * <code>pos</code> is smaller than the flushed position (as
  14.119 -     * returned by <code>getflushedPosition</code>).
  14.120 -     *
  14.121 -     * <p> It is legal to seek past the end of the file; an
  14.122 -     * <code>EOFException</code> will be thrown only if a read is
  14.123 -     * performed.
  14.124 -     *
  14.125 -     * @param pos a <code>long</code> containing the desired file
  14.126 -     * pointer position.
  14.127 -     *
  14.128 -     * @exception IndexOutOfBoundsException if <code>pos</code> is smaller
  14.129 -     * than the flushed position.
  14.130 -     * @exception IOException if any other I/O error occurs.
  14.131 -     */
  14.132 -    public void seek(long pos) throws IOException {
  14.133 -        this.pos = (int)pos;
  14.134 -    }
  14.135 -
  14.136 -        /**
  14.137 -     * Returns the current byte position of the stream.  The next write
  14.138 -     * will take place starting at this offset.
  14.139 -     *
  14.140 -     * @return a long containing the position of the stream.
  14.141 -     *
  14.142 -     * @exception IOException if an I/O error occurs.
  14.143 -     */
  14.144 -    public long getStreamPosition() throws IOException {
  14.145 -        return pos;
  14.146 -    }
  14.147 -
  14.148 -    /** Writes the contents of the byte array into the specified output
  14.149 -     * stream.
  14.150 -     * @param out
  14.151 -     */
  14.152 -    public void toOutputStream(OutputStream out) throws IOException {
  14.153 -        out.write(buf, 0, count);
  14.154 -    }
  14.155 -
  14.156 -}
    15.1 --- a/src/com/aurellem/capture/SoundProcessor.java	Tue Oct 25 12:29:40 2011 -0700
    15.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
    15.3 @@ -1,11 +0,0 @@
    15.4 -package com.aurellem.capture;
    15.5 -
    15.6 -import java.nio.ByteBuffer;
    15.7 -
    15.8 -public interface SoundProcessor {
    15.9 -
   15.10 -	void cleanup();
   15.11 -	
   15.12 -	void process(ByteBuffer audioSamples, int numSamples);
   15.13 -	
   15.14 -}
    16.1 --- a/src/com/aurellem/capture/WaveFileWriter.java	Tue Oct 25 12:29:40 2011 -0700
    16.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
    16.3 @@ -1,46 +0,0 @@
    16.4 -package com.aurellem.capture;
    16.5 -
    16.6 -import java.io.ByteArrayInputStream;
    16.7 -import java.io.File;
    16.8 -import java.io.IOException;
    16.9 -import java.nio.ByteBuffer;
   16.10 -import java.util.Vector;
   16.11 -
   16.12 -import javax.sound.sampled.AudioFileFormat;
   16.13 -import javax.sound.sampled.AudioFormat;
   16.14 -import javax.sound.sampled.AudioInputStream;
   16.15 -import javax.sound.sampled.AudioSystem;
   16.16 -
   16.17 -public class WaveFileWriter implements SoundProcessor {
   16.18 -
   16.19 -	public Vector<Byte> fullWaveData = new Vector<Byte>();
   16.20 -	public File targetFile;
   16.21 -	
   16.22 -	public WaveFileWriter(File targetFile){
   16.23 -		this.targetFile = targetFile;
   16.24 -	}
   16.25 -	
   16.26 -	public void cleanup() {
   16.27 -		byte[] data = new byte[this.fullWaveData.size()];
   16.28 -		
   16.29 -		for (int i = 0; i < this.fullWaveData.size(); i++){
   16.30 -			data[i] = this.fullWaveData.get(i);}
   16.31 -		
   16.32 -		
   16.33 -		ByteArrayInputStream input = new ByteArrayInputStream(data);
   16.34 -		AudioFormat format = new AudioFormat(44100.0f, 32, 1, true, false); 
   16.35 -		AudioInputStream audioInput = new AudioInputStream(input, format, data.length / 4 );
   16.36 -		try {AudioSystem.write(audioInput, AudioFileFormat.Type.WAVE, targetFile);} 
   16.37 -		catch (IOException e) {e.printStackTrace();}
   16.38 -
   16.39 -	}
   16.40 -
   16.41 -	
   16.42 -	public void process(ByteBuffer audioSamples, int numSamples) {
   16.43 -		for (int i = 0; i<numSamples; i++){
   16.44 -			Byte b = audioSamples.get(i);
   16.45 -			fullWaveData.add(b);
   16.46 -		}
   16.47 -	}
   16.48 -
   16.49 -}
    17.1 --- a/src/com/aurellem/capture/XuggleVideoRecorder.java	Tue Oct 25 12:29:40 2011 -0700
    17.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
    17.3 @@ -1,53 +0,0 @@
    17.4 -package com.aurellem.capture;
    17.5 -
    17.6 -
    17.7 -/**
    17.8 - * Handles writing video files using Xuggle.
    17.9 - * 
   17.10 - * 
   17.11 - * @author Robert McIntyre
   17.12 - *
   17.13 - */
   17.14 -/*
   17.15 -public  class XuggleVideoRecorder extends AbstractVideoRecorder{
   17.16 -
   17.17 -
   17.18 -	IMediaWriter writer;
   17.19 -	BufferedImage frame;
   17.20 -	int videoChannel = 0;
   17.21 -	long currentTimeStamp = 0;
   17.22 -	boolean videoReady = false;
   17.23 -	
   17.24 -	
   17.25 -	public XuggleVideoRecorder(File output) throws IOException {super(output);}
   17.26 -	
   17.27 -	public void initVideo(){
   17.28 -		this.frame = new BufferedImage(
   17.29 -				width, height,
   17.30 -				BufferedImage.TYPE_3BYTE_BGR);
   17.31 -		this.writer = ToolFactory.makeWriter(this.targetFileName);
   17.32 -		writer.addVideoStream(videoChannel, 
   17.33 -				0, IRational.make(fps), 
   17.34 -				width, height);
   17.35 -		this.videoReady = true;
   17.36 -	}
   17.37 -
   17.38 -		
   17.39 -	public void record(BufferedImage rawFrame) {
   17.40 -		if (!this.videoReady){initVideo();}
   17.41 -		// convert the Image into the form that Xuggle likes.
   17.42 -		this.frame.getGraphics().drawImage(rawFrame, 0, 0, null);
   17.43 -		writer.encodeVideo(videoChannel, 
   17.44 -			frame,
   17.45 -			currentTimeStamp, TimeUnit.NANOSECONDS);
   17.46 -		
   17.47 -		currentTimeStamp += (long) (1000000000.0 / fps);
   17.48 -	}
   17.49 -
   17.50 -	public void finish() {
   17.51 -		writer.close();
   17.52 -	}
   17.53 -	
   17.54 -}
   17.55 -
   17.56 -*/
    18.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    18.2 +++ b/src/com/aurellem/capture/audio/AudioSend.java	Wed Oct 26 08:54:12 2011 -0700
    18.3 @@ -0,0 +1,260 @@
    18.4 +package com.aurellem.capture.audio;
    18.5 +
    18.6 +import java.lang.reflect.Field;
    18.7 +import java.nio.ByteBuffer;
    18.8 +import java.util.HashMap;
    18.9 +import java.util.Vector;
   18.10 +import java.util.concurrent.CountDownLatch;
   18.11 +import java.util.logging.Level;
   18.12 +import java.util.logging.Logger;
   18.13 +
   18.14 +import org.lwjgl.LWJGLException;
   18.15 +import org.lwjgl.openal.AL;
   18.16 +import org.lwjgl.openal.AL10;
   18.17 +import org.lwjgl.openal.ALCdevice;
   18.18 +import org.lwjgl.openal.OpenALException;
   18.19 +
   18.20 +import com.jme3.audio.Listener;
   18.21 +import com.jme3.audio.lwjgl.LwjglAudioRenderer;
   18.22 +import com.jme3.math.Vector3f;
   18.23 +import com.jme3.util.BufferUtils;
   18.24 +
   18.25 +public class AudioSend 
   18.26 +	extends LwjglAudioRenderer implements MultiListener {
   18.27 +
   18.28 +	/**
   18.29 +	 * Keeps track of all the listeners which have been registered so far.
   18.30 +	 * The first element is <code>null</code>, which represents the zeroth 
   18.31 +	 * LWJGL listener which is created automatically.
   18.32 +	 */
   18.33 +	public Vector<Listener> listeners = new Vector<Listener>();
   18.34 +	
   18.35 +	public void initialize(){
   18.36 +		super.initialize();
   18.37 +		listeners.add(null);
   18.38 +	}
   18.39 +	
   18.40 +	/**
   18.41 +	 * This is to call the native methods which require the OpenAL device ID.
   18.42 +	 * currently it is obtained through reflection.
   18.43 +	 */
   18.44 +	private long deviceID;
   18.45 +	
   18.46 +	/**
   18.47 +	 * To ensure that <code>deviceID<code> and <code>listeners<code> are 
   18.48 +	 * properly initialized before any additional listeners are added.
   18.49 +	 */
   18.50 +	private CountDownLatch latch  = new CountDownLatch(1);
   18.51 +	
   18.52 +	private void waitForInit(){
   18.53 +		try {latch.await();} 
   18.54 +		catch (InterruptedException e) {e.printStackTrace();}
   18.55 +	}
   18.56 +	
   18.57 +	/**
   18.58 +	 * Each listener (including the main LWJGL listener) can be registered
   18.59 +	 * with a <code>SoundProcessor</code>, which this Renderer will call whenever 
   18.60 +	 * there is new audio data to be processed.
   18.61 +	 */
   18.62 +	public HashMap<Listener, SoundProcessor> soundProcessorMap =
   18.63 +		new HashMap<Listener, SoundProcessor>();
   18.64 +	
   18.65 +		
   18.66 +	/**
   18.67 +	 * Create a new slave context on the recorder device which will render all the 
   18.68 +	 * sounds in the main LWJGL context with respect to this listener.
   18.69 +	 */
   18.70 +	public void addListener(Listener l) {
   18.71 +		try {this.latch.await();} 
   18.72 +		catch (InterruptedException e) {e.printStackTrace();}
   18.73 +		this.addListener();
   18.74 +		this.listeners.add(l);
   18.75 +	}
   18.76 +	
   18.77 +	/**
   18.78 +	 * Whenever new data is rendered in the perspective of this listener, 
   18.79 +	 * this Renderer will send that data to the SoundProcessor of your choosing.
   18.80 +	 */
   18.81 +	public void registerSoundProcessor(Listener l, SoundProcessor sp) {
   18.82 +		this.soundProcessorMap.put(l, sp);
   18.83 +	}
   18.84 +	
   18.85 +	/**
   18.86 +	 * Registers a SoundProcessor for the main LWJGL context. IF all you want to 
   18.87 +	 * do is record the sound you would normally hear in your application, then 
   18.88 +	 * this is the only method you have to worry about.
   18.89 +	 */
   18.90 +	public void registerSoundProcessor(SoundProcessor sp){
   18.91 +		// register a sound processor for the default listener.
   18.92 +		this.soundProcessorMap.put(null, sp);		
   18.93 +	}
   18.94 +		
   18.95 +	private static final Logger logger = 
   18.96 +		Logger.getLogger(AudioSend.class.getName());
   18.97 +
   18.98 +	
   18.99 +	////////////   Native Methods
  18.100 +	
  18.101 +	/** This establishes the LWJGL context as the context which will be copies to all 
  18.102 +	 *  other contexts.  It must be called before any calls to <code>addListener();</code>
  18.103 +	 */
  18.104 +	public void initDevice(){
  18.105 +		ninitDevice(this.deviceID);}
  18.106 +	public static native void ninitDevice(long device);
  18.107 +	
  18.108 +	/**
  18.109 +	 * The send device does not automatically process sound.  This step function will cause 
  18.110 +	 * the desired number of samples to be processed for each listener.  The results will then 
  18.111 +	 * be available via calls to <code>getSamples()</code> for each listener.
  18.112 +	 * @param samples
  18.113 +	 */
  18.114 +	public void step(int samples){
  18.115 +		nstep(this.deviceID, samples);}
  18.116 +	public static native void nstep(long device, int samples);
  18.117 +
  18.118 +	/**
  18.119 +	 * Retrieve the final rendered sound for a particular listener.  <code>contextNum == 0</code>
  18.120 +	 * is the main LWJGL context.
  18.121 +	 * @param buffer
  18.122 +	 * @param samples
  18.123 +	 * @param contextNum
  18.124 +	 */
  18.125 +	public void getSamples(ByteBuffer buffer, int samples, int contextNum){
  18.126 +		ngetSamples(this.deviceID, buffer, buffer.position(), samples, contextNum);}
  18.127 +	public static native void ngetSamples(
  18.128 +			long device, ByteBuffer buffer, int position, int samples, int contextNum);
  18.129 +	
  18.130 +	/**
  18.131 +	 * Create an additional listener on the recorder device.  The device itself will manage 
  18.132 +	 * this listener and synchronize it with the main LWJGL context. Processed sound samples
  18.133 +	 * for this listener will be available via a call to <code>getSamples()</code> with 
  18.134 +	 * <code>contextNum</code> equal to the number of times this method has been called. 
  18.135 +	 */
  18.136 +	public void addListener(){naddListener(this.deviceID);}
  18.137 +	public static native void naddListener(long device);
  18.138 +	
  18.139 +	/**
  18.140 +	 * This will internally call <code>alListener3f<code> in the appropriate slave context and update
  18.141 +	 * that context's listener's parameters. Calling this for a number greater than the current 
  18.142 +	 * number of slave contexts will have no effect.
  18.143 +	 * @param pname
  18.144 +	 * @param v1
  18.145 +	 * @param v2
  18.146 +	 * @param v3
  18.147 +	 * @param contextNum
  18.148 +	 */
  18.149 +	public void setNthListener3f(int pname, float v1, float v2, float v3, int contextNum){
  18.150 +		nsetNthListener3f(pname, v1, v2, v3, this.deviceID, contextNum);}
  18.151 +	public static native void 
  18.152 +	nsetNthListener3f(int pname, float v1, float v2, float v3, long device, int contextNum);
  18.153 +	
  18.154 +	/**
  18.155 +	 * This will internally call <code>alListenerf<code> in the appropriate slave context and update
  18.156 +	 * that context's listener's parameters. Calling this for a number greater than the current 
  18.157 +	 * number of slave contexts will have no effect.
  18.158 +	 * @param pname
  18.159 +	 * @param v1
  18.160 +	 * @param contextNum
  18.161 +	 */
  18.162 +	public void setNthListenerf(int pname, float v1, int contextNum){
  18.163 +		nsetNthListenerf(pname, v1, this.deviceID, contextNum);}
  18.164 +	public static native void nsetNthListenerf(int pname, float v1, long device, int contextNum);
  18.165 +	
  18.166 +	/**
  18.167 +	 * Instead of taking whatever device is available on the system, this call 
  18.168 +	 * creates the "Multiple Audio Send" device, which supports multiple listeners in a limited
  18.169 +	 * capacity.  For each listener, the device renders it not to the sound device, but
  18.170 +	 * instead to buffers which it makes available via JNI.
  18.171 +	 */
  18.172 +	public void initInThread(){
  18.173 +		try{
  18.174 +            if (!AL.isCreated()){
  18.175 +                AL.create("Multiple Audio Send", 44100, 60, false);
  18.176 +            }
  18.177 +        }catch (OpenALException ex){
  18.178 +            logger.log(Level.SEVERE, "Failed to load audio library", ex);
  18.179 +            System.exit(1);
  18.180 +            return;
  18.181 +        }catch (LWJGLException ex){
  18.182 +            logger.log(Level.SEVERE, "Failed to load audio library", ex);
  18.183 +            System.exit(1);
  18.184 +            return;
  18.185 +        }
  18.186 +		super.initInThread();
  18.187 +
  18.188 +		ALCdevice device = AL.getDevice();
  18.189 +
  18.190 +		// RLM: use reflection to grab the ID of our device for use later.
  18.191 +		try {
  18.192 +			Field deviceIDField;
  18.193 +			deviceIDField = ALCdevice.class.getDeclaredField("device");
  18.194 +			deviceIDField.setAccessible(true);
  18.195 +			try {deviceID = (Long)deviceIDField.get(device);} 
  18.196 +			catch (IllegalArgumentException e) {e.printStackTrace();} 
  18.197 +			catch (IllegalAccessException e) {e.printStackTrace();}
  18.198 +			deviceIDField.setAccessible(false);} 
  18.199 +		catch (SecurityException e) {e.printStackTrace();} 
  18.200 +		catch (NoSuchFieldException e) {e.printStackTrace();}
  18.201 +		
  18.202 +		// the LWJGL context must be established as the master context before 
  18.203 +		// any other listeners can be created on this device.
  18.204 +		initDevice();
  18.205 +		// Now, everything is initialized, and it is safe to add more listeners.
  18.206 +		latch.countDown();
  18.207 +	}
  18.208 +
  18.209 +	
  18.210 +	public void cleanup(){
  18.211 +		for(SoundProcessor sp : this.soundProcessorMap.values()){
  18.212 +			sp.cleanup();
  18.213 +		}
  18.214 +		super.cleanup();
  18.215 +	}
  18.216 +	
  18.217 +	public void updateAllListeners(){
  18.218 +		for (int i = 0; i < this.listeners.size(); i++){
  18.219 +			Listener lis = this.listeners.get(i);
  18.220 +			if (null != lis){
  18.221 +				Vector3f location = lis.getLocation();
  18.222 +				Vector3f velocity = lis.getVelocity();
  18.223 +				Vector3f orientation = lis.getUp();
  18.224 +				float gain = lis.getVolume();
  18.225 +				setNthListener3f(AL10.AL_POSITION, 
  18.226 +						location.x, location.y, location.z, i);
  18.227 +				setNthListener3f(AL10.AL_VELOCITY, 
  18.228 +						velocity.x, velocity.y, velocity.z, i);
  18.229 +				setNthListener3f(AL10.AL_ORIENTATION,
  18.230 +						orientation.x, orientation.y, orientation.z, i);
  18.231 +				setNthListenerf(AL10.AL_GAIN, gain, i);
  18.232 +			}
  18.233 +		}
  18.234 +	}
  18.235 +	
  18.236 +	
  18.237 +	public final static int BYTES_PER_SAMPLE = 4;
  18.238 +	private ByteBuffer buffer = BufferUtils.createByteBuffer(4096); 
  18.239 +	
  18.240 +	public void dispatchAudio(float tpf){
  18.241 +		int samplesToGet = (int) (tpf * 44100);
  18.242 +		try {latch.await();} 
  18.243 +		catch (InterruptedException e) {e.printStackTrace();}
  18.244 +		step(samplesToGet);
  18.245 +		updateAllListeners();
  18.246 +		
  18.247 +		for (int i = 0; i < this.listeners.size(); i++){		
  18.248 +			buffer.clear();
  18.249 +			this.getSamples(buffer, samplesToGet, i);
  18.250 +			SoundProcessor sp = 
  18.251 +			this.soundProcessorMap.get(this.listeners.get(i));
  18.252 +			if (null != sp){sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);}
  18.253 +		}
  18.254 +		
  18.255 +	}
  18.256 +		
  18.257 +	public void update(float tpf){
  18.258 +		super.update(tpf);
  18.259 +        dispatchAudio(tpf);
  18.260 +	}
  18.261 +	
  18.262 +}
  18.263 +
    19.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    19.2 +++ b/src/com/aurellem/capture/audio/MultiListener.java	Wed Oct 26 08:54:12 2011 -0700
    19.3 @@ -0,0 +1,11 @@
    19.4 +package com.aurellem.capture.audio;
    19.5 +
    19.6 +import com.jme3.audio.Listener;
    19.7 +
    19.8 +public interface MultiListener {
    19.9 +
   19.10 +	void addListener(Listener l);
   19.11 +	void registerSoundProcessor(Listener l, SoundProcessor sp);
   19.12 +	void registerSoundProcessor(SoundProcessor sp);
   19.13 +	
   19.14 +}
    20.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    20.2 +++ b/src/com/aurellem/capture/audio/SeekableByteArrayOutputStream.java	Wed Oct 26 08:54:12 2011 -0700
    20.3 @@ -0,0 +1,153 @@
    20.4 +/*
    20.5 + * @(#)SeekableByteArrayOutputStream.java  1.0  2010-12-27
    20.6 + * 
    20.7 + * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland.
    20.8 + * All rights reserved.
    20.9 + * 
   20.10 + * You may not use, copy or modify this file, except in compliance with the
   20.11 + * license agreement you entered into with Werner Randelshofer.
   20.12 + * For details see accompanying license terms.
   20.13 + */
   20.14 +
   20.15 +package com.aurellem.capture.audio;
   20.16 +
   20.17 +import java.io.ByteArrayOutputStream;
   20.18 +import java.io.IOException;
   20.19 +import java.io.OutputStream;
   20.20 +import java.util.Arrays;
   20.21 +import static java.lang.Math.*;
   20.22 +/**
   20.23 + * {@code SeekableByteArrayOutputStream}.
   20.24 + *
   20.25 + * @author Werner Randelshofer
   20.26 + * @version 1.0 2010-12-27 Created.
   20.27 + */
   20.28 +public class SeekableByteArrayOutputStream extends ByteArrayOutputStream {
   20.29 +
   20.30 +    /**
   20.31 +     * The current stream position.
   20.32 +     */
   20.33 +    private int pos;
   20.34 +
   20.35 +    /**
   20.36 +     * Creates a new byte array output stream. The buffer capacity is
   20.37 +     * initially 32 bytes, though its size increases if necessary.
   20.38 +     */
   20.39 +    public SeekableByteArrayOutputStream() {
   20.40 +	this(32);
   20.41 +    }
   20.42 +
   20.43 +    /**
   20.44 +     * Creates a new byte array output stream, with a buffer capacity of
   20.45 +     * the specified size, in bytes.
   20.46 +     *
   20.47 +     * @param   size   the initial size.
   20.48 +     * @exception  IllegalArgumentException if size is negative.
   20.49 +     */
   20.50 +    public SeekableByteArrayOutputStream(int size) {
   20.51 +        if (size < 0) {
   20.52 +            throw new IllegalArgumentException("Negative initial size: "
   20.53 +                                               + size);
   20.54 +        }
   20.55 +	buf = new byte[size];
   20.56 +    }
   20.57 +
   20.58 +    /**
   20.59 +     * Writes the specified byte to this byte array output stream.
   20.60 +     *
   20.61 +     * @param   b   the byte to be written.
   20.62 +     */
   20.63 +    @Override
   20.64 +    public synchronized void write(int b) {
   20.65 +	int newcount = max(pos + 1, count);
   20.66 +	if (newcount > buf.length) {
   20.67 +            buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount));
   20.68 +	}
   20.69 +	buf[pos++] = (byte)b;
   20.70 +	count = newcount;
   20.71 +    }
   20.72 +
   20.73 +    /**
   20.74 +     * Writes <code>len</code> bytes from the specified byte array
   20.75 +     * starting at offset <code>off</code> to this byte array output stream.
   20.76 +     *
   20.77 +     * @param   b     the data.
   20.78 +     * @param   off   the start offset in the data.
   20.79 +     * @param   len   the number of bytes to write.
   20.80 +     */
   20.81 +    @Override
   20.82 +    public synchronized void write(byte b[], int off, int len) {
   20.83 +	if ((off < 0) || (off > b.length) || (len < 0) ||
   20.84 +            ((off + len) > b.length) || ((off + len) < 0)) {
   20.85 +	    throw new IndexOutOfBoundsException();
   20.86 +	} else if (len == 0) {
   20.87 +	    return;
   20.88 +	}
   20.89 +        int newcount = max(pos+len,count);
   20.90 +        if (newcount > buf.length) {
   20.91 +            buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount));
   20.92 +        }
   20.93 +        System.arraycopy(b, off, buf, pos, len);
   20.94 +        pos+=len;
   20.95 +        count = newcount;
   20.96 +    }
   20.97 +
   20.98 +    /**
   20.99 +     * Resets the <code>count</code> field of this byte array output
  20.100 +     * stream to zero, so that all currently accumulated output in the
  20.101 +     * output stream is discarded. The output stream can be used again,
  20.102 +     * reusing the already allocated buffer space.
  20.103 +     *
  20.104 +     * @see     java.io.ByteArrayInputStream#count
  20.105 +     */
  20.106 +    @Override
  20.107 +    public synchronized void reset() {
  20.108 +	count = 0;
  20.109 +        pos=0;
  20.110 +    }
  20.111 +
  20.112 +    /**
  20.113 +     * Sets the current stream position to the desired location.  The
  20.114 +     * next read will occur at this location.  The bit offset is set
  20.115 +     * to 0.
  20.116 +     *
  20.117 +     * <p> An <code>IndexOutOfBoundsException</code> will be thrown if
  20.118 +     * <code>pos</code> is smaller than the flushed position (as
  20.119 +     * returned by <code>getflushedPosition</code>).
  20.120 +     *
  20.121 +     * <p> It is legal to seek past the end of the file; an
  20.122 +     * <code>EOFException</code> will be thrown only if a read is
  20.123 +     * performed.
  20.124 +     *
  20.125 +     * @param pos a <code>long</code> containing the desired file
  20.126 +     * pointer position.
  20.127 +     *
  20.128 +     * @exception IndexOutOfBoundsException if <code>pos</code> is smaller
  20.129 +     * than the flushed position.
  20.130 +     * @exception IOException if any other I/O error occurs.
  20.131 +     */
  20.132 +    public void seek(long pos) throws IOException {
  20.133 +        this.pos = (int)pos;
  20.134 +    }
  20.135 +
  20.136 +        /**
  20.137 +     * Returns the current byte position of the stream.  The next write
  20.138 +     * will take place starting at this offset.
  20.139 +     *
  20.140 +     * @return a long containing the position of the stream.
  20.141 +     *
  20.142 +     * @exception IOException if an I/O error occurs.
  20.143 +     */
  20.144 +    public long getStreamPosition() throws IOException {
  20.145 +        return pos;
  20.146 +    }
  20.147 +
  20.148 +    /** Writes the contents of the byte array into the specified output
  20.149 +     * stream.
  20.150 +     * @param out
  20.151 +     */
  20.152 +    public void toOutputStream(OutputStream out) throws IOException {
  20.153 +        out.write(buf, 0, count);
  20.154 +    }
  20.155 +
  20.156 +}
    21.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    21.2 +++ b/src/com/aurellem/capture/audio/SoundProcessor.java	Wed Oct 26 08:54:12 2011 -0700
    21.3 @@ -0,0 +1,11 @@
    21.4 +package com.aurellem.capture.audio;
    21.5 +
    21.6 +import java.nio.ByteBuffer;
    21.7 +
    21.8 +public interface SoundProcessor {
    21.9 +
   21.10 +	void cleanup();
   21.11 +	
   21.12 +	void process(ByteBuffer audioSamples, int numSamples);
   21.13 +	
   21.14 +}
    22.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    22.2 +++ b/src/com/aurellem/capture/audio/WaveFileWriter.java	Wed Oct 26 08:54:12 2011 -0700
    22.3 @@ -0,0 +1,46 @@
    22.4 +package com.aurellem.capture.audio;
    22.5 +
    22.6 +import java.io.ByteArrayInputStream;
    22.7 +import java.io.File;
    22.8 +import java.io.IOException;
    22.9 +import java.nio.ByteBuffer;
   22.10 +import java.util.Vector;
   22.11 +
   22.12 +import javax.sound.sampled.AudioFileFormat;
   22.13 +import javax.sound.sampled.AudioFormat;
   22.14 +import javax.sound.sampled.AudioInputStream;
   22.15 +import javax.sound.sampled.AudioSystem;
   22.16 +
   22.17 +public class WaveFileWriter implements SoundProcessor {
   22.18 +
   22.19 +	public Vector<Byte> fullWaveData = new Vector<Byte>();
   22.20 +	public File targetFile;
   22.21 +	
   22.22 +	public WaveFileWriter(File targetFile){
   22.23 +		this.targetFile = targetFile;
   22.24 +	}
   22.25 +	
   22.26 +	public void cleanup() {
   22.27 +		byte[] data = new byte[this.fullWaveData.size()];
   22.28 +		
   22.29 +		for (int i = 0; i < this.fullWaveData.size(); i++){
   22.30 +			data[i] = this.fullWaveData.get(i);}
   22.31 +		
   22.32 +		
   22.33 +		ByteArrayInputStream input = new ByteArrayInputStream(data);
   22.34 +		AudioFormat format = new AudioFormat(44100.0f, 32, 1, true, false); 
   22.35 +		AudioInputStream audioInput = new AudioInputStream(input, format, data.length / 4 );
   22.36 +		try {AudioSystem.write(audioInput, AudioFileFormat.Type.WAVE, targetFile);} 
   22.37 +		catch (IOException e) {e.printStackTrace();}
   22.38 +
   22.39 +	}
   22.40 +
   22.41 +	
   22.42 +	public void process(ByteBuffer audioSamples, int numSamples) {
   22.43 +		for (int i = 0; i<numSamples; i++){
   22.44 +			Byte b = audioSamples.get(i);
   22.45 +			fullWaveData.add(b);
   22.46 +		}
   22.47 +	}
   22.48 +
   22.49 +}
    23.1 --- a/src/com/aurellem/capture/hello/HelloAudio.java	Tue Oct 25 12:29:40 2011 -0700
    23.2 +++ b/src/com/aurellem/capture/hello/HelloAudio.java	Wed Oct 26 08:54:12 2011 -0700
    23.3 @@ -1,12 +1,10 @@
    23.4  package com.aurellem.capture.hello;
    23.5  
    23.6  import java.io.File;
    23.7 -import java.util.logging.Level;
    23.8 -import java.util.logging.Logger;
    23.9  
   23.10  import com.aurellem.capture.IsoTimer;
   23.11 -import com.aurellem.capture.MultiListener;
   23.12 -import com.aurellem.capture.WaveFileWriter;
   23.13 +import com.aurellem.capture.audio.MultiListener;
   23.14 +import com.aurellem.capture.audio.WaveFileWriter;
   23.15  import com.jme3.app.SimpleApplication;
   23.16  import com.jme3.audio.AudioNode;
   23.17  import com.jme3.audio.Listener;
   23.18 @@ -30,10 +28,7 @@
   23.19    public File data1 = new File("/home/r/tmp/data1.wav");
   23.20    public File data2 = new File("/home/r/tmp/data2.wav");
   23.21    public File data3 = new File("/home/r/tmp/data3.wav");
   23.22 -  
   23.23 - 
   23.24 -  
   23.25 -  
   23.26 +   
   23.27    private File makeTarget(int n){
   23.28  	  	return new File("/home/r/tmp/assload-" + n + ".wav");
   23.29    }
   23.30 @@ -45,6 +40,7 @@
   23.31  	  
   23.32  	HelloAudio app = new HelloAudio();
   23.33  	AppSettings settings = new AppSettings(true);
   23.34 +	
   23.35  	settings.setAudioRenderer("Send");
   23.36  	app.setSettings(settings);
   23.37  	app.setShowSettings(false);
   23.38 @@ -139,6 +135,12 @@
   23.39      listener.setRotation(rot);
   23.40      auxListener.setLocation(loc);
   23.41      auxListener.setRotation(rot);
   23.42 +    if (audio_gun.getStatus() == AudioNode.Status.Stopped){
   23.43 +    	System.out.println("I'm Stopped!");
   23.44 +    	this.requestClose(false);
   23.45 +    }
   23.46 +    	
   23.47 +    	
   23.48    }
   23.49  
   23.50  }
    24.1 --- a/src/com/aurellem/capture/hello/HelloVideo.java	Tue Oct 25 12:29:40 2011 -0700
    24.2 +++ b/src/com/aurellem/capture/hello/HelloVideo.java	Wed Oct 26 08:54:12 2011 -0700
    24.3 @@ -3,10 +3,10 @@
    24.4  import java.io.File;
    24.5  import java.io.IOException;
    24.6  
    24.7 -import com.aurellem.capture.AVIVideoRecorder;
    24.8 -import com.aurellem.capture.AbstractVideoRecorder;
    24.9  import com.aurellem.capture.Capture;
   24.10  import com.aurellem.capture.IsoTimer;
   24.11 +import com.aurellem.capture.video.AVIVideoRecorder;
   24.12 +import com.aurellem.capture.video.AbstractVideoRecorder;
   24.13  import com.jme3.app.SimpleApplication;
   24.14  import com.jme3.material.Material;
   24.15  import com.jme3.math.ColorRGBA;
    25.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    25.2 +++ b/src/com/aurellem/capture/hello/TestWrite.java	Wed Oct 26 08:54:12 2011 -0700
    25.3 @@ -0,0 +1,13 @@
    25.4 +package com.aurellem.capture.hello;
    25.5 +
    25.6 +public class TestWrite {
    25.7 +
    25.8 +	
    25.9 +	
   25.10 +	
   25.11 +	
   25.12 +	
   25.13 +	
   25.14 +	
   25.15 +	
   25.16 +}
    26.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    26.2 +++ b/src/com/aurellem/capture/video/AVIOutputStream.java	Wed Oct 26 08:54:12 2011 -0700
    26.3 @@ -0,0 +1,1548 @@
    26.4 +/**
    26.5 + * @(#)AVIOutputStream.java  1.5.1  2011-01-17
    26.6 + *
    26.7 + * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
    26.8 + * All rights reserved.
    26.9 + *
   26.10 + * You may not use, copy or modify this file, except in compliance with the
   26.11 + * license agreement you entered into with Werner Randelshofer.
   26.12 + * For details see accompanying license terms.
   26.13 + */
   26.14 +package com.aurellem.capture.video;
   26.15 +
   26.16 +import java.awt.Dimension;
   26.17 +import java.awt.image.BufferedImage;
   26.18 +import java.awt.image.DataBufferByte;
   26.19 +import java.awt.image.IndexColorModel;
   26.20 +import java.awt.image.WritableRaster;
   26.21 +import java.io.File;
   26.22 +import java.io.FileInputStream;
   26.23 +import java.io.IOException;
   26.24 +import java.io.InputStream;
   26.25 +import java.io.OutputStream;
   26.26 +import java.util.Arrays;
   26.27 +import java.util.Date;
   26.28 +import java.util.LinkedList;
   26.29 +
   26.30 +import javax.imageio.IIOImage;
   26.31 +import javax.imageio.ImageIO;
   26.32 +import javax.imageio.ImageWriteParam;
   26.33 +import javax.imageio.ImageWriter;
   26.34 +import javax.imageio.stream.FileImageOutputStream;
   26.35 +import javax.imageio.stream.ImageOutputStream;
   26.36 +import javax.imageio.stream.MemoryCacheImageOutputStream;
   26.37 +
   26.38 +/**
   26.39 + * This class supports writing of images into an AVI 1.0 video file.
   26.40 + * <p>
   26.41 + * The images are written as video frames.
   26.42 + * <p>
   26.43 + * Video frames can be encoded with one of the following formats:
   26.44 + * <ul>
   26.45 + * <li>JPEG</li>
   26.46 + * <li>PNG</li>
   26.47 + * <li>RAW</li>
   26.48 + * <li>RLE</li>
   26.49 + * </ul>
   26.50 + * All frames must have the same format.
   26.51 + * When JPG is used each frame can have an individual encoding quality.
   26.52 + * <p>
   26.53 + * All frames in an AVI file must have the same duration. The duration can
   26.54 + * be set by setting an appropriate pair of values using methods
   26.55 + * {@link #setFrameRate} and {@link #setTimeScale}.
   26.56 + * <p>
   26.57 + * The length of an AVI 1.0 file is limited to 1 GB.
   26.58 + * This class supports lengths of up to 4 GB, but such files may not work on
   26.59 + * all players.
   26.60 + * <p>
   26.61 + * For detailed information about the AVI RIFF file format see:<br>
   26.62 + * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br>
   26.63 + * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br>
   26.64 + * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br>
   26.65 + *
   26.66 + * @author Werner Randelshofer
   26.67 + * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream..
   26.68 + * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format.
   26.69 + * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets
   26.70 + * in "idx1" chunk.
   26.71 + * <br>1.3.2 2010-12-27 File size limit is 1 GB.
   26.72 + * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets.
   26.73 + * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream.
   26.74 + * Added method getVideoDimension().
   26.75 + * <br>1.2 2009-08-29 Adds support for RAW video format.
   26.76 + * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih
   26.77 + * chunk. Changed the API to reflect that AVI works with frame rates instead of
   26.78 + * with frame durations.
   26.79 + * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG
   26.80 + * encoded video.
   26.81 + * <br>1.0 2008-08-11 Created.
   26.82 + */
   26.83 +public class AVIOutputStream {
   26.84 +
   26.85 +    /**
   26.86 +     * Underlying output stream.
   26.87 +     */
   26.88 +    private ImageOutputStream out;
   26.89 +    /** The offset of the QuickTime stream in the underlying ImageOutputStream.
   26.90 +     * Normally this is 0 unless the underlying stream already contained data
   26.91 +     * when it was passed to the constructor.
   26.92 +     */
   26.93 +    private long streamOffset;
   26.94 +    /** Previous frame for delta compression. */
   26.95 +    private Object previousData;
   26.96 +
   26.97 +    /**
   26.98 +     * Supported video encodings.
   26.99 +     */
  26.100 +    public static enum VideoFormat {
  26.101 +
  26.102 +        RAW, RLE, JPG, PNG;
  26.103 +    }
  26.104 +    /**
  26.105 +     * Current video formats.
  26.106 +     */
  26.107 +    private VideoFormat videoFormat;
  26.108 +    /**
  26.109 +     * Quality of JPEG encoded video frames.
  26.110 +     */
  26.111 +    private float quality = 0.9f;
  26.112 +    /**
  26.113 +     * Creation time of the movie output stream.
  26.114 +     */
  26.115 +    private Date creationTime;
  26.116 +    /**
  26.117 +     * Width of the video frames. All frames must have the same width.
  26.118 +     * The value -1 is used to mark unspecified width.
  26.119 +     */
  26.120 +    private int imgWidth = -1;
  26.121 +    /**
  26.122 +     * Height of the video frames. All frames must have the same height.
  26.123 +     * The value -1 is used to mark unspecified height.
  26.124 +     */
  26.125 +    private int imgHeight = -1;
  26.126 +    /** Number of bits per pixel. */
  26.127 +    private int imgDepth = 24;
  26.128 +    /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */
  26.129 +    private IndexColorModel palette;
  26.130 +    private IndexColorModel previousPalette;
  26.131 +    /** Video encoder. */
  26.132 +    
  26.133 +    /**
  26.134 +     * The timeScale of the movie.
  26.135 +     * <p>
  26.136 +     * Used with frameRate to specify the time scale that this stream will use.
  26.137 +     * Dividing frameRate by timeScale gives the number of samples per second.
  26.138 +     * For video streams, this is the frame rate. For audio streams, this rate
  26.139 +     * corresponds to the time needed to play nBlockAlign bytes of audio, which
  26.140 +     * for PCM audio is the just the sample rate.
  26.141 +     */
  26.142 +    private int timeScale = 1;
  26.143 +    /**
  26.144 +     * The frameRate of the movie in timeScale units.
  26.145 +     * <p>
  26.146 +     * @see timeScale
  26.147 +     */
  26.148 +    private int frameRate = 30;
  26.149 +    /** Interval between keyframes. */
  26.150 +    private int syncInterval = 30;
  26.151 +
  26.152 +    /**
  26.153 +     * The states of the movie output stream.
  26.154 +     */
  26.155 +    private static enum States {
  26.156 +
  26.157 +        STARTED, FINISHED, CLOSED;
  26.158 +    }
  26.159 +    /**
  26.160 +     * The current state of the movie output stream.
  26.161 +     */
  26.162 +    private States state = States.FINISHED;
  26.163 +
  26.164 +    /**
  26.165 +     * AVI stores media data in samples.
  26.166 +     * A sample is a single element in a sequence of time-ordered data.
  26.167 +     */
  26.168 +    private static class Sample {
  26.169 +
  26.170 +        String chunkType;
  26.171 +        /** Offset of the sample relative to the start of the AVI file.
  26.172 +         */
  26.173 +        long offset;
  26.174 +        /** Data length of the sample. */
  26.175 +        long length;
  26.176 +        /**
  26.177 +         * The duration of the sample in time scale units.
  26.178 +         */
  26.179 +        int duration;
  26.180 +        /** Whether the sample is a sync-sample. */
  26.181 +        boolean isSync;
  26.182 +
  26.183 +        /**
  26.184 +         * Creates a new sample.
  26.185 +         * @param duration
  26.186 +         * @param offset
  26.187 +         * @param length
  26.188 +         */
  26.189 +        public Sample(String chunkId, int duration, long offset, long length, boolean isSync) {
  26.190 +            this.chunkType = chunkId;
  26.191 +            this.duration = duration;
  26.192 +            this.offset = offset;
  26.193 +            this.length = length;
  26.194 +            this.isSync = isSync;
  26.195 +        }
  26.196 +    }
  26.197 +    /**
  26.198 +     * List of video frames.
  26.199 +     */
  26.200 +    private LinkedList<Sample> videoFrames;
  26.201 +    /**
  26.202 +     * This chunk holds the whole AVI content.
  26.203 +     */
  26.204 +    private CompositeChunk aviChunk;
  26.205 +    /**
  26.206 +     * This chunk holds the movie frames.
  26.207 +     */
  26.208 +    private CompositeChunk moviChunk;
  26.209 +    /**
  26.210 +     * This chunk holds the AVI Main Header.
  26.211 +     */
  26.212 +    FixedSizeDataChunk avihChunk;
  26.213 +    /**
  26.214 +     * This chunk holds the AVI Stream Header.
  26.215 +     */
  26.216 +    FixedSizeDataChunk strhChunk;
  26.217 +    /**
  26.218 +     * This chunk holds the AVI Stream Format Header.
  26.219 +     */
  26.220 +    FixedSizeDataChunk strfChunk;
  26.221 +
  26.222 +    /**
  26.223 +     * Chunk base class.
  26.224 +     */
  26.225 +    private abstract class Chunk {
  26.226 +
  26.227 +        /**
  26.228 +         * The chunkType of the chunk. A String with the length of 4 characters.
  26.229 +         */
  26.230 +        protected String chunkType;
  26.231 +        /**
  26.232 +         * The offset of the chunk relative to the start of the
  26.233 +         * ImageOutputStream.
  26.234 +         */
  26.235 +        protected long offset;
  26.236 +
  26.237 +        /**
  26.238 +         * Creates a new Chunk at the current position of the ImageOutputStream.
  26.239 +         * @param chunkType The chunkType of the chunk. A string with a length of 4 characters.
  26.240 +         */
  26.241 +        public Chunk(String chunkType) throws IOException {
  26.242 +            this.chunkType = chunkType;
  26.243 +            offset = getRelativeStreamPosition();
  26.244 +        }
  26.245 +
  26.246 +        /**
  26.247 +         * Writes the chunk to the ImageOutputStream and disposes it.
  26.248 +         */
  26.249 +        public abstract void finish() throws IOException;
  26.250 +
  26.251 +        /**
  26.252 +         * Returns the size of the chunk including the size of the chunk header.
  26.253 +         * @return The size of the chunk.
  26.254 +         */
  26.255 +        public abstract long size();
  26.256 +    }
  26.257 +
  26.258 +    /**
  26.259 +     * A CompositeChunk contains an ordered list of Chunks.
  26.260 +     */
  26.261 +    private class CompositeChunk extends Chunk {
  26.262 +
  26.263 +        /**
  26.264 +         * The type of the composite. A String with the length of 4 characters.
  26.265 +         */
  26.266 +        protected String compositeType;
  26.267 +        private LinkedList<Chunk> children;
  26.268 +        private boolean finished;
  26.269 +
  26.270 +        /**
  26.271 +         * Creates a new CompositeChunk at the current position of the
  26.272 +         * ImageOutputStream.
  26.273 +         * @param compositeType The type of the composite.
  26.274 +         * @param chunkType The type of the chunk.
  26.275 +         */
  26.276 +        public CompositeChunk(String compositeType, String chunkType) throws IOException {
  26.277 +            super(chunkType);
  26.278 +            this.compositeType = compositeType;
  26.279 +            //out.write
  26.280 +            out.writeLong(0); // make room for the chunk header
  26.281 +            out.writeInt(0); // make room for the chunk header
  26.282 +            children = new LinkedList<Chunk>();
  26.283 +        }
  26.284 +
  26.285 +        public void add(Chunk child) throws IOException {
  26.286 +            if (children.size() > 0) {
  26.287 +                children.getLast().finish();
  26.288 +            }
  26.289 +            children.add(child);
  26.290 +        }
  26.291 +
  26.292 +        /**
  26.293 +         * Writes the chunk and all its children to the ImageOutputStream
  26.294 +         * and disposes of all resources held by the chunk.
  26.295 +         * @throws java.io.IOException
  26.296 +         */
  26.297 +        @Override
  26.298 +        public void finish() throws IOException {
  26.299 +            if (!finished) {
  26.300 +                if (size() > 0xffffffffL) {
  26.301 +                    throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size());
  26.302 +                }
  26.303 +
  26.304 +                long pointer = getRelativeStreamPosition();
  26.305 +                seekRelative(offset);
  26.306 +
  26.307 +                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
  26.308 +                headerData.writeType(compositeType);
  26.309 +                headerData.writeUInt(size() - 8);
  26.310 +                headerData.writeType(chunkType);
  26.311 +                for (Chunk child : children) {
  26.312 +                    child.finish();
  26.313 +                }
  26.314 +                seekRelative(pointer);
  26.315 +                if (size() % 2 == 1) {
  26.316 +                    out.writeByte(0); // write pad byte
  26.317 +                }
  26.318 +                finished = true;
  26.319 +            }
  26.320 +        }
  26.321 +
  26.322 +        @Override
  26.323 +        public long size() {
  26.324 +            long length = 12;
  26.325 +            for (Chunk child : children) {
  26.326 +                length += child.size() + child.size() % 2;
  26.327 +            }
  26.328 +            return length;
  26.329 +        }
  26.330 +    }
  26.331 +
  26.332 +    /**
  26.333 +     * Data Chunk.
  26.334 +     */
  26.335 +    private class DataChunk extends Chunk {
  26.336 +
  26.337 +        private DataChunkOutputStream data;
  26.338 +        private boolean finished;
  26.339 +
  26.340 +        /**
  26.341 +         * Creates a new DataChunk at the current position of the
  26.342 +         * ImageOutputStream.
  26.343 +         * @param chunkType The chunkType of the chunk.
  26.344 +         */
  26.345 +        public DataChunk(String name) throws IOException {
  26.346 +            super(name);
  26.347 +            out.writeLong(0); // make room for the chunk header
  26.348 +            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
  26.349 +        }
  26.350 +
  26.351 +        public DataChunkOutputStream getOutputStream() {
  26.352 +            if (finished) {
  26.353 +                throw new IllegalStateException("DataChunk is finished");
  26.354 +            }
  26.355 +            return data;
  26.356 +        }
  26.357 +
  26.358 +        /**
  26.359 +         * Returns the offset of this chunk to the beginning of the random access file
  26.360 +         * @return
  26.361 +         */
  26.362 +        public long getOffset() {
  26.363 +            return offset;
  26.364 +        }
  26.365 +
  26.366 +        @Override
  26.367 +        public void finish() throws IOException {
  26.368 +            if (!finished) {
  26.369 +                long sizeBefore = size();
  26.370 +
  26.371 +                if (size() > 0xffffffffL) {
  26.372 +                    throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size());
  26.373 +                }
  26.374 +
  26.375 +                long pointer = getRelativeStreamPosition();
  26.376 +                seekRelative(offset);
  26.377 +
  26.378 +                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
  26.379 +                headerData.writeType(chunkType);
  26.380 +                headerData.writeUInt(size() - 8);
  26.381 +                seekRelative(pointer);
  26.382 +                if (size() % 2 == 1) {
  26.383 +                    out.writeByte(0); // write pad byte
  26.384 +                }
  26.385 +                finished = true;
  26.386 +                long sizeAfter = size();
  26.387 +                if (sizeBefore != sizeAfter) {
  26.388 +                    System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter);
  26.389 +                }
  26.390 +            }
  26.391 +        }
  26.392 +
  26.393 +        @Override
  26.394 +        public long size() {
  26.395 +            return 8 + data.size();
  26.396 +        }
  26.397 +    }
  26.398 +
  26.399 +    /**
  26.400 +     * A DataChunk with a fixed size.
  26.401 +     */
  26.402 +    private class FixedSizeDataChunk extends Chunk {
  26.403 +
  26.404 +        private DataChunkOutputStream data;
  26.405 +        private boolean finished;
  26.406 +        private long fixedSize;
  26.407 +
  26.408 +        /**
  26.409 +         * Creates a new DataChunk at the current position of the
  26.410 +         * ImageOutputStream.
  26.411 +         * @param chunkType The chunkType of the chunk.
  26.412 +         */
  26.413 +        public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException {
  26.414 +            super(chunkType);
  26.415 +            this.fixedSize = fixedSize;
  26.416 +            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
  26.417 +            data.writeType(chunkType);
  26.418 +            data.writeUInt(fixedSize);
  26.419 +            data.clearCount();
  26.420 +
  26.421 +            // Fill fixed size with nulls
  26.422 +            byte[] buf = new byte[(int) Math.min(512, fixedSize)];
  26.423 +            long written = 0;
  26.424 +            while (written < fixedSize) {
  26.425 +                data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written));
  26.426 +                written += Math.min(buf.length, fixedSize - written);
  26.427 +            }
  26.428 +            if (fixedSize % 2 == 1) {
  26.429 +                out.writeByte(0); // write pad byte
  26.430 +            }
  26.431 +            seekToStartOfData();
  26.432 +        }
  26.433 +
  26.434 +        public DataChunkOutputStream getOutputStream() {
  26.435 +            /*if (finished) {
  26.436 +            throw new IllegalStateException("DataChunk is finished");
  26.437 +            }*/
  26.438 +            return data;
  26.439 +        }
  26.440 +
  26.441 +        /**
  26.442 +         * Returns the offset of this chunk to the beginning of the random access file
  26.443 +         * @return
  26.444 +         */
  26.445 +        public long getOffset() {
  26.446 +            return offset;
  26.447 +        }
  26.448 +
  26.449 +        public void seekToStartOfData() throws IOException {
  26.450 +            seekRelative(offset + 8);
  26.451 +            data.clearCount();
  26.452 +        }
  26.453 +
  26.454 +        public void seekToEndOfChunk() throws IOException {
  26.455 +            seekRelative(offset + 8 + fixedSize + fixedSize % 2);
  26.456 +        }
  26.457 +
  26.458 +        @Override
  26.459 +        public void finish() throws IOException {
  26.460 +            if (!finished) {
  26.461 +                finished = true;
  26.462 +            }
  26.463 +        }
  26.464 +
  26.465 +        @Override
  26.466 +        public long size() {
  26.467 +            return 8 + fixedSize;
  26.468 +        }
  26.469 +    }
  26.470 +
  26.471 +    /**
  26.472 +     * Creates a new AVI file with the specified video format and
  26.473 +     * frame rate. The video has 24 bits per pixel.
  26.474 +     *
  26.475 +     * @param file the output file
  26.476 +     * @param format Selects an encoder for the video format.
  26.477 +     * @param bitsPerPixel the number of bits per pixel.
  26.478 +     * @exception IllegalArgumentException if videoFormat is null or if
  26.479 +     * frame rate is <= 0
  26.480 +     */
  26.481 +    public AVIOutputStream(File file, VideoFormat format) throws IOException {
  26.482 +        this(file,format,24);
  26.483 +    }
  26.484 +    /**
  26.485 +     * Creates a new AVI file with the specified video format and
  26.486 +     * frame rate.
  26.487 +     *
  26.488 +     * @param file the output file
  26.489 +     * @param format Selects an encoder for the video format.
  26.490 +     * @param bitsPerPixel the number of bits per pixel.
  26.491 +     * @exception IllegalArgumentException if videoFormat is null or if
  26.492 +     * frame rate is <= 0
  26.493 +     */
  26.494 +    public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException {
  26.495 +        if (format == null) {
  26.496 +            throw new IllegalArgumentException("format must not be null");
  26.497 +        }
  26.498 +
  26.499 +        if (file.exists()) {
  26.500 +            file.delete();
  26.501 +        }
  26.502 +        this.out = new FileImageOutputStream(file);
  26.503 +        this.streamOffset = 0;
  26.504 +        this.videoFormat = format;
  26.505 +        this.videoFrames = new LinkedList<Sample>();
  26.506 +        this.imgDepth = bitsPerPixel;
  26.507 +        if (imgDepth == 4) {
  26.508 +            byte[] gray = new byte[16];
  26.509 +            for (int i = 0; i < gray.length; i++) {
  26.510 +                gray[i] = (byte) ((i << 4) | i);
  26.511 +            }
  26.512 +            palette = new IndexColorModel(4, 16, gray, gray, gray);
  26.513 +        } else if (imgDepth == 8) {
  26.514 +            byte[] gray = new byte[256];
  26.515 +            for (int i = 0; i < gray.length; i++) {
  26.516 +                gray[i] = (byte) i;
  26.517 +            }
  26.518 +            palette = new IndexColorModel(8, 256, gray, gray, gray);
  26.519 +        }
  26.520 +
  26.521 +    }
  26.522 +
  26.523 +    /**
  26.524 +     * Creates a new AVI output stream with the specified video format and
  26.525 +     * framerate.
  26.526 +     *
  26.527 +     * @param out the underlying output stream
  26.528 +     * @param format Selects an encoder for the video format.
  26.529 +     * @exception IllegalArgumentException if videoFormat is null or if
  26.530 +     * framerate is <= 0
  26.531 +     */
  26.532 +    public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {
  26.533 +        if (format == null) {
  26.534 +            throw new IllegalArgumentException("format must not be null");
  26.535 +        }
  26.536 +        this.out = out;
  26.537 +        this.streamOffset = out.getStreamPosition();
  26.538 +        this.videoFormat = format;
  26.539 +        this.videoFrames = new LinkedList<Sample>();
  26.540 +    }
  26.541 +
  26.542 +    /**
  26.543 +     * Used with frameRate to specify the time scale that this stream will use.
  26.544 +     * Dividing frameRate by timeScale gives the number of samples per second.
  26.545 +     * For video streams, this is the frame rate. For audio streams, this rate
  26.546 +     * corresponds to the time needed to play nBlockAlign bytes of audio, which
  26.547 +     * for PCM audio is the just the sample rate.
  26.548 +     * <p>
  26.549 +     * The default value is 1.
  26.550 +     *
  26.551 +     * @param newValue
  26.552 +     */
  26.553 +    public void setTimeScale(int newValue) {
  26.554 +        if (newValue <= 0) {
  26.555 +            throw new IllegalArgumentException("timeScale must be greater 0");
  26.556 +        }
  26.557 +        this.timeScale = newValue;
  26.558 +    }
  26.559 +
  26.560 +    /**
  26.561 +     * Returns the time scale of this media.
  26.562 +     *
  26.563 +     * @return time scale
  26.564 +     */
  26.565 +    public int getTimeScale() {
  26.566 +        return timeScale;
  26.567 +    }
  26.568 +
  26.569 +    /**
  26.570 +     * Sets the rate of video frames in time scale units.
  26.571 +     * <p>
  26.572 +     * The default value is 30. Together with the default value 1 of timeScale
  26.573 +     * this results in 30 frames pers second.
  26.574 +     *
  26.575 +     * @param newValue
  26.576 +     */
  26.577 +    public void setFrameRate(int newValue) {
  26.578 +        if (newValue <= 0) {
  26.579 +            throw new IllegalArgumentException("frameDuration must be greater 0");
  26.580 +        }
  26.581 +        if (state == States.STARTED) {
  26.582 +            throw new IllegalStateException("frameDuration must be set before the first frame is written");
  26.583 +        }
  26.584 +        this.frameRate = newValue;
  26.585 +    }
  26.586 +
  26.587 +    /**
  26.588 +     * Returns the frame rate of this media.
  26.589 +     *
  26.590 +     * @return frame rate
  26.591 +     */
  26.592 +    public int getFrameRate() {
  26.593 +        return frameRate;
  26.594 +    }
  26.595 +
  26.596 +    /** Sets the global color palette. */
  26.597 +    public void setPalette(IndexColorModel palette) {
  26.598 +        this.palette = palette;
  26.599 +    }
  26.600 +
  26.601 +    /**
  26.602 +     * Sets the compression quality of the video track.
  26.603 +     * A value of 0 stands for "high compression is important" a value of
  26.604 +     * 1 for "high image quality is important".
  26.605 +     * <p>
  26.606 +     * Changing this value affects frames which are subsequently written
  26.607 +     * to the AVIOutputStream. Frames which have already been written
  26.608 +     * are not changed.
  26.609 +     * <p>
  26.610 +     * This value has only effect on videos encoded with JPG format.
  26.611 +     * <p>
  26.612 +     * The default value is 0.9.
  26.613 +     *
  26.614 +     * @param newValue
  26.615 +     */
  26.616 +    public void setVideoCompressionQuality(float newValue) {
  26.617 +        this.quality = newValue;
  26.618 +    }
  26.619 +
  26.620 +    /**
  26.621 +     * Returns the video compression quality.
  26.622 +     *
  26.623 +     * @return video compression quality
  26.624 +     */
  26.625 +    public float getVideoCompressionQuality() {
  26.626 +        return quality;
  26.627 +    }
  26.628 +
  26.629 +    /**
  26.630 +     * Sets the dimension of the video track.
  26.631 +     * <p>
  26.632 +     * You need to explicitly set the dimension, if you add all frames from
  26.633 +     * files or input streams.
  26.634 +     * <p>
  26.635 +     * If you add frames from buffered images, then AVIOutputStream
  26.636 +     * can determine the video dimension from the image width and height.
  26.637 +     *
  26.638 +     * @param width Must be greater than 0.
  26.639 +     * @param height Must be greater than 0.
  26.640 +     */
  26.641 +    public void setVideoDimension(int width, int height) {
  26.642 +        if (width < 1 || height < 1) {
  26.643 +            throw new IllegalArgumentException("width and height must be greater zero.");
  26.644 +        }
  26.645 +        this.imgWidth = width;
  26.646 +        this.imgHeight = height;
  26.647 +    }
  26.648 +
  26.649 +    /**
  26.650 +     * Gets the dimension of the video track.
  26.651 +     * <p>
  26.652 +     * Returns null if the dimension is not known.
  26.653 +     */
  26.654 +    public Dimension getVideoDimension() {
  26.655 +        if (imgWidth < 1 || imgHeight < 1) {
  26.656 +            return null;
  26.657 +        }
  26.658 +        return new Dimension(imgWidth, imgHeight);
  26.659 +    }
  26.660 +
  26.661 +    /**
  26.662 +     * Sets the state of the QuickTimeOutpuStream to started.
  26.663 +     * <p>
  26.664 +     * If the state is changed by this method, the prolog is
  26.665 +     * written.
  26.666 +     */
  26.667 +    private void ensureStarted() throws IOException {
  26.668 +        if (state != States.STARTED) {
  26.669 +            creationTime = new Date();
  26.670 +            writeProlog();
  26.671 +            state = States.STARTED;
  26.672 +        }
  26.673 +    }
  26.674 +
    /**
     * Writes a frame to the video track.
     * <p>
     * If the dimension of the video track has not been specified yet, it
     * is derived from the first buffered image added to the AVIOutputStream.
     * <p>
     * For the RAW format at 4 or 8 bits per pixel, a palette-change chunk
     * ("00pc") is emitted before the frame whenever the image's palette
     * differs from the previously written one.
     *
     * @param image The frame image.
     *
     * @throws IllegalArgumentException if the duration is less than 1, or
     * if the dimension of the frame does not match the dimension of the video
     * track.
     * @throws IOException if writing the image failed, or if the resulting
     * file would exceed the 4 GB AVI size limit.
     */
    public void writeFrame(BufferedImage image) throws IOException {
        ensureOpen();
        ensureStarted();

        // Get the dimensions of the first image
        if (imgWidth == -1) {
            imgWidth = image.getWidth();
            imgHeight = image.getHeight();
        } else {
            // The dimension of the image must match the dimension of the video track
            if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) {
                throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size()
                        + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
                        + ") differs from image[0] (width="
                        + imgWidth + ", height=" + imgHeight);
            }
        }

        DataChunk videoFrameChunk;
        long offset = getRelativeStreamPosition();
        // Every frame written by this method is treated as a key frame.
        boolean isSync = true;
        switch (videoFormat) {
            case RAW: {
                switch (imgDepth) {
                    case 4: {
                        // NOTE(review): assumes the image uses an IndexColorModel
                        // with a byte-packed raster — the casts below fail otherwise.
                        IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
                        int[] imgRGBs = new int[16];
                        imgPalette.getRGBs(imgRGBs);
                        int[] previousRGBs = new int[16];
                        if (previousPalette == null) {
                            previousPalette = palette;
                        }
                        previousPalette.getRGBs(previousRGBs);
                        // Emit a "00pc" palette-change chunk if the palette changed.
                        if (!Arrays.equals(imgRGBs, previousRGBs)) {
                            previousPalette = imgPalette;
                            DataChunk paletteChangeChunk = new DataChunk("00pc");
                            /*
                            int first = imgPalette.getMapSize();
                            int last = -1;
                            for (int i = 0; i < 16; i++) {
                            if (previousRGBs[i] != imgRGBs[i] && i < first) {
                            first = i;
                            }
                            if (previousRGBs[i] != imgRGBs[i] && i > last) {
                            last = i;
                            }
                            }*/
                            // Always rewrite the full palette rather than the
                            // changed sub-range (see commented-out code above).
                            int first = 0;
                            int last = imgPalette.getMapSize() - 1;
                            /*
                             * typedef struct {
                            BYTE         bFirstEntry;
                            BYTE         bNumEntries;
                            WORD         wFlags;
                            PALETTEENTRY peNew[];
                            } AVIPALCHANGE;
                             *
                             * typedef struct tagPALETTEENTRY {
                            BYTE peRed;
                            BYTE peGreen;
                            BYTE peBlue;
                            BYTE peFlags;
                            } PALETTEENTRY;
                             */
                            DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
                            pOut.writeByte(first);//bFirstEntry
                            pOut.writeByte(last - first + 1);//bNumEntries
                            pOut.writeShort(0);//wFlags

                            for (int i = first; i <= last; i++) {
                                pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
                                pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
                                pOut.writeByte(imgRGBs[i] & 0xff); // blue
                                pOut.writeByte(0); // reserved (peFlags)
                            }

                            moviChunk.add(paletteChangeChunk);
                            paletteChangeChunk.finish();
                            long length = getRelativeStreamPosition() - offset;
                            // Record the palette change as a non-sync sample with
                            // duration 0; the -8 excludes the chunk header bytes.
                            videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
                            offset = getRelativeStreamPosition();
                        }

                        // Pack two 4-bit pixels per byte, bottom-up as DIBs require.
                        videoFrameChunk = new DataChunk("00db");
                        byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
                        // NOTE(review): rgb4 holds one packed scanline; this assumes
                        // imgWidth is even — an odd width would read past the row
                        // end below. TODO confirm callers only use even widths.
                        byte[] rgb4 = new byte[imgWidth / 2];
                        for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
                            for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) {
                                rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf));
                            }
                            videoFrameChunk.getOutputStream().write(rgb4);
                        }
                        break;
                    }
                    case 8: {
                        // Same palette-change handling as the 4-bit case, with a
                        // 256-entry palette.
                        IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
                        int[] imgRGBs = new int[256];
                        imgPalette.getRGBs(imgRGBs);
                        int[] previousRGBs = new int[256];
                        if (previousPalette == null) {
                            previousPalette = palette;
                        }
                        previousPalette.getRGBs(previousRGBs);
                        if (!Arrays.equals(imgRGBs, previousRGBs)) {
                            previousPalette = imgPalette;
                            DataChunk paletteChangeChunk = new DataChunk("00pc");
                            /*
                            int first = imgPalette.getMapSize();
                            int last = -1;
                            for (int i = 0; i < 16; i++) {
                            if (previousRGBs[i] != imgRGBs[i] && i < first) {
                            first = i;
                            }
                            if (previousRGBs[i] != imgRGBs[i] && i > last) {
                            last = i;
                            }
                            }*/
                            int first = 0;
                            int last = imgPalette.getMapSize() - 1;
                            /*
                             * typedef struct {
                            BYTE         bFirstEntry;
                            BYTE         bNumEntries;
                            WORD         wFlags;
                            PALETTEENTRY peNew[];
                            } AVIPALCHANGE;
                             *
                             * typedef struct tagPALETTEENTRY {
                            BYTE peRed;
                            BYTE peGreen;
                            BYTE peBlue;
                            BYTE peFlags;
                            } PALETTEENTRY;
                             */
                            DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
                            pOut.writeByte(first);//bFirstEntry
                            pOut.writeByte(last - first + 1);//bNumEntries
                            pOut.writeShort(0);//wFlags

                            for (int i = first; i <= last; i++) {
                                pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
                                pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
                                pOut.writeByte(imgRGBs[i] & 0xff); // blue
                                pOut.writeByte(0); // reserved (peFlags)
                            }

                            moviChunk.add(paletteChangeChunk);
                            paletteChangeChunk.finish();
                            long length = getRelativeStreamPosition() - offset;
                            videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
                            offset = getRelativeStreamPosition();
                        }

                        // 8-bit rows can be copied directly, bottom-up.
                        videoFrameChunk = new DataChunk("00db");
                        byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
                        for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
                            videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth);
                        }
                        break;
                    }
                    default: {
                        // 24-bit truecolor: convert RGB scanlines to the DIB's
                        // little-endian BGR order, bottom-up.
                        videoFrameChunk = new DataChunk("00db");
                        WritableRaster raster = image.getRaster();
                        int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data
                        byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data
                        for (int y = imgHeight - 1; y >= 0; --y) { // Upside down
                            raster.getPixels(0, y, imgWidth, 1, raw);
                            for (int x = 0, n = imgWidth * 3; x < n; x += 3) {
                                bytes[x + 2] = (byte) raw[x]; // Blue
                                bytes[x + 1] = (byte) raw[x + 1]; // Green
                                bytes[x] = (byte) raw[x + 2]; // Red
                            }
                            videoFrameChunk.getOutputStream().write(bytes);
                        }
                        break;
                    }
                }
                break;
            }
            
            case JPG: {
                // Encode the frame as JPEG via ImageIO, honoring the quality set
                // with setVideoCompressionQuality().
                videoFrameChunk = new DataChunk("00dc");
                ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next();
                ImageWriteParam iwParam = iw.getDefaultWriteParam();
                iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
                iwParam.setCompressionQuality(quality);
                MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
                iw.setOutput(imgOut);
                IIOImage img = new IIOImage(image, null, null);
                iw.write(null, img, iwParam);
                iw.dispose();
                break;
            }
            case PNG:
            default: {
                // Encode the frame as PNG (lossless; quality is ignored).
                videoFrameChunk = new DataChunk("00dc");
                ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next();
                ImageWriteParam iwParam = iw.getDefaultWriteParam();
                MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
                iw.setOutput(imgOut);
                IIOImage img = new IIOImage(image, null, null);
                iw.write(null, img, iwParam);
                iw.dispose();
                break;
            }
        }
        long length = getRelativeStreamPosition() - offset;
        moviChunk.add(videoFrameChunk);
        videoFrameChunk.finish();

        // Record the frame for the idx1 index; -8 excludes the chunk header.
        videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync));
        if (getRelativeStreamPosition() > 1L << 32) {
            throw new IOException("AVI file is larger than 4 GB");
        }
    }
  26.903 +
  26.904 +    /**
  26.905 +     * Writes a frame from a file to the video track.
  26.906 +     * <p>
  26.907 +     * This method does not inspect the contents of the file.
  26.908 +     * For example, Its your responsibility to only add JPG files if you have
  26.909 +     * chosen the JPEG video format.
  26.910 +     * <p>
  26.911 +     * If you add all frames from files or from input streams, then you
  26.912 +     * have to explicitly set the dimension of the video track before you
  26.913 +     * call finish() or close().
  26.914 +     *
  26.915 +     * @param file The file which holds the image data.
  26.916 +     *
  26.917 +     * @throws IllegalStateException if the duration is less than 1.
  26.918 +     * @throws IOException if writing the image failed.
  26.919 +     */
  26.920 +    public void writeFrame(File file) throws IOException {
  26.921 +        FileInputStream in = null;
  26.922 +        try {
  26.923 +            in = new FileInputStream(file);
  26.924 +            writeFrame(in);
  26.925 +        } finally {
  26.926 +            if (in != null) {
  26.927 +                in.close();
  26.928 +            }
  26.929 +        }
  26.930 +    }
  26.931 +
  26.932 +    /**
  26.933 +     * Writes a frame to the video track.
  26.934 +     * <p>
  26.935 +     * This method does not inspect the contents of the file.
  26.936 +     * For example, its your responsibility to only add JPG files if you have
  26.937 +     * chosen the JPEG video format.
  26.938 +     * <p>
  26.939 +     * If you add all frames from files or from input streams, then you
  26.940 +     * have to explicitly set the dimension of the video track before you
  26.941 +     * call finish() or close().
  26.942 +     *
  26.943 +     * @param in The input stream which holds the image data.
  26.944 +     *
  26.945 +     * @throws IllegalArgumentException if the duration is less than 1.
  26.946 +     * @throws IOException if writing the image failed.
  26.947 +     */
  26.948 +    public void writeFrame(InputStream in) throws IOException {
  26.949 +        ensureOpen();
  26.950 +        ensureStarted();
  26.951 +
  26.952 +        DataChunk videoFrameChunk = new DataChunk(
  26.953 +                videoFormat == VideoFormat.RAW ? "00db" : "00dc");
  26.954 +        moviChunk.add(videoFrameChunk);
  26.955 +        OutputStream mdatOut = videoFrameChunk.getOutputStream();
  26.956 +        long offset = getRelativeStreamPosition();
  26.957 +        byte[] buf = new byte[512];
  26.958 +        int len;
  26.959 +        while ((len = in.read(buf)) != -1) {
  26.960 +            mdatOut.write(buf, 0, len);
  26.961 +        }
  26.962 +        long length = getRelativeStreamPosition() - offset;
  26.963 +        videoFrameChunk.finish();
  26.964 +        videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true));
  26.965 +        if (getRelativeStreamPosition() > 1L << 32) {
  26.966 +            throw new IOException("AVI file is larger than 4 GB");
  26.967 +        }
  26.968 +    }
  26.969 +
  26.970 +    /**
  26.971 +     * Closes the movie file as well as the stream being filtered.
  26.972 +     *
  26.973 +     * @exception IOException if an I/O error has occurred
  26.974 +     */
  26.975 +    public void close() throws IOException {
  26.976 +        if (state == States.STARTED) {
  26.977 +            finish();
  26.978 +        }
  26.979 +        if (state != States.CLOSED) {
  26.980 +            out.close();
  26.981 +            state = States.CLOSED;
  26.982 +        }
  26.983 +    }
  26.984 +
  26.985 +    /**
  26.986 +     * Finishes writing the contents of the AVI output stream without closing
  26.987 +     * the underlying stream. Use this method when applying multiple filters
  26.988 +     * in succession to the same output stream.
  26.989 +     *
  26.990 +     * @exception IllegalStateException if the dimension of the video track
  26.991 +     * has not been specified or determined yet.
  26.992 +     * @exception IOException if an I/O exception has occurred
  26.993 +     */
  26.994 +    public void finish() throws IOException {
  26.995 +        ensureOpen();
  26.996 +        if (state != States.FINISHED) {
  26.997 +            if (imgWidth == -1 || imgHeight == -1) {
  26.998 +                throw new IllegalStateException("image width and height must be specified");
  26.999 +            }
 26.1000 +
 26.1001 +            moviChunk.finish();
 26.1002 +            writeEpilog();
 26.1003 +            state = States.FINISHED;
 26.1004 +            imgWidth = imgHeight = -1;
 26.1005 +        }
 26.1006 +    }
 26.1007 +
 26.1008 +    /**
 26.1009 +     * Check to make sure that this stream has not been closed
 26.1010 +     */
 26.1011 +    private void ensureOpen() throws IOException {
 26.1012 +        if (state == States.CLOSED) {
 26.1013 +            throw new IOException("Stream closed");
 26.1014 +        }
 26.1015 +    }
 26.1016 +
 26.1017 +    /** Gets the position relative to the beginning of the QuickTime stream.
 26.1018 +     * <p>
 26.1019 +     * Usually this value is equal to the stream position of the underlying
 26.1020 +     * ImageOutputStream, but can be larger if the underlying stream already
 26.1021 +     * contained data.
 26.1022 +     *
 26.1023 +     * @return The relative stream position.
 26.1024 +     * @throws IOException
 26.1025 +     */
 26.1026 +    private long getRelativeStreamPosition() throws IOException {
 26.1027 +        return out.getStreamPosition() - streamOffset;
 26.1028 +    }
 26.1029 +
 26.1030 +    /** Seeks relative to the beginning of the QuickTime stream.
 26.1031 +     * <p>
 26.1032 +     * Usually this equal to seeking in the underlying ImageOutputStream, but
 26.1033 +     * can be different if the underlying stream already contained data.
 26.1034 +     *
 26.1035 +     */
 26.1036 +    private void seekRelative(long newPosition) throws IOException {
 26.1037 +        out.seek(newPosition + streamOffset);
 26.1038 +    }
 26.1039 +
    /**
     * Writes the RIFF/AVI prolog: the RIFF container, the header list with
     * placeholder avih/strh/strf chunks (filled in later by writeEpilog),
     * and the movi list that will receive the frame data.
     */
    private void writeProlog() throws IOException {
        // The file has the following structure:
        //
        // .RIFF AVI
        // ..avih (AVI Header Chunk)
        // ..LIST strl
        // ...strh (Stream Header Chunk)
        // ...strf (Stream Format Chunk)
        // ..LIST movi
        // ...00dc (Compressed video data chunk in Track 00, repeated for each frame)
        // ..idx1 (List of video data chunks and their location in the file)

        // The RIFF AVI Chunk holds the complete movie
        aviChunk = new CompositeChunk("RIFF", "AVI ");
        CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl");

        // Write empty AVI Main Header Chunk - we fill the data in later
        aviChunk.add(hdrlChunk);
        avihChunk = new FixedSizeDataChunk("avih", 56);
        avihChunk.seekToEndOfChunk();
        hdrlChunk.add(avihChunk);

        CompositeChunk strlChunk = new CompositeChunk("LIST", "strl");
        hdrlChunk.add(strlChunk);

        // Write empty AVI Stream Header Chunk - we fill the data in later
        strhChunk = new FixedSizeDataChunk("strh", 56);
        strhChunk.seekToEndOfChunk();
        strlChunk.add(strhChunk);
        // strf holds a 40-byte BITMAPINFOHEADER plus 4 bytes per palette entry
        // when an indexed palette is present.
        strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4);
        strfChunk.seekToEndOfChunk();
        strlChunk.add(strfChunk);

        moviChunk = new CompositeChunk("LIST", "movi");
        aviChunk.add(moviChunk);


    }
 26.1078 +
 26.1079 +    private void writeEpilog() throws IOException {
 26.1080 +        // Compute values
 26.1081 +        int duration = 0;
 26.1082 +        for (Sample s : videoFrames) {
 26.1083 +            duration += s.duration;
 26.1084 +        }
 26.1085 +        long bufferSize = 0;
 26.1086 +        for (Sample s : videoFrames) {
 26.1087 +            if (s.length > bufferSize) {
 26.1088 +                bufferSize = s.length;
 26.1089 +            }
 26.1090 +        }
 26.1091 +
 26.1092 +
 26.1093 +        DataChunkOutputStream d;
 26.1094 +
 26.1095 +        /* Create Idx1 Chunk and write data
 26.1096 +         * -------------
 26.1097 +        typedef struct _avioldindex {
 26.1098 +        FOURCC  fcc;
 26.1099 +        DWORD   cb;
 26.1100 +        struct _avioldindex_entry {
 26.1101 +        DWORD   dwChunkId;
 26.1102 +        DWORD   dwFlags;
 26.1103 +        DWORD   dwOffset;
 26.1104 +        DWORD   dwSize;
 26.1105 +        } aIndex[];
 26.1106 +        } AVIOLDINDEX;
 26.1107 +         */
 26.1108 +        DataChunk idx1Chunk = new DataChunk("idx1");
 26.1109 +        aviChunk.add(idx1Chunk);
 26.1110 +        d = idx1Chunk.getOutputStream();
 26.1111 +        long moviListOffset = moviChunk.offset + 8;
 26.1112 +        //moviListOffset = 0;
 26.1113 +        for (Sample f : videoFrames) {
 26.1114 +
 26.1115 +            d.writeType(f.chunkType); // dwChunkId
 26.1116 +            // Specifies a FOURCC that identifies a stream in the AVI file. The
 26.1117 +            // FOURCC must have the form 'xxyy' where xx is the stream number and yy
 26.1118 +            // is a two-character code that identifies the contents of the stream:
 26.1119 +            //
 26.1120 +            // Two-character code   Description
 26.1121 +            //  db                  Uncompressed video frame
 26.1122 +            //  dc                  Compressed video frame
 26.1123 +            //  pc                  Palette change
 26.1124 +            //  wb                  Audio data
 26.1125 +
 26.1126 +            d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)//
 26.1127 +                    | (f.isSync ? 0x10 : 0x0)); // dwFlags
 26.1128 +            // Specifies a bitwise combination of zero or more of the following
 26.1129 +            // flags:
 26.1130 +            //
 26.1131 +            // Value    Name            Description
 26.1132 +            // 0x10     AVIIF_KEYFRAME  The data chunk is a key frame.
 26.1133 +            // 0x1      AVIIF_LIST      The data chunk is a 'rec ' list.
 26.1134 +            // 0x100    AVIIF_NO_TIME   The data chunk does not affect the timing of the
 26.1135 +            //                          stream. For example, this flag should be set for
 26.1136 +            //                          palette changes.
 26.1137 +
 26.1138 +            d.writeUInt(f.offset - moviListOffset); // dwOffset
 26.1139 +            // Specifies the location of the data chunk in the file. The value
 26.1140 +            // should be specified as an offset, in bytes, from the start of the
 26.1141 +            // 'movi' list; however, in some AVI files it is given as an offset from
 26.1142 +            // the start of the file.
 26.1143 +
 26.1144 +            d.writeUInt(f.length); // dwSize
 26.1145 +            // Specifies the size of the data chunk, in bytes.
 26.1146 +        }
 26.1147 +        idx1Chunk.finish();
 26.1148 +
 26.1149 +        /* Write Data into AVI Main Header Chunk
 26.1150 +         * -------------
 26.1151 +         * The AVIMAINHEADER structure defines global information in an AVI file.
 26.1152 +         * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx
 26.1153 +        typedef struct _avimainheader {
 26.1154 +        FOURCC fcc;
 26.1155 +        DWORD  cb;
 26.1156 +        DWORD  dwMicroSecPerFrame;
 26.1157 +        DWORD  dwMaxBytesPerSec;
 26.1158 +        DWORD  dwPaddingGranularity;
 26.1159 +        DWORD  dwFlags;
 26.1160 +        DWORD  dwTotalFrames;
 26.1161 +        DWORD  dwInitialFrames;
 26.1162 +        DWORD  dwStreams;
 26.1163 +        DWORD  dwSuggestedBufferSize;
 26.1164 +        DWORD  dwWidth;
 26.1165 +        DWORD  dwHeight;
 26.1166 +        DWORD  dwReserved[4];
 26.1167 +        } AVIMAINHEADER; */
 26.1168 +        avihChunk.seekToStartOfData();
 26.1169 +        d = avihChunk.getOutputStream();
 26.1170 +
 26.1171 +        d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame
 26.1172 +        // Specifies the number of microseconds between frames.
 26.1173 +        // This value indicates the overall timing for the file.
 26.1174 +
 26.1175 +        d.writeUInt(0); // dwMaxBytesPerSec
 26.1176 +        // Specifies the approximate maximum data rate of the file.
 26.1177 +        // This value indicates the number of bytes per second the system
 26.1178 +        // must handle to present an AVI sequence as specified by the other
 26.1179 +        // parameters contained in the main header and stream header chunks.
 26.1180 +
 26.1181 +        d.writeUInt(0); // dwPaddingGranularity
 26.1182 +        // Specifies the alignment for data, in bytes. Pad the data to multiples
 26.1183 +        // of this value.
 26.1184 +
 26.1185 +        d.writeUInt(0x10); // dwFlags (0x10 == hasIndex)
 26.1186 +        // Contains a bitwise combination of zero or more of the following
 26.1187 +        // flags:
 26.1188 +        //
 26.1189 +        // Value   Name         Description
 26.1190 +        // 0x10    AVIF_HASINDEX Indicates the AVI file has an index.
 26.1191 +        // 0x20    AVIF_MUSTUSEINDEX Indicates that application should use the
 26.1192 +        //                      index, rather than the physical ordering of the
 26.1193 +        //                      chunks in the file, to determine the order of
 26.1194 +        //                      presentation of the data. For example, this flag
 26.1195 +        //                      could be used to create a list of frames for
 26.1196 +        //                      editing.
 26.1197 +        // 0x100   AVIF_ISINTERLEAVED Indicates the AVI file is interleaved.
 26.1198 +        // 0x1000  AVIF_WASCAPTUREFILE Indicates the AVI file is a specially
 26.1199 +        //                      allocated file used for capturing real-time
 26.1200 +        //                      video. Applications should warn the user before
 26.1201 +        //                      writing over a file with this flag set because
 26.1202 +        //                      the user probably defragmented this file.
 26.1203 +        // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted
 26.1204 +        //                      data and software. When this flag is used,
 26.1205 +        //                      software should not permit the data to be
 26.1206 +        //                      duplicated.
 26.1207 +
 26.1208 +        d.writeUInt(videoFrames.size()); // dwTotalFrames
 26.1209 +        // Specifies the total number of frames of data in the file.
 26.1210 +
 26.1211 +        d.writeUInt(0); // dwInitialFrames
 26.1212 +        // Specifies the initial frame for interleaved files. Noninterleaved
 26.1213 +        // files should specify zero. If you are creating interleaved files,
 26.1214 +        // specify the number of frames in the file prior to the initial frame
 26.1215 +        // of the AVI sequence in this member.
 26.1216 +        // To give the audio driver enough audio to work with, the audio data in
 26.1217 +        // an interleaved file must be skewed from the video data. Typically,
 26.1218 +        // the audio data should be moved forward enough frames to allow
 26.1219 +        // approximately 0.75 seconds of audio data to be preloaded. The
 26.1220 +        // dwInitialRecords member should be set to the number of frames the
 26.1221 +        // audio is skewed. Also set the same value for the dwInitialFrames
 26.1222 +        // member of the AVISTREAMHEADER structure in the audio stream header
 26.1223 +
 26.1224 +        d.writeUInt(1); // dwStreams
 26.1225 +        // Specifies the number of streams in the file. For example, a file with
 26.1226 +        // audio and video has two streams.
 26.1227 +
 26.1228 +        d.writeUInt(bufferSize); // dwSuggestedBufferSize
 26.1229 +        // Specifies the suggested buffer size for reading the file. Generally,
 26.1230 +        // this size should be large enough to contain the largest chunk in the
 26.1231 +        // file. If set to zero, or if it is too small, the playback software
 26.1232 +        // will have to reallocate memory during playback, which will reduce
 26.1233 +        // performance. For an interleaved file, the buffer size should be large
 26.1234 +        // enough to read an entire record, and not just a chunk.
 26.1235 +
 26.1236 +
 26.1237 +        d.writeUInt(imgWidth); // dwWidth
 26.1238 +        // Specifies the width of the AVI file in pixels.
 26.1239 +
 26.1240 +        d.writeUInt(imgHeight); // dwHeight
 26.1241 +        // Specifies the height of the AVI file in pixels.
 26.1242 +
 26.1243 +        d.writeUInt(0); // dwReserved[0]
 26.1244 +        d.writeUInt(0); // dwReserved[1]
 26.1245 +        d.writeUInt(0); // dwReserved[2]
 26.1246 +        d.writeUInt(0); // dwReserved[3]
 26.1247 +        // Reserved. Set this array to zero.
 26.1248 +
 26.1249 +        /* Write Data into AVI Stream Header Chunk
 26.1250 +         * -------------
 26.1251 +         * The AVISTREAMHEADER structure contains information about one stream
 26.1252 +         * in an AVI file.
 26.1253 +         * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx
 26.1254 +        typedef struct _avistreamheader {
 26.1255 +        FOURCC fcc;
 26.1256 +        DWORD  cb;
 26.1257 +        FOURCC fccType;
 26.1258 +        FOURCC fccHandler;
 26.1259 +        DWORD  dwFlags;
 26.1260 +        WORD   wPriority;
 26.1261 +        WORD   wLanguage;
 26.1262 +        DWORD  dwInitialFrames;
 26.1263 +        DWORD  dwScale;
 26.1264 +        DWORD  dwRate;
 26.1265 +        DWORD  dwStart;
 26.1266 +        DWORD  dwLength;
 26.1267 +        DWORD  dwSuggestedBufferSize;
 26.1268 +        DWORD  dwQuality;
 26.1269 +        DWORD  dwSampleSize;
 26.1270 +        struct {
 26.1271 +        short int left;
 26.1272 +        short int top;
 26.1273 +        short int right;
 26.1274 +        short int bottom;
 26.1275 +        }  rcFrame;
 26.1276 +        } AVISTREAMHEADER;
 26.1277 +         */
 26.1278 +        strhChunk.seekToStartOfData();
 26.1279 +        d = strhChunk.getOutputStream();
 26.1280 +        d.writeType("vids"); // fccType - vids for video stream
 26.1281 +        // Contains a FOURCC that specifies the type of the data contained in
 26.1282 +        // the stream. The following standard AVI values for video and audio are
 26.1283 +        // defined:
 26.1284 +        //
 26.1285 +        // FOURCC   Description
 26.1286 +        // 'auds'   Audio stream
 26.1287 +        // 'mids'   MIDI stream
 26.1288 +        // 'txts'   Text stream
 26.1289 +        // 'vids'   Video stream
 26.1290 +
 26.1291 +        switch (videoFormat) {
 26.1292 +            case RAW:
 26.1293 +                d.writeType("DIB "); // fccHandler - DIB for Raw RGB
 26.1294 +                break;
 26.1295 +            case RLE:
 26.1296 +                d.writeType("RLE "); // fccHandler - Microsoft RLE
 26.1297 +                break;
 26.1298 +            case JPG:
 26.1299 +                d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG
 26.1300 +                break;
 26.1301 +            case PNG:
 26.1302 +            default:
 26.1303 +                d.writeType("png "); // fccHandler - png for PNG
 26.1304 +                break;
 26.1305 +        }
 26.1306 +        // Optionally, contains a FOURCC that identifies a specific data
 26.1307 +        // handler. The data handler is the preferred handler for the stream.
 26.1308 +        // For audio and video streams, this specifies the codec for decoding
 26.1309 +        // the stream.
 26.1310 +
 26.1311 +        if (imgDepth <= 8) {
 26.1312 +            d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES
 26.1313 +        } else {
 26.1314 +            d.writeUInt(0); // dwFlags
 26.1315 +        }
 26.1316 +
 26.1317 +        // Contains any flags for the data stream. The bits in the high-order
 26.1318 +        // word of these flags are specific to the type of data contained in the
 26.1319 +        // stream. The following standard flags are defined:
 26.1320 +        //
 26.1321 +        // Value    Name        Description
 26.1322 +        //          AVISF_DISABLED 0x00000001 Indicates this stream should not
 26.1323 +        //                      be enabled by default.
 26.1324 +        //          AVISF_VIDEO_PALCHANGES 0x00010000
 26.1325 +        //                      Indicates this video stream contains
 26.1326 +        //                      palette changes. This flag warns the playback
 26.1327 +        //                      software that it will need to animate the
 26.1328 +        //                      palette.
 26.1329 +
 26.1330 +        d.writeUShort(0); // wPriority
 26.1331 +        // Specifies priority of a stream type. For example, in a file with
 26.1332 +        // multiple audio streams, the one with the highest priority might be
 26.1333 +        // the default stream.
 26.1334 +
 26.1335 +        d.writeUShort(0); // wLanguage
 26.1336 +        // Language tag.
 26.1337 +
 26.1338 +        d.writeUInt(0); // dwInitialFrames
 26.1339 +        // Specifies how far audio data is skewed ahead of the video frames in
 26.1340 +        // interleaved files. Typically, this is about 0.75 seconds. If you are
 26.1341 +        // creating interleaved files, specify the number of frames in the file
 26.1342 +        // prior to the initial frame of the AVI sequence in this member. For
 26.1343 +        // more information, see the remarks for the dwInitialFrames member of
 26.1344 +        // the AVIMAINHEADER structure.
 26.1345 +
 26.1346 +        d.writeUInt(timeScale); // dwScale
 26.1347 +        // Used with dwRate to specify the time scale that this stream will use.
 26.1348 +        // Dividing dwRate by dwScale gives the number of samples per second.
 26.1349 +        // For video streams, this is the frame rate. For audio streams, this
 26.1350 +        // rate corresponds to the time needed to play nBlockAlign bytes of
 26.1351 +        // audio, which for PCM audio is the just the sample rate.
 26.1352 +
 26.1353 +        d.writeUInt(frameRate); // dwRate
 26.1354 +        // See dwScale.
 26.1355 +
 26.1356 +        d.writeUInt(0); // dwStart
 26.1357 +        // Specifies the starting time for this stream. The units are defined by
 26.1358 +        // the dwRate and dwScale members in the main file header. Usually, this
 26.1359 +        // is zero, but it can specify a delay time for a stream that does not
 26.1360 +        // start concurrently with the file.
 26.1361 +
 26.1362 +        d.writeUInt(videoFrames.size()); // dwLength
 26.1363 +        // Specifies the length of this stream. The units are defined by the
 26.1364 +        // dwRate and dwScale members of the stream's header.
 26.1365 +
 26.1366 +        d.writeUInt(bufferSize); // dwSuggestedBufferSize
 26.1367 +        // Specifies how large a buffer should be used to read this stream.
 26.1368 +        // Typically, this contains a value corresponding to the largest chunk
 26.1369 +        // present in the stream. Using the correct buffer size makes playback
 26.1370 +        // more efficient. Use zero if you do not know the correct buffer size.
 26.1371 +
 26.1372 +        d.writeInt(-1); // dwQuality
 26.1373 +        // Specifies an indicator of the quality of the data in the stream.
 26.1374 +        // Quality is represented as a number between 0 and 10,000.
 26.1375 +        // For compressed data, this typically represents the value of the
 26.1376 +        // quality parameter passed to the compression software. If set to –1,
 26.1377 +        // drivers use the default quality value.
 26.1378 +
 26.1379 +        d.writeUInt(0); // dwSampleSize
 26.1380 +        // Specifies the size of a single sample of data. This is set to zero
 26.1381 +        // if the samples can vary in size. If this number is nonzero, then
 26.1382 +        // multiple samples of data can be grouped into a single chunk within
 26.1383 +        // the file. If it is zero, each sample of data (such as a video frame)
 26.1384 +        // must be in a separate chunk. For video streams, this number is
 26.1385 +        // typically zero, although it can be nonzero if all video frames are
 26.1386 +        // the same size. For audio streams, this number should be the same as
 26.1387 +        // the nBlockAlign member of the WAVEFORMATEX structure describing the
 26.1388 +        // audio.
 26.1389 +
 26.1390 +        d.writeUShort(0); // rcFrame.left
 26.1391 +        d.writeUShort(0); // rcFrame.top
 26.1392 +        d.writeUShort(imgWidth); // rcFrame.right
 26.1393 +        d.writeUShort(imgHeight); // rcFrame.bottom
 26.1394 +        // Specifies the destination rectangle for a text or video stream within
 26.1395 +        // the movie rectangle specified by the dwWidth and dwHeight members of
 26.1396 +        // the AVI main header structure. The rcFrame member is typically used
 26.1397 +        // in support of multiple video streams. Set this rectangle to the
 26.1398 +        // coordinates corresponding to the movie rectangle to update the whole
 26.1399 +        // movie rectangle. Units for this member are pixels. The upper-left
 26.1400 +        // corner of the destination rectangle is relative to the upper-left
 26.1401 +        // corner of the movie rectangle.
 26.1402 +
 26.1403 +        /* Write BITMAPINFOHEADER Data into AVI Stream Format Chunk
 26.1404 +         * -------------
 26.1405 +         * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
 26.1406 +        typedef struct tagBITMAPINFOHEADER {
 26.1407 +        DWORD  biSize;
 26.1408 +        LONG   biWidth;
 26.1409 +        LONG   biHeight;
 26.1410 +        WORD   biPlanes;
 26.1411 +        WORD   biBitCount;
 26.1412 +        DWORD  biCompression;
 26.1413 +        DWORD  biSizeImage;
 26.1414 +        LONG   biXPelsPerMeter;
 26.1415 +        LONG   biYPelsPerMeter;
 26.1416 +        DWORD  biClrUsed;
 26.1417 +        DWORD  biClrImportant;
 26.1418 +        } BITMAPINFOHEADER;
 26.1419 +         */
 26.1420 +        strfChunk.seekToStartOfData();
 26.1421 +        d = strfChunk.getOutputStream();
 26.1422 +        d.writeUInt(40); // biSize
 26.1423 +        // Specifies the number of bytes required by the structure. This value
 26.1424 +        // does not include the size of the color table or the size of the color
 26.1425 +        // masks, if they are appended to the end of structure.
 26.1426 +
 26.1427 +        d.writeInt(imgWidth); // biWidth
 26.1428 +        // Specifies the width of the bitmap, in pixels.
 26.1429 +
 26.1430 +        d.writeInt(imgHeight); // biHeight
 26.1431 +        // Specifies the height of the bitmap, in pixels.
 26.1432 +        //
 26.1433 +        // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
 26.1434 +        // a bottom-up DIB with the origin at the lower left corner. If biHeight
 26.1435 +        // is negative, the bitmap is a top-down DIB with the origin at the
 26.1436 +        // upper left corner.
 26.1437 +        // For YUV bitmaps, the bitmap is always top-down, regardless of the
 26.1438 +        // sign of biHeight. Decoders should offer YUV formats with postive
 26.1439 +        // biHeight, but for backward compatibility they should accept YUV
 26.1440 +        // formats with either positive or negative biHeight.
 26.1441 +        // For compressed formats, biHeight must be positive, regardless of
 26.1442 +        // image orientation.
 26.1443 +
 26.1444 +        d.writeShort(1); // biPlanes
 26.1445 +        // Specifies the number of planes for the target device. This value must
 26.1446 +        // be set to 1.
 26.1447 +
 26.1448 +        d.writeShort(imgDepth); // biBitCount
 26.1449 +        // Specifies the number of bits per pixel (bpp).  For uncompressed
 26.1450 +        // formats, this value is the average number of bits per pixel. For
 26.1451 +        // compressed formats, this value is the implied bit depth of the
 26.1452 +        // uncompressed image, after the image has been decoded.
 26.1453 +
 26.1454 +        switch (videoFormat) {
 26.1455 +            case RAW:
 26.1456 +            default:
 26.1457 +                d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
 26.1458 +                break;
 26.1459 +            case RLE:
 26.1460 +                if (imgDepth == 8) {
 26.1461 +                    d.writeInt(1); // biCompression - BI_RLE8
 26.1462 +                } else if (imgDepth == 4) {
 26.1463 +                    d.writeInt(2); // biCompression - BI_RLE4
 26.1464 +                } else {
 26.1465 +                    throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
 26.1466 +                }
 26.1467 +                break;
 26.1468 +            case JPG:
 26.1469 +                d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
 26.1470 +                break;
 26.1471 +            case PNG:
 26.1472 +                d.writeType("png "); // biCompression - png for PNG
 26.1473 +                break;
 26.1474 +        }
 26.1475 +        // For compressed video and YUV formats, this member is a FOURCC code,
 26.1476 +        // specified as a DWORD in little-endian order. For example, YUYV video
 26.1477 +        // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
 26.1478 +        // Codes.
 26.1479 +        //
 26.1480 +        // For uncompressed RGB formats, the following values are possible:
 26.1481 +        //
 26.1482 +        // Value        Description
 26.1483 +        // BI_RGB       0x00000000 Uncompressed RGB.
 26.1484 +        // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks.
 26.1485 +        //                         Valid for 16-bpp and 32-bpp bitmaps.
 26.1486 +        //
 26.1487 +        // Note that BI_JPG and BI_PNG are not valid video formats.
 26.1488 +        //
 26.1489 +        // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
 26.1490 +        // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
 26.1491 +        // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
 26.1492 +        // structure to determine the specific RGB type.
 26.1493 +
 26.1494 +        switch (videoFormat) {
 26.1495 +            case RAW:
 26.1496 +                d.writeInt(0); // biSizeImage
 26.1497 +                break;
 26.1498 +            case RLE:
 26.1499 +            case JPG:
 26.1500 +            case PNG:
 26.1501 +            default:
 26.1502 +                if (imgDepth == 4) {
 26.1503 +                    d.writeInt(imgWidth * imgHeight / 2); // biSizeImage
 26.1504 +                } else {
 26.1505 +                    int bytesPerPixel = Math.max(1, imgDepth / 8);
 26.1506 +                    d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage
 26.1507 +                }
 26.1508 +                break;
 26.1509 +        }
 26.1510 +        // Specifies the size, in bytes, of the image. This can be set to 0 for
 26.1511 +        // uncompressed RGB bitmaps.
 26.1512 +
 26.1513 +        d.writeInt(0); // biXPelsPerMeter
 26.1514 +        // Specifies the horizontal resolution, in pixels per meter, of the
 26.1515 +        // target device for the bitmap.
 26.1516 +
 26.1517 +        d.writeInt(0); // biYPelsPerMeter
 26.1518 +        // Specifies the vertical resolution, in pixels per meter, of the target
 26.1519 +        // device for the bitmap.
 26.1520 +
 26.1521 +        d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed
 26.1522 +        // Specifies the number of color indices in the color table that are
 26.1523 +        // actually used by the bitmap.
 26.1524 +
 26.1525 +        d.writeInt(0); // biClrImportant
 26.1526 +        // Specifies the number of color indices that are considered important
 26.1527 +        // for displaying the bitmap. If this value is zero, all colors are
 26.1528 +        // important.
 26.1529 +
 26.1530 +        if (palette != null) {
 26.1531 +            for (int i = 0, n = palette.getMapSize(); i < n; ++i) {
 26.1532 +                /*
 26.1533 +                 * typedef struct tagRGBQUAD {
 26.1534 +                BYTE rgbBlue;
 26.1535 +                BYTE rgbGreen;
 26.1536 +                BYTE rgbRed;
 26.1537 +                BYTE rgbReserved; // This member is reserved and must be zero.
 26.1538 +                } RGBQUAD;
 26.1539 +                 */
 26.1540 +                d.write(palette.getBlue(i));
 26.1541 +                d.write(palette.getGreen(i));
 26.1542 +                d.write(palette.getRed(i));
 26.1543 +                d.write(0);
 26.1544 +            }
 26.1545 +        }
 26.1546 +
 26.1547 +
 26.1548 +        // -----------------
 26.1549 +        aviChunk.finish();
 26.1550 +    }
 26.1551 +}
    27.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    27.2 +++ b/src/com/aurellem/capture/video/AVIVideoRecorder.java	Wed Oct 26 08:54:12 2011 -0700
    27.3 @@ -0,0 +1,46 @@
    27.4 +package com.aurellem.capture.video;
    27.5 +
    27.6 +import java.awt.image.BufferedImage;
    27.7 +import java.io.File;
    27.8 +import java.io.IOException;
    27.9 +
   27.10 +
   27.11 +public class AVIVideoRecorder extends AbstractVideoRecorder{
   27.12 +
   27.13 +	AVIOutputStream out = null;
   27.14 +	boolean videoReady = false;
   27.15 +	BufferedImage frame;
   27.16 +	
   27.17 +	public AVIVideoRecorder(File output) throws IOException {
   27.18 +		super(output);
   27.19 +		this.out = new AVIOutputStream(output, AVIOutputStream.VideoFormat.PNG, 24);
   27.20 +		this.out.setVideoCompressionQuality(1.0f);
   27.21 +	}
   27.22 +
   27.23 +	
   27.24 +	public void initVideo (){
   27.25 +		frame = new BufferedImage(
   27.26 +				width, height,
   27.27 +				BufferedImage.TYPE_INT_RGB);
   27.28 +		out.setFrameRate((int) Math.round(this.fps));
   27.29 +		out.setTimeScale(1);
   27.30 +		out.setVideoDimension(width, height);
   27.31 +		this.videoReady = true;
   27.32 +	}
   27.33 +	
   27.34 +	public void record(BufferedImage rawFrame) {
   27.35 +		if (!videoReady){initVideo();}
   27.36 +		this.frame.getGraphics().drawImage(rawFrame, 0, 0, null);
   27.37 +		try {out.writeFrame(frame);}
   27.38 +		catch (IOException e){e.printStackTrace();}
   27.39 +	}
   27.40 +	
   27.41 +	public void finish() {
   27.42 +		System.out.println("I'm finished! <3");
   27.43 +		try {out.close();} 
   27.44 +		catch (IOException e) {e.printStackTrace();}
   27.45 +	}
   27.46 +
   27.47 +	
   27.48 +
   27.49 +}
    28.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    28.2 +++ b/src/com/aurellem/capture/video/AbstractVideoRecorder.java	Wed Oct 26 08:54:12 2011 -0700
    28.3 @@ -0,0 +1,144 @@
    28.4 +package com.aurellem.capture.video;
    28.5 +
    28.6 +import java.awt.image.BufferedImage;
    28.7 +import java.io.File;
    28.8 +import java.io.IOException;
    28.9 +import java.nio.ByteBuffer;
   28.10 +
   28.11 +import com.aurellem.capture.IsoTimer;
   28.12 +import com.jme3.app.Application;
   28.13 +import com.jme3.app.state.AppState;
   28.14 +import com.jme3.app.state.AppStateManager;
   28.15 +import com.jme3.post.SceneProcessor;
   28.16 +import com.jme3.renderer.Camera;
   28.17 +import com.jme3.renderer.RenderManager;
   28.18 +import com.jme3.renderer.ViewPort;
   28.19 +import com.jme3.renderer.queue.RenderQueue;
   28.20 +import com.jme3.texture.FrameBuffer;
   28.21 +import com.jme3.util.BufferUtils;
   28.22 +import com.jme3.util.Screenshots;
   28.23 +
   28.24 +/**
   28.25 + * <code>VideoProcessor</code> copies the frames it receives to video. 
   28.26 + * To ensure smooth video at a constant framerate, you should set your 
   28.27 + * application's timer to a new {@link IsoTimer}.  This class will 
   28.28 + * auto-determine the framerate of the video based on the time difference 
   28.29 + * between the first two frames it receives, although you can manually set 
   28.30 + * the framerate by calling <code>setFps(newFramerate)</code>.  Be sure to 
   28.31 + * place this processor *after* any other processors whose effects you want 
   28.32 + * to be included in the output video. You can attach multiple 
   28.33 + * <code>VideoProcessor</code>s to the same <code>ViewPort</code>.
   28.34 + * 
   28.35 + * For example,
   28.36 + * <code>
   28.37 + * someViewPort.addProcessor(new VideoProcessor(file1));
   28.38 + * someViewPort.addProcessor(someShadowRenderer);
   28.39 + * someViewPort.addProcessor(new VideoProcessor(file2));
   28.40 + * </code>
   28.41 + * 
   28.42 + * will output a video without shadows to <code>file1</code> and a video 
   28.43 + * with shadows to <code>file2</code>
   28.44 + * 
   28.45 + * @author Robert McIntyre
   28.46 + *
   28.47 + */
   28.48 +
   28.49 +public abstract class AbstractVideoRecorder 
   28.50 +	implements SceneProcessor, IVideoRecorder, AppState{
   28.51 +
   28.52 +	final File output;
   28.53 +	Camera camera;
   28.54 +	int width;
   28.55 +	int height;
   28.56 +	String targetFileName;
   28.57 +	FrameBuffer frameBuffer;
   28.58 +	Double fps = null;
   28.59 +	RenderManager renderManager;
   28.60 +	ByteBuffer byteBuffer;
   28.61 +	BufferedImage rawFrame;
   28.62 +	boolean isInitilized = false;
   28.63 +	boolean paused = false;
   28.64 +	
   28.65 +	public AbstractVideoRecorder(File output) throws IOException {
   28.66 +		this.output = output;
   28.67 +		this.targetFileName = this.output.getCanonicalPath();	
   28.68 +	}
   28.69 +	
   28.70 +		
   28.71 +	public double getFps() {return this.fps;}
   28.72 +	
   28.73 +	public AbstractVideoRecorder setFps(double fps) {
   28.74 +		this.fps = fps;
   28.75 +		return this;
   28.76 +	}
   28.77 +	
   28.78 +	public void initialize(RenderManager rm, ViewPort viewPort) {
   28.79 +		Camera camera = viewPort.getCamera();
   28.80 +		this.width = camera.getWidth();
   28.81 +		this.height = camera.getHeight();
   28.82 +				
   28.83 +		rawFrame = new BufferedImage(width, height, 
   28.84 +				BufferedImage.TYPE_4BYTE_ABGR);		
   28.85 +		byteBuffer = BufferUtils.createByteBuffer(width * height * 4 );
   28.86 +		this.renderManager = rm;
   28.87 +		this.isInitilized = true;
   28.88 +	}
   28.89 +
   28.90 +	public void reshape(ViewPort vp, int w, int h) {}
   28.91 +	
   28.92 +	public boolean isInitialized() {return this.isInitilized;}
   28.93 +
   28.94 +	public void preFrame(float tpf) {
   28.95 +		if (null == this.fps){
   28.96 +			this.setFps(1.0 / tpf);}
   28.97 +	}	
   28.98 +	
   28.99 +	public void postQueue(RenderQueue rq) {}
  28.100 +
  28.101 +	public void postFrame(FrameBuffer out) {
  28.102 +		if (!this.paused){
  28.103 +			byteBuffer.clear();
  28.104 +			renderManager.getRenderer().readFrameBuffer(out, byteBuffer);
  28.105 +			Screenshots.convertScreenShot(byteBuffer, rawFrame);
  28.106 +			record(rawFrame);
  28.107 +		}
  28.108 +	}
  28.109 +			
  28.110 +	public void cleanup(){
  28.111 +		this.pause();
  28.112 +		this.finish();
  28.113 +	};
  28.114 +	
  28.115 +	public void pause(){
  28.116 +		this.paused = true;
  28.117 +	}
  28.118 +	
  28.119 +	public void start(){
  28.120 +		this.paused = false;
  28.121 +	}
  28.122 +
  28.123 +	// methods from AppState
  28.124 +	public void initialize(AppStateManager stateManager, Application app) {}
  28.125 +
  28.126 +	public void setEnabled(boolean active) {
  28.127 +		if (active) {this.start();}
  28.128 +		else {this.pause();}
  28.129 +	}
  28.130 +
  28.131 +	public boolean isEnabled() {
  28.132 +		return this.paused;
  28.133 +	}
  28.134 +
  28.135 +	public void stateAttached(AppStateManager stateManager) {}
  28.136 +
  28.137 +
  28.138 +	public void stateDetached(AppStateManager stateManager) {
  28.139 +		this.pause();
  28.140 +		this.finish();
  28.141 +	}
  28.142 +
  28.143 +	public void update(float tpf) {}	
  28.144 +	public void render(RenderManager rm) {}
  28.145 +	public void postRender() {}
  28.146 +	
  28.147 +}
    29.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    29.2 +++ b/src/com/aurellem/capture/video/DataChunkOutputStream.java	Wed Oct 26 08:54:12 2011 -0700
    29.3 @@ -0,0 +1,217 @@
    29.4 +/**
    29.5 + * @(#)DataChunkOutputStream.java  1.1  2011-01-17
    29.6 + *
    29.7 + * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
    29.8 + * All rights reserved.
    29.9 + *
   29.10 + * You may not use, copy or modify this file, except in compliance with the
   29.11 + * license agreement you entered into with Werner Randelshofer.
   29.12 + * For details see accompanying license terms.
   29.13 + */
   29.14 +package com.aurellem.capture.video;
   29.15 +
   29.16 +import java.io.*;
   29.17 +
   29.18 +/**
   29.19 + * This output stream filter supports common data types used inside
   29.20 + * of AVI RIFF Data Chunks.
   29.21 + *
   29.22 + * @author Werner Randelshofer
   29.23 + * @version 1.1 2011-01-17 Adds functionality for blocking flush and close.
   29.24 + * <br>1.0.1 2010-04-05 Removed unused constants.
   29.25 + * <br>1.0  2008-08-11 Created.
   29.26 + */
   29.27 +public class DataChunkOutputStream extends FilterOutputStream {
   29.28 +
    /**
     * The number of bytes written to the data output stream so far. 
     * If this counter overflows, it will be wrapped to Integer.MAX_VALUE.
     */
    protected long written;

    /** Whether flush and close request shall be forwarded to underlying stream.*/
    private boolean forwardFlushAndClose;

    /**
     * Creates a DataChunkOutputStream that forwards flush() and close()
     * calls to the underlying stream.
     *
     * @param out the stream the chunk data is written to
     */
    public DataChunkOutputStream(OutputStream out) {
        this(out,true);
    }
    /**
     * Creates a DataChunkOutputStream.
     *
     * @param out the stream the chunk data is written to
     * @param forwardFlushAndClose whether flush() and close() calls are
     *        forwarded to the underlying stream — presumably false when
     *        several chunk streams share one underlying stream (TODO confirm
     *        against callers)
     */
    public DataChunkOutputStream(OutputStream out, boolean forwardFlushAndClose) {
        super(out);
        this.forwardFlushAndClose=forwardFlushAndClose;
    }
   29.45 +
   29.46 +    /**
   29.47 +     * Writes an chunk type identifier (4 bytes).
   29.48 +     * @param s A string with a length of 4 characters.
   29.49 +     */
   29.50 +    public void writeType(String s) throws IOException {
   29.51 +        if (s.length() != 4) {
   29.52 +            throw new IllegalArgumentException("type string must have 4 characters");
   29.53 +        }
   29.54 +
   29.55 +        try {
   29.56 +            out.write(s.getBytes("ASCII"), 0, 4);
   29.57 +            incCount(4);
   29.58 +        } catch (UnsupportedEncodingException e) {
   29.59 +            throw new InternalError(e.toString());
   29.60 +        }
   29.61 +    }
   29.62 +
   29.63 +    /**
   29.64 +     * Writes out a <code>byte</code> to the underlying output stream as 
   29.65 +     * a 1-byte value. If no exception is thrown, the counter 
   29.66 +     * <code>written</code> is incremented by <code>1</code>.
   29.67 +     *
   29.68 +     * @param      v   a <code>byte</code> value to be written.
   29.69 +     * @exception  IOException  if an I/O error occurs.
   29.70 +     * @see        java.io.FilterOutputStream#out
   29.71 +     */
   29.72 +    public final void writeByte(int v) throws IOException {
   29.73 +        out.write(v);
   29.74 +        incCount(1);
   29.75 +    }
   29.76 +
   29.77 +    /**
   29.78 +     * Writes <code>len</code> bytes from the specified byte array 
   29.79 +     * starting at offset <code>off</code> to the underlying output stream. 
   29.80 +     * If no exception is thrown, the counter <code>written</code> is 
   29.81 +     * incremented by <code>len</code>.
   29.82 +     *
   29.83 +     * @param      b     the data.
   29.84 +     * @param      off   the start offset in the data.
   29.85 +     * @param      len   the number of bytes to write.
   29.86 +     * @exception  IOException  if an I/O error occurs.
   29.87 +     * @see        java.io.FilterOutputStream#out
   29.88 +     */
   29.89 +    @Override
   29.90 +    public synchronized void write(byte b[], int off, int len)
   29.91 +            throws IOException {
   29.92 +        out.write(b, off, len);
   29.93 +        incCount(len);
   29.94 +    }
   29.95 +
   29.96 +    /**
   29.97 +     * Writes the specified byte (the low eight bits of the argument 
   29.98 +     * <code>b</code>) to the underlying output stream. If no exception 
   29.99 +     * is thrown, the counter <code>written</code> is incremented by 
  29.100 +     * <code>1</code>.
  29.101 +     * <p>
  29.102 +     * Implements the <code>write</code> method of <code>OutputStream</code>.
  29.103 +     *
  29.104 +     * @param      b   the <code>byte</code> to be written.
  29.105 +     * @exception  IOException  if an I/O error occurs.
  29.106 +     * @see        java.io.FilterOutputStream#out
  29.107 +     */
  29.108 +    @Override
  29.109 +    public synchronized void write(int b) throws IOException {
  29.110 +        out.write(b);
  29.111 +        incCount(1);
  29.112 +    }
  29.113 +
  29.114 +    /**
  29.115 +     * Writes an <code>int</code> to the underlying output stream as four
  29.116 +     * bytes, high byte first. If no exception is thrown, the counter 
  29.117 +     * <code>written</code> is incremented by <code>4</code>.
  29.118 +     *
  29.119 +     * @param      v   an <code>int</code> to be written.
  29.120 +     * @exception  IOException  if an I/O error occurs.
  29.121 +     * @see        java.io.FilterOutputStream#out
  29.122 +     */
  29.123 +    public void writeInt(int v) throws IOException {
  29.124 +        out.write((v >>> 0) & 0xff);
  29.125 +        out.write((v >>> 8) & 0xff);
  29.126 +        out.write((v >>> 16) & 0xff);
  29.127 +        out.write((v >>> 24) & 0xff);
  29.128 +        incCount(4);
  29.129 +    }
  29.130 +
  29.131 +    /**
  29.132 +     * Writes an unsigned 32 bit integer value.
  29.133 +     * 
  29.134 +     * @param v The value
  29.135 +     * @throws java.io.IOException
  29.136 +     */
  29.137 +    public void writeUInt(long v) throws IOException {
  29.138 +        out.write((int) ((v >>> 0) & 0xff));
  29.139 +        out.write((int) ((v >>> 8) & 0xff));
  29.140 +        out.write((int) ((v >>> 16) & 0xff));
  29.141 +        out.write((int) ((v >>> 24) & 0xff));
  29.142 +        incCount(4);
  29.143 +    }
  29.144 +
  29.145 +    /**
  29.146 +     * Writes a signed 16 bit integer value.
  29.147 +     * 
  29.148 +     * @param v The value
  29.149 +     * @throws java.io.IOException
  29.150 +     */
  29.151 +    public void writeShort(int v) throws IOException {
  29.152 +        out.write((int) ((v >>> 0) & 0xff));
  29.153 +        out.write((int) ((v >> 8) & 0xff));
  29.154 +        incCount(2);
  29.155 +    }
  29.156 +
  29.157 +    public void writeLong(long v) throws IOException {
  29.158 +        out.write((int) (v >>> 0) & 0xff);
  29.159 +        out.write((int) (v >>> 8) & 0xff);
  29.160 +        out.write((int) (v >>> 16) & 0xff);
  29.161 +        out.write((int) (v >>> 24) & 0xff);
  29.162 +        out.write((int) (v >>> 32) & 0xff);
  29.163 +        out.write((int) (v >>> 40) & 0xff);
  29.164 +        out.write((int) (v >>> 48) & 0xff);
  29.165 +        out.write((int) (v >>> 56) & 0xff);
  29.166 +        incCount(8);
  29.167 +    }
  29.168 +
  29.169 +    public void writeUShort(int v) throws IOException {
  29.170 +        out.write((int) ((v >>> 0) & 0xff));
  29.171 +        out.write((int) ((v >> 8) & 0xff));
  29.172 +        incCount(2);
  29.173 +    }
  29.174 +
  29.175 +    /**
  29.176 +     * Increases the written counter by the specified value
  29.177 +     * until it reaches Long.MAX_VALUE.
  29.178 +     */
  29.179 +    protected void incCount(int value) {
  29.180 +        long temp = written + value;
  29.181 +        if (temp < 0) {
  29.182 +            temp = Long.MAX_VALUE;
  29.183 +        }
  29.184 +        written = temp;
  29.185 +    }
  29.186 +
  29.187 +    /**
  29.188 +     * Returns the current value of the counter <code>written</code>, 
  29.189 +     * the number of bytes written to this data output stream so far.
  29.190 +     * If the counter overflows, it will be wrapped to Integer.MAX_VALUE.
  29.191 +     *
  29.192 +     * @return  the value of the <code>written</code> field.
  29.193 +     * @see     java.io.DataOutputStream#written
  29.194 +     */
  29.195 +    public final long size() {
  29.196 +        return written;
  29.197 +    }
  29.198 +    
  29.199 +    /**
  29.200 +     * Sets the value of the counter <code>written</code> to 0.
  29.201 +     */
  29.202 +    public void clearCount() {
  29.203 +        written = 0;
  29.204 +    }
  29.205 +
  29.206 +    @Override
  29.207 +    public void close() throws IOException {
  29.208 +        if (forwardFlushAndClose) {
  29.209 +        super.close();
  29.210 +        }
  29.211 +    }
  29.212 +    
  29.213 +    @Override
  29.214 +    public void flush() throws IOException {
  29.215 +        if (forwardFlushAndClose) {
  29.216 +        super.flush();
  29.217 +        }
  29.218 +    }
  29.219 +
  29.220 +}
    30.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    30.2 +++ b/src/com/aurellem/capture/video/IVideoRecorder.java	Wed Oct 26 08:54:12 2011 -0700
    30.3 @@ -0,0 +1,21 @@
    30.4 +package com.aurellem.capture.video;
    30.5 +
    30.6 +import java.awt.image.BufferedImage;
    30.7 +
public interface IVideoRecorder{

	/** Appends a single frame to the video being recorded. */
	void record(BufferedImage image);
	
	/** Temporarily suspends recording; recording may be resumed with start(). */
	void pause();
	
	/** Starts (or resumes) recording. */
	void start();
	
	/**
	 * closes the video file, writing appropriate headers, trailers, etc.
	 * After this is called, no more recording can be done.
	 */
	void finish();
	
}
   30.23 +
   30.24 +
    31.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    31.2 +++ b/src/com/aurellem/capture/video/ImageOutputStreamAdapter.java	Wed Oct 26 08:54:12 2011 -0700
    31.3 @@ -0,0 +1,144 @@
    31.4 +/*
    31.5 + * @(#)ImageOutputStreamAdapter.java  1.1  2011-01-07
    31.6 + *
    31.7 + * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland.
    31.8 + * All rights reserved.
    31.9 + *
   31.10 + * You may not use, copy or modify this file, except in compliance with the
   31.11 + * license agreement you entered into with Werner Randelshofer.
   31.12 + * For details see accompanying license terms.
   31.13 + */
   31.14 +package com.aurellem.capture.video;
   31.15 +
   31.16 +import java.io.FilterOutputStream;
   31.17 +import java.io.IOException;
   31.18 +import java.io.OutputStream;
   31.19 +import javax.imageio.stream.ImageOutputStream;
   31.20 +
   31.21 +/**
   31.22 + * Adapts an {@code ImageOutputStream} for classes requiring an
   31.23 + * {@code OutputStream}.
   31.24 + *
   31.25 + * @author Werner Randelshofer
   31.26 + * @version 1.1 2011-01-07 Fixes performance.
   31.27 + * <br>1.0 2010-12-26 Created.
   31.28 + */
   31.29 +public class ImageOutputStreamAdapter extends OutputStream {
   31.30 +
   31.31 +    /**
   31.32 +     * The underlying output stream to be filtered.
   31.33 +     */
   31.34 +    protected ImageOutputStream out;
   31.35 +
   31.36 +    /**
   31.37 +     * Creates an output stream filter built on top of the specified
   31.38 +     * underlying output stream.
   31.39 +     *
   31.40 +     * @param   out   the underlying output stream to be assigned to
   31.41 +     *                the field <tt>this.out</tt> for later use, or
   31.42 +     *                <code>null</code> if this instance is to be
   31.43 +     *                created without an underlying stream.
   31.44 +     */
   31.45 +    public ImageOutputStreamAdapter(ImageOutputStream out) {
   31.46 +        this.out = out;
   31.47 +    }
   31.48 +
   31.49 +    /**
   31.50 +     * Writes the specified <code>byte</code> to this output stream.
   31.51 +     * <p>
   31.52 +     * The <code>write</code> method of <code>FilterOutputStream</code>
   31.53 +     * calls the <code>write</code> method of its underlying output stream,
   31.54 +     * that is, it performs <tt>out.write(b)</tt>.
   31.55 +     * <p>
   31.56 +     * Implements the abstract <tt>write</tt> method of <tt>OutputStream</tt>.
   31.57 +     *
   31.58 +     * @param      b   the <code>byte</code>.
   31.59 +     * @exception  IOException  if an I/O error occurs.
   31.60 +     */
   31.61 +    @Override
   31.62 +    public void write(int b) throws IOException {
   31.63 +        out.write(b);
   31.64 +    }
   31.65 +
   31.66 +    /**
   31.67 +     * Writes <code>b.length</code> bytes to this output stream.
   31.68 +     * <p>
   31.69 +     * The <code>write</code> method of <code>FilterOutputStream</code>
   31.70 +     * calls its <code>write</code> method of three arguments with the
   31.71 +     * arguments <code>b</code>, <code>0</code>, and
   31.72 +     * <code>b.length</code>.
   31.73 +     * <p>
   31.74 +     * Note that this method does not call the one-argument
   31.75 +     * <code>write</code> method of its underlying stream with the single
   31.76 +     * argument <code>b</code>.
   31.77 +     *
   31.78 +     * @param      b   the data to be written.
   31.79 +     * @exception  IOException  if an I/O error occurs.
   31.80 +     * @see        java.io.FilterOutputStream#write(byte[], int, int)
   31.81 +     */
   31.82 +    @Override
   31.83 +    public void write(byte b[]) throws IOException {
   31.84 +        write(b, 0, b.length);
   31.85 +    }
   31.86 +
   31.87 +    /**
   31.88 +     * Writes <code>len</code> bytes from the specified
   31.89 +     * <code>byte</code> array starting at offset <code>off</code> to
   31.90 +     * this output stream.
   31.91 +     * <p>
   31.92 +     * The <code>write</code> method of <code>FilterOutputStream</code>
   31.93 +     * calls the <code>write</code> method of one argument on each
   31.94 +     * <code>byte</code> to output.
   31.95 +     * <p>
   31.96 +     * Note that this method does not call the <code>write</code> method
   31.97 +     * of its underlying input stream with the same arguments. Subclasses
   31.98 +     * of <code>FilterOutputStream</code> should provide a more efficient
   31.99 +     * implementation of this method.
  31.100 +     *
  31.101 +     * @param      b     the data.
  31.102 +     * @param      off   the start offset in the data.
  31.103 +     * @param      len   the number of bytes to write.
  31.104 +     * @exception  IOException  if an I/O error occurs.
  31.105 +     * @see        java.io.FilterOutputStream#write(int)
  31.106 +     */
  31.107 +    @Override
  31.108 +    public void write(byte b[], int off, int len) throws IOException {
  31.109 +        out.write(b,off,len);
  31.110 +    }
  31.111 +
  31.112 +    /**
  31.113 +     * Flushes this output stream and forces any buffered output bytes
  31.114 +     * to be written out to the stream.
  31.115 +     * <p>
  31.116 +     * The <code>flush</code> method of <code>FilterOutputStream</code>
  31.117 +     * calls the <code>flush</code> method of its underlying output stream.
  31.118 +     *
  31.119 +     * @exception  IOException  if an I/O error occurs.
  31.120 +     * @see        java.io.FilterOutputStream#out
  31.121 +     */
  31.122 +    @Override
  31.123 +    public void flush() throws IOException {
  31.124 +        out.flush();
  31.125 +    }
  31.126 +
  31.127 +    /**
  31.128 +     * Closes this output stream and releases any system resources
  31.129 +     * associated with the stream.
  31.130 +     * <p>
  31.131 +     * The <code>close</code> method of <code>FilterOutputStream</code>
  31.132 +     * calls its <code>flush</code> method, and then calls the
  31.133 +     * <code>close</code> method of its underlying output stream.
  31.134 +     *
  31.135 +     * @exception  IOException  if an I/O error occurs.
  31.136 +     * @see        java.io.FilterOutputStream#flush()
  31.137 +     * @see        java.io.FilterOutputStream#out
  31.138 +     */
  31.139 +    @Override
  31.140 +    public void close() throws IOException {
  31.141 +        try {
  31.142 +            flush();
  31.143 +        } finally {
  31.144 +            out.close();
  31.145 +        }
  31.146 +    }
  31.147 +}
    32.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    32.2 +++ b/src/com/aurellem/capture/video/MicrosoftRLEEncoder.java	Wed Oct 26 08:54:12 2011 -0700
    32.3 @@ -0,0 +1,402 @@
    32.4 +/*
    32.5 + * @(#)AppleRLEEncoder.java  1.1.1  2011-01-17
    32.6 + *
    32.7 + * Copyright © 2011 Werner Randelshofer, Immensee, Switzerland.
    32.8 + * All rights reserved.
    32.9 + *
   32.10 + * You may not use, copy or modify this file, except in compliance with the
   32.11 + * license agreement you entered into with Werner Randelshofer.
   32.12 + * For details see accompanying license terms.
   32.13 + */
   32.14 +package com.aurellem.capture.video;
   32.15 +
   32.16 +import java.io.ByteArrayOutputStream;
   32.17 +import java.io.IOException;
   32.18 +import java.io.OutputStream;
   32.19 +import java.util.Arrays;
   32.20 +
   32.21 +import com.aurellem.capture.audio.SeekableByteArrayOutputStream;
   32.22 +
   32.23 +/**
   32.24 + * Implements the run length encoding of the Microsoft RLE format.
   32.25 + * <p>
   32.26 + * Each line of a frame is compressed individually. A line consists of two-byte
   32.27 + * op-codes optionally followed by data. The end of the line is marked with
   32.28 + * the EOL op-code.
   32.29 + * <p>
   32.30 + * The following op-codes are supported:
   32.31 + * <ul>
   32.32 + * <li>{@code 0x00 0x00}
   32.33 + * <br>Marks the end of a line.</li>
   32.34 + *
   32.35 + * <li>{@code  0x00 0x01}
   32.36 + * <br>Marks the end of the bitmap.</li>
   32.37 + *
   32.38 + * <li>{@code 0x00 0x02 x y}
   32.39 + * <br> Marks a delta (skip). {@code x} and {@code y}
   32.40 + * indicate the horizontal and vertical offset from the current position.
   32.41 + * {@code x} and {@code y} are unsigned 8-bit values.</li>
   32.42 + *
   32.43 + * <li>{@code 0x00 n data{n} 0x00?}
   32.44 + * <br> Marks a literal run. {@code n}
   32.45 + * gives the number of data bytes that follow. {@code n} must be between 3 and
   32.46 + * 255. If n is odd, a pad byte with the value 0x00 must be added.
   32.47 + * </li>
   32.48 + * <li>{@code n data}
   32.49 + * <br> Marks a repetition. {@code n}
   32.50 + * gives the number of times the data byte is repeated. {@code n} must be
   32.51 + * between 1 and 255.
   32.52 + * </li>
   32.53 + * </ul>
   32.54 + * Example:
   32.55 + * <pre>
   32.56 + * Compressed data         Expanded data
   32.57 + *
   32.58 + * 03 04                   04 04 04
   32.59 + * 05 06                   06 06 06 06 06
   32.60 + * 00 03 45 56 67 00       45 56 67
   32.61 + * 02 78                   78 78
   32.62 + * 00 02 05 01             Move 5 right and 1 down
   32.63 + * 02 78                   78 78
   32.64 + * 00 00                   End of line
   32.65 + * 09 1E                   1E 1E 1E 1E 1E 1E 1E 1E 1E
   32.66 + * 00 01                   End of RLE bitmap
   32.67 + * </pre>
   32.68 + *
   32.69 + * References:<br/>
   32.70 + * <a href="http://wiki.multimedia.cx/index.php?title=Microsoft_RLE">http://wiki.multimedia.cx/index.php?title=Microsoft_RLE</a><br>
   32.71 + *
   32.72 + * @author Werner Randelshofer
   32.73 + * @version 1.1.1 2011-01-17 Removes unused imports.
   32.74 + * <br>1.1 2011-01-07 Improves performance.
   32.75 + * <br>1.0 2011-01-05 Created.
   32.76 + */
   32.77 +public class MicrosoftRLEEncoder {
   32.78 +
   32.79 +    private SeekableByteArrayOutputStream tempSeek=new SeekableByteArrayOutputStream();
   32.80 +    private DataChunkOutputStream temp=new DataChunkOutputStream(tempSeek);
   32.81 +
   32.82 +    /** Encodes a 8-bit key frame.
   32.83 +     *
   32.84 +     * @param temp The output stream. Must be set to Big-Endian.
   32.85 +     * @param data The image data.
   32.86 +     * @param offset The offset to the first pixel in the data array.
   32.87 +     * @param length The width of the image in data elements.
   32.88 +     * @param step The number to add to offset to get to the next scanline.
   32.89 +     */
   32.90 +    public void writeKey8(OutputStream out, byte[] data, int offset, int length, int step, int height)
   32.91 +            throws IOException {
   32.92 +        tempSeek.reset();
   32.93 +        int ymax = offset + height * step;
   32.94 +        int upsideDown = ymax-step+offset;
   32.95 +
   32.96 +        // Encode each scanline separately
   32.97 +        for (int y = offset; y < ymax; y += step) {
   32.98 +            int xy = upsideDown-y;
   32.99 +            int xymax = xy + length;
  32.100 +
  32.101 +            int literalCount = 0;
  32.102 +            int repeatCount = 0;
  32.103 +            for (; xy < xymax; ++xy) {
  32.104 +                // determine repeat count
  32.105 +                byte v = data[xy];
  32.106 +                for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) {
  32.107 +                    if (data[xy] != v) {
  32.108 +                        break;
  32.109 +                    }
  32.110 +                }
  32.111 +                xy -= repeatCount;
  32.112 +                if (repeatCount < 3) {
  32.113 +                    literalCount++;
  32.114 +                    if (literalCount == 254) {
  32.115 +                        temp.write(0);temp.write(literalCount); // Literal OP-code
  32.116 +                        temp.write(data, xy - literalCount + 1, literalCount);
  32.117 +                        literalCount = 0;
  32.118 +                    }
  32.119 +                } else {
  32.120 +                    if (literalCount > 0) {
  32.121 +                        if (literalCount < 3) {
  32.122 +                            for (; literalCount > 0; --literalCount) {
  32.123 +                                temp.write(1); // Repeat OP-code
  32.124 +                                temp.write(data[xy - literalCount]);
  32.125 +                            }
  32.126 +                        } else {
  32.127 +                            temp.write(0);temp.write(literalCount); // Literal OP-code
  32.128 +                            temp.write(data, xy - literalCount, literalCount);
  32.129 +                            if (literalCount % 2 == 1) {
  32.130 +                                temp.write(0); // pad byte
  32.131 +                            }
  32.132 +                            literalCount = 0;
  32.133 +                        }
  32.134 +                    }
  32.135 +                    temp.write(repeatCount); // Repeat OP-code
  32.136 +                    temp.write(v);
  32.137 +                    xy += repeatCount - 1;
  32.138 +                }
  32.139 +            }
  32.140 +
  32.141 +            // flush literal run
  32.142 +            if (literalCount > 0) {
  32.143 +                if (literalCount < 3) {
  32.144 +                    for (; literalCount > 0; --literalCount) {
  32.145 +                        temp.write(1); // Repeat OP-code
  32.146 +                        temp.write(data[xy - literalCount]);
  32.147 +                    }
  32.148 +                } else {
  32.149 +                    temp.write(0);temp.write(literalCount);
  32.150 +                    temp.write(data, xy - literalCount, literalCount);
  32.151 +                    if (literalCount % 2 == 1) {
  32.152 +                        temp.write(0); // pad byte
  32.153 +                    }
  32.154 +                }
  32.155 +                literalCount = 0;
  32.156 +            }
  32.157 +
  32.158 +            temp.write(0);temp.write(0x0000);// End of line
  32.159 +        }
  32.160 +        temp.write(0);temp.write(0x0001);// End of bitmap
  32.161 +        tempSeek.toOutputStream(out);
  32.162 +    }
  32.163 +
  32.164 +    /** Encodes a 8-bit delta frame.
  32.165 +     *
  32.166 +     * @param temp The output stream. Must be set to Big-Endian.
  32.167 +     * @param data The image data.
  32.168 +     * @param prev The image data of the previous frame.
  32.169 +     * @param offset The offset to the first pixel in the data array.
  32.170 +     * @param length The width of the image in data elements.
  32.171 +     * @param step The number to add to offset to get to the next scanline.
  32.172 +     */
  32.173 +    public void writeDelta8(OutputStream out, byte[] data, byte[] prev, int offset, int length, int step, int height)
  32.174 +            throws IOException {
  32.175 +
  32.176 +tempSeek.reset();
  32.177 +        // Determine whether we can skip lines at the beginning
  32.178 +        int ymin;
  32.179 +        int ymax = offset + height * step;
  32.180 +        int upsideDown = ymax-step+offset;
  32.181 +        scanline:
  32.182 +        for (ymin = offset; ymin < ymax; ymin += step) {
  32.183 +            int xy = upsideDown-ymin;
  32.184 +            int xymax = xy + length;
  32.185 +            for (; xy < xymax; ++xy) {
  32.186 +                if (data[xy] != prev[xy]) {
  32.187 +                    break scanline;
  32.188 +                }
  32.189 +            }
  32.190 +        }
  32.191 +
  32.192 +        if (ymin == ymax) {
  32.193 +            // => Frame is identical to previous one
  32.194 +            temp.write(0);temp.write(0x0001); // end of bitmap
  32.195 +            return;
  32.196 +        }
  32.197 +
  32.198 +        if (ymin > offset) {
  32.199 +            int verticalOffset = ymin / step;
  32.200 +            while (verticalOffset > 255) {
  32.201 +                temp.write(0);temp.write(0x0002); // Skip OP-code
  32.202 +                temp.write(0); // horizontal offset
  32.203 +                temp.write(255); // vertical offset
  32.204 +                verticalOffset -= 255;
  32.205 +            }
  32.206 +            if (verticalOffset == 1) {
  32.207 +                temp.write(0);temp.write(0x0000); // End of line OP-code
  32.208 +            } else {
  32.209 +                temp.write(0);temp.write(0x0002); // Skip OP-code
  32.210 +                temp.write(0); // horizontal offset
  32.211 +                temp.write(verticalOffset); // vertical offset
  32.212 +            }
  32.213 +        }
  32.214 +
  32.215 +
  32.216 +        // Determine whether we can skip lines at the end
  32.217 +        scanline:
  32.218 +        for (; ymax > ymin; ymax -= step) {
  32.219 +            int xy = upsideDown-ymax+step;
  32.220 +            int xymax = xy + length;
  32.221 +            for (; xy < xymax; ++xy) {
  32.222 +                if (data[xy] != prev[xy]) {
  32.223 +                    break scanline;
  32.224 +                }
  32.225 +            }
  32.226 +        }
  32.227 +        //System.out.println("MicrosoftRLEEncoder ymin:" + ymin / step + " ymax" + ymax / step);
  32.228 +
  32.229 +
  32.230 +        // Encode each scanline
  32.231 +        int verticalOffset = 0;
  32.232 +        for (int y = ymin; y < ymax; y += step) {
  32.233 +            int xy = upsideDown-y;
  32.234 +            int xymax = xy + length;
  32.235 +
  32.236 +            // determine skip count
  32.237 +            int skipCount = 0;
  32.238 +            for (; xy < xymax; ++xy, ++skipCount) {
  32.239 +                if (data[xy] != prev[xy]) {
  32.240 +                    break;
  32.241 +                }
  32.242 +            }
  32.243 +            if (skipCount == length) {
  32.244 +                // => the entire line can be skipped
  32.245 +                ++verticalOffset;
  32.246 +                if (verticalOffset == 255) {
  32.247 +                    temp.write(0);temp.write(0x0002); // Skip OP-code
  32.248 +                    temp.write(0); // horizontal offset
  32.249 +                    temp.write(255); // vertical offset
  32.250 +                    verticalOffset = 0;
  32.251 +                }
  32.252 +                continue;
  32.253 +            }
  32.254 +
  32.255 +            if (verticalOffset > 0 || skipCount > 0) {
  32.256 +                if (verticalOffset == 1 && skipCount == 0) {
  32.257 +                    temp.write(0);temp.write(0x0000); // End of line OP-code
  32.258 +                } else {
  32.259 +                    temp.write(0);temp.write(0x0002); // Skip OP-code
  32.260 +                    temp.write(Math.min(255, skipCount)); // horizontal offset
  32.261 +                    skipCount -= 255;
  32.262 +                    temp.write(verticalOffset); // vertical offset
  32.263 +                }
  32.264 +                verticalOffset = 0;
  32.265 +            }
  32.266 +            while (skipCount > 0) {
  32.267 +                temp.write(0);temp.write(0x0002); // Skip OP-code
  32.268 +                temp.write(Math.min(255, skipCount)); // horizontal offset
  32.269 +                temp.write(0); // vertical offset
  32.270 +                skipCount -= 255;
  32.271 +            }
  32.272 +
  32.273 +            int literalCount = 0;
  32.274 +            int repeatCount = 0;
  32.275 +            for (; xy < xymax; ++xy) {
  32.276 +                // determine skip count
  32.277 +                for (skipCount = 0; xy < xymax; ++xy, ++skipCount) {
  32.278 +                    if (data[xy] != prev[xy]) {
  32.279 +                        break;
  32.280 +                    }
  32.281 +                }
  32.282 +                xy -= skipCount;
  32.283 +
  32.284 +                // determine repeat count
  32.285 +                byte v = data[xy];
  32.286 +                for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) {
  32.287 +                    if (data[xy] != v) {
  32.288 +                        break;
  32.289 +                    }
  32.290 +                }
  32.291 +                xy -= repeatCount;
  32.292 +
  32.293 +                if (skipCount < 4 && xy + skipCount < xymax && repeatCount < 3) {
  32.294 +                    literalCount++;
  32.295 +                    if (literalCount == 254) {
  32.296 +                        temp.write(0);temp.write(literalCount); // Literal OP-code
  32.297 +                        temp.write(data, xy - literalCount + 1, literalCount);
  32.298 +                        literalCount = 0;
  32.299 +                    }
  32.300 +                } else {
  32.301 +                    if (literalCount > 0) {
  32.302 +                        if (literalCount < 3) {
  32.303 +                            for (; literalCount > 0; --literalCount) {
  32.304 +                                temp.write(1); // Repeat OP-code
  32.305 +                                temp.write(data[xy - literalCount]);
  32.306 +                            }
  32.307 +                        } else {
  32.308 +                            temp.write(0);temp.write(literalCount);
  32.309 +                            temp.write(data, xy - literalCount, literalCount);
  32.310 +                            if (literalCount % 2 == 1) {
  32.311 +                                temp.write(0); // pad byte
  32.312 +                            }
  32.313 +                        }
  32.314 +                        literalCount = 0;
  32.315 +                    }
  32.316 +                    if (xy + skipCount == xymax) {
  32.317 +                        // => we can skip until the end of the line without
  32.318 +                        //    having to write an op-code
  32.319 +                        xy += skipCount - 1;
  32.320 +                    } else if (skipCount >= repeatCount) {
  32.321 +                        while (skipCount > 255) {
  32.322 +                            temp.write(0);temp.write(0x0002); // Skip OP-code
  32.323 +                            temp.write(255);
  32.324 +                            temp.write(0);
  32.325 +                            xy += 255;
  32.326 +                            skipCount -= 255;
  32.327 +                        }
  32.328 +                        temp.write(0);temp.write(0x0002); // Skip OP-code
  32.329 +                        temp.write(skipCount);
  32.330 +                        temp.write(0);
  32.331 +                        xy += skipCount - 1;
  32.332 +                    } else {
  32.333 +                        temp.write(repeatCount); // Repeat OP-code
  32.334 +                        temp.write(v);
  32.335 +                        xy += repeatCount - 1;
  32.336 +                    }
  32.337 +                }
  32.338 +            }
  32.339 +
  32.340 +            // flush literal run
  32.341 +            if (literalCount > 0) {
  32.342 +                if (literalCount < 3) {
  32.343 +                    for (; literalCount > 0; --literalCount) {
  32.344 +                        temp.write(1); // Repeat OP-code
  32.345 +                        temp.write(data[xy - literalCount]);
  32.346 +                    }
  32.347 +                } else {
  32.348 +                    temp.write(0);temp.write(literalCount);
  32.349 +                    temp.write(data, xy - literalCount, literalCount);
  32.350 +                    if (literalCount % 2 == 1) {
  32.351 +                        temp.write(0); // pad byte
  32.352 +                    }
  32.353 +                }
  32.354 +            }
  32.355 +
  32.356 +            temp.write(0);temp.write(0x0000); // End of line OP-code
  32.357 +        }
  32.358 +
  32.359 +        temp.write(0);temp.write(0x0001);// End of bitmap
  32.360 +        tempSeek.toOutputStream(out);
  32.361 +    }
  32.362 +
  32.363 +    public static void main(String[] args) {
  32.364 +        byte[] data = {//
  32.365 +            8, 2, 3, 4, 4, 3,7,7,7, 8,//
  32.366 +            8, 1, 1, 1, 1, 2,7,7,7, 8,//
  32.367 +            8, 0, 2, 0, 0, 0,7,7,7, 8,//
  32.368 +            8, 2, 2, 3, 4, 4,7,7,7, 8,//
  32.369 +            8, 1, 4, 4, 4, 5,7,7,7, 8};
  32.370 +
  32.371 +
  32.372 +        byte[] prev = {//
  32.373 +            8, 3, 3, 3, 3, 3,7,7,7, 8,//
  32.374 +            8, 1, 1, 1, 1, 1,7,7,7, 8, //
  32.375 +            8, 5, 5, 5, 5, 0,7,7,7, 8,//
  32.376 +            8, 2, 2, 0, 0, 0,7,7,7, 8,//
  32.377 +            8, 2, 0, 0, 0, 5,7,7,7, 8};
  32.378 +        ByteArrayOutputStream buf = new ByteArrayOutputStream();
  32.379 +        DataChunkOutputStream out = new DataChunkOutputStream(buf);
  32.380 +        MicrosoftRLEEncoder enc = new MicrosoftRLEEncoder();
  32.381 +
  32.382 +        try {
  32.383 +            enc.writeDelta8(out, data, prev, 1, 8, 10, 5);
  32.384 +            //enc.writeKey8(out, data, 1, 8, 10,5);
  32.385 +            out.close();
  32.386 +
  32.387 +            byte[] result = buf.toByteArray();
  32.388 +            System.out.println("size:" + result.length);
  32.389 +            System.out.println(Arrays.toString(result));
  32.390 +            System.out.print("0x [");
  32.391 +
  32.392 +            for (int i = 0; i < result.length; i++) {
  32.393 +                if (i != 0) {
  32.394 +                    System.out.print(',');
  32.395 +                }
  32.396 +                String hex = "00" + Integer.toHexString(result[i]);
  32.397 +                System.out.print(hex.substring(hex.length() - 2));
  32.398 +            }
  32.399 +            System.out.println(']');
  32.400 +
  32.401 +        } catch (IOException ex) {
  32.402 +            ex.printStackTrace();
  32.403 +        }
  32.404 +    }
  32.405 +}
    33.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    33.2 +++ b/src/com/aurellem/capture/video/XuggleVideoRecorder.java	Wed Oct 26 08:54:12 2011 -0700
    33.3 @@ -0,0 +1,53 @@
package com.aurellem.capture.video;


/**
 * Handles writing video files using Xuggle.
 *
 * NOTE(review): the entire implementation below is commented out —
 * presumably because Xuggle is an optional third-party dependency that is
 * not on the build path; confirm before re-enabling.  As written it would
 * lazily open an IMediaWriter on the first recorded frame, copy each
 * incoming frame into a TYPE_3BYTE_BGR BufferedImage for Xuggle, and
 * advance a nanosecond timestamp by 1/fps per frame.
 *
 * @author Robert McIntyre
 *
 */
/*
public  class XuggleVideoRecorder extends AbstractVideoRecorder{


	IMediaWriter writer;
	BufferedImage frame;
	int videoChannel = 0;
	long currentTimeStamp = 0;
	boolean videoReady = false;
	
	
	public XuggleVideoRecorder(File output) throws IOException {super(output);}
	
	public void initVideo(){
		this.frame = new BufferedImage(
				width, height,
				BufferedImage.TYPE_3BYTE_BGR);
		this.writer = ToolFactory.makeWriter(this.targetFileName);
		writer.addVideoStream(videoChannel, 
				0, IRational.make(fps), 
				width, height);
		this.videoReady = true;
	}

		
	public void record(BufferedImage rawFrame) {
		if (!this.videoReady){initVideo();}
		// convert the Image into the form that Xuggle likes.
		this.frame.getGraphics().drawImage(rawFrame, 0, 0, null);
		writer.encodeVideo(videoChannel, 
			frame,
			currentTimeStamp, TimeUnit.NANOSECONDS);
		
		currentTimeStamp += (long) (1000000000.0 / fps);
	}

	public void finish() {
		writer.close();
	}
	
}

*/
    34.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    34.2 +++ b/test.sh	Wed Oct 26 08:54:12 2011 -0700
    34.3 @@ -0,0 +1,11 @@
    34.4 +
    34.5 +sound=/home/r/tmp/data1.wav
    34.6 +
    34.7 +if [ -e $sound ] 
    34.8 +then
    34.9 +    aplay /home/r/tmp/data1.wav 
   34.10 +    rm /home/r/tmp/data1.wav 
   34.11 +else
   34.12 +    echo "$sound does not exist."
   34.13 +fi
   34.14 +