diff src/ca/randelshofer/AVIOutputStream.java @ 10:4c5fc53778c1

moved randelshofer stuff to rightful place, enabled XuggleVideoRecorder
author Robert McIntyre <rlm@mit.edu>
date Wed, 26 Oct 2011 09:38:27 -0700
parents
children 784a3f4e6202
line wrap: on
line diff
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/src/ca/randelshofer/AVIOutputStream.java	Wed Oct 26 09:38:27 2011 -0700
     1.3 @@ -0,0 +1,1515 @@
     1.4 +/**
     1.5 + * @(#)AVIOutputStream.java  1.5.1  2011-01-17
     1.6 + *
     1.7 + * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
     1.8 + * All rights reserved.
     1.9 + *
    1.10 + * You may not use, copy or modify this file, except in compliance with the
    1.11 + * license agreement you entered into with Werner Randelshofer.
    1.12 + * For details see accompanying license terms.
    1.13 + */
    1.14 +package ca.randelshofer;
    1.15 +
    1.16 +import java.awt.Dimension;
    1.17 +import java.awt.image.BufferedImage;
    1.18 +import java.awt.image.DataBufferByte;
    1.19 +import java.awt.image.IndexColorModel;
    1.20 +import java.awt.image.WritableRaster;
    1.21 +import java.io.File;
    1.22 +import java.io.FileInputStream;
    1.23 +import java.io.IOException;
    1.24 +import java.io.InputStream;
    1.25 +import java.io.OutputStream;
    1.26 +import java.util.Arrays;
    1.27 +import java.util.Date;
    1.28 +import java.util.LinkedList;
    1.29 +
    1.30 +import javax.imageio.IIOImage;
    1.31 +import javax.imageio.ImageIO;
    1.32 +import javax.imageio.ImageWriteParam;
    1.33 +import javax.imageio.ImageWriter;
    1.34 +import javax.imageio.stream.FileImageOutputStream;
    1.35 +import javax.imageio.stream.ImageOutputStream;
    1.36 +import javax.imageio.stream.MemoryCacheImageOutputStream;
    1.37 +
    1.38 +/**
    1.39 + * This class supports writing of images into an AVI 1.0 video file.
    1.40 + * <p>
    1.41 + * The images are written as video frames.
    1.42 + * <p>
    1.43 + * Video frames can be encoded with one of the following formats:
    1.44 + * <ul>
    1.45 + * <li>JPEG</li>
    1.46 + * <li>PNG</li>
    1.47 + * <li>RAW</li>
    1.48 + * <li>RLE</li>
    1.49 + * </ul>
    1.50 + * All frames must have the same format.
    1.51 + * When JPG is used each frame can have an individual encoding quality.
    1.52 + * <p>
    1.53 + * All frames in an AVI file must have the same duration. The duration can
    1.54 + * be set by setting an appropriate pair of values using methods
    1.55 + * {@link #setFrameRate} and {@link #setTimeScale}.
    1.56 + * <p>
    1.57 + * The length of an AVI 1.0 file is limited to 1 GB.
    1.58 + * This class supports lengths of up to 4 GB, but such files may not work on
    1.59 + * all players.
    1.60 + * <p>
    1.61 + * For detailed information about the AVI RIFF file format see:<br>
    1.62 + * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br>
    1.63 + * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br>
    1.64 + * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br>
    1.65 + *
    1.66 + * @author Werner Randelshofer
    1.67 + * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream.
    1.68 + * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format.
    1.69 + * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets
    1.70 + * in "idx1" chunk.
    1.71 + * <br>1.3.2 2010-12-27 File size limit is 1 GB.
    1.72 + * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets.
    1.73 + * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream.
    1.74 + * Added method getVideoDimension().
    1.75 + * <br>1.2 2009-08-29 Adds support for RAW video format.
    1.76 + * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih
    1.77 + * chunk. Changed the API to reflect that AVI works with frame rates instead of
    1.78 + * with frame durations.
    1.79 + * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG
    1.80 + * encoded video.
    1.81 + * <br>1.0 2008-08-11 Created.
    1.82 + */
    1.83 +public class AVIOutputStream {
    1.84 +
    1.85 +    /**
    1.86 +     * Underlying output stream.
    1.87 +     */
    1.88 +    private ImageOutputStream out;
    1.89 +    /** The offset of the AVI stream in the underlying ImageOutputStream.
    1.90 +     * Normally this is 0 unless the underlying stream already contained data
    1.91 +     * when it was passed to the constructor.
    1.92 +     */
    1.93 +    private long streamOffset;
    1.94 +    /** Previous frame for delta compression. */
    1.95 +    
    1.96 +    /**
    1.97 +     * Supported video encodings.
    1.98 +     */
    1.99 +    public static enum VideoFormat {
   1.100 +
   1.101 +        RAW, RLE, JPG, PNG;
   1.102 +    }
   1.103 +    /**
   1.104 +     * Current video format.
   1.105 +     */
   1.106 +    private VideoFormat videoFormat;
   1.107 +    /**
   1.108 +     * Quality of JPEG encoded video frames.
   1.109 +     */
   1.110 +    private float quality = 0.9f;
   1.111 +    /**
   1.112 +     * Width of the video frames. All frames must have the same width.
   1.113 +     * The value -1 is used to mark unspecified width.
   1.114 +     */
   1.115 +    private int imgWidth = -1;
   1.116 +    /**
   1.117 +     * Height of the video frames. All frames must have the same height.
   1.118 +     * The value -1 is used to mark unspecified height.
   1.119 +     */
   1.120 +    private int imgHeight = -1;
   1.121 +    /** Number of bits per pixel. */
   1.122 +    private int imgDepth = 24;
   1.123 +    /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */
   1.124 +    private IndexColorModel palette;
   1.125 +    private IndexColorModel previousPalette;
   1.126 +    /** Video encoder. */
   1.127 +    
   1.128 +    /**
   1.129 +     * The timeScale of the movie.
   1.130 +     * <p>
   1.131 +     * Used with frameRate to specify the time scale that this stream will use.
   1.132 +     * Dividing frameRate by timeScale gives the number of samples per second.
   1.133 +     * For video streams, this is the frame rate. For audio streams, this rate
   1.134 +     * corresponds to the time needed to play nBlockAlign bytes of audio, which
   1.135 +     * for PCM audio is just the sample rate.
   1.136 +     */
   1.137 +    private int timeScale = 1;
   1.138 +    /**
   1.139 +     * The frameRate of the movie in timeScale units.
   1.140 +     * <p>
   1.141 +     * @see timeScale
   1.142 +     */
   1.143 +    private int frameRate = 30;
   1.144 +    /**
   1.145 +     * The states of the movie output stream.
   1.146 +     */
   1.147 +    private static enum States {
   1.148 +
   1.149 +        STARTED, FINISHED, CLOSED;
   1.150 +    }
   1.151 +    /**
   1.152 +     * The current state of the movie output stream.
   1.153 +     */
   1.154 +    private States state = States.FINISHED;
   1.155 +
   1.156 +    /**
   1.157 +     * AVI stores media data in samples.
   1.158 +     * A sample is a single element in a sequence of time-ordered data.
   1.159 +     */
   1.160 +    private static class Sample {
   1.161 +
   1.162 +        String chunkType;
   1.163 +        /** Offset of the sample relative to the start of the AVI file.
   1.164 +         */
   1.165 +        long offset;
   1.166 +        /** Data length of the sample. */
   1.167 +        long length;
   1.168 +        /** Whether the sample is a sync-sample. */
   1.169 +        boolean isSync;
   1.170 +
   1.171 +        /**
   1.172 +         * Creates a new sample.
   1.173 +         * @param duration
   1.174 +         * @param offset
   1.175 +         * @param length
   1.176 +         */
   1.177 +        public Sample(String chunkId, int duration, long offset, long length, boolean isSync) {
   1.178 +            this.chunkType = chunkId;
   1.179 +            this.offset = offset;
   1.180 +            this.length = length;
   1.181 +            this.isSync = isSync;
   1.182 +        }
   1.183 +    }
   1.184 +    /**
   1.185 +     * List of video frames.
   1.186 +     */
   1.187 +    private LinkedList<Sample> videoFrames;
   1.188 +    /**
   1.189 +     * This chunk holds the whole AVI content.
   1.190 +     */
   1.191 +    private CompositeChunk aviChunk;
   1.192 +    /**
   1.193 +     * This chunk holds the movie frames.
   1.194 +     */
   1.195 +    private CompositeChunk moviChunk;
   1.196 +    /**
   1.197 +     * This chunk holds the AVI Main Header.
   1.198 +     */
   1.199 +    FixedSizeDataChunk avihChunk;
   1.200 +    /**
   1.201 +     * This chunk holds the AVI Stream Header.
   1.202 +     */
   1.203 +    FixedSizeDataChunk strhChunk;
   1.204 +    /**
   1.205 +     * This chunk holds the AVI Stream Format Header.
   1.206 +     */
   1.207 +    FixedSizeDataChunk strfChunk;
   1.208 +
   1.209 +    /**
   1.210 +     * Chunk base class.
   1.211 +     */
   1.212 +    private abstract class Chunk {
   1.213 +
   1.214 +        /**
   1.215 +         * The chunkType of the chunk. A String with the length of 4 characters.
   1.216 +         */
   1.217 +        protected String chunkType;
   1.218 +        /**
   1.219 +         * The offset of the chunk relative to the start of the
   1.220 +         * ImageOutputStream.
   1.221 +         */
   1.222 +        protected long offset;
   1.223 +
   1.224 +        /**
   1.225 +         * Creates a new Chunk at the current position of the ImageOutputStream.
   1.226 +         * @param chunkType The chunkType of the chunk. A string with a length of 4 characters.
   1.227 +         */
   1.228 +        public Chunk(String chunkType) throws IOException {
   1.229 +            this.chunkType = chunkType;
   1.230 +            offset = getRelativeStreamPosition();
   1.231 +        }
   1.232 +
   1.233 +        /**
   1.234 +         * Writes the chunk to the ImageOutputStream and disposes it.
   1.235 +         */
   1.236 +        public abstract void finish() throws IOException;
   1.237 +
   1.238 +        /**
   1.239 +         * Returns the size of the chunk including the size of the chunk header.
   1.240 +         * @return The size of the chunk.
   1.241 +         */
   1.242 +        public abstract long size();
   1.243 +    }
   1.244 +
   1.245 +    /**
   1.246 +     * A CompositeChunk contains an ordered list of Chunks.
   1.247 +     */
   1.248 +    private class CompositeChunk extends Chunk {
   1.249 +
   1.250 +        /**
   1.251 +         * The type of the composite. A String with the length of 4 characters.
   1.252 +         */
   1.253 +        protected String compositeType;
   1.254 +        private LinkedList<Chunk> children;
   1.255 +        private boolean finished;
   1.256 +
   1.257 +        /**
   1.258 +         * Creates a new CompositeChunk at the current position of the
   1.259 +         * ImageOutputStream.
   1.260 +         * @param compositeType The type of the composite.
   1.261 +         * @param chunkType The type of the chunk.
   1.262 +         */
   1.263 +        public CompositeChunk(String compositeType, String chunkType) throws IOException {
   1.264 +            super(chunkType);
   1.265 +            this.compositeType = compositeType;
   1.266 +            //out.write
   1.267 +            out.writeLong(0); // make room for the chunk header
   1.268 +            out.writeInt(0); // make room for the chunk header
   1.269 +            children = new LinkedList<Chunk>();
   1.270 +        }
   1.271 +
   1.272 +        public void add(Chunk child) throws IOException {
   1.273 +            if (children.size() > 0) {
   1.274 +                children.getLast().finish();
   1.275 +            }
   1.276 +            children.add(child);
   1.277 +        }
   1.278 +
   1.279 +        /**
   1.280 +         * Writes the chunk and all its children to the ImageOutputStream
   1.281 +         * and disposes of all resources held by the chunk.
   1.282 +         * @throws java.io.IOException
   1.283 +         */
   1.284 +        @Override
   1.285 +        public void finish() throws IOException {
   1.286 +            if (!finished) {
   1.287 +                if (size() > 0xffffffffL) {
   1.288 +                    throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size());
   1.289 +                }
   1.290 +
   1.291 +                long pointer = getRelativeStreamPosition();
   1.292 +                seekRelative(offset);
   1.293 +
   1.294 +                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
   1.295 +                headerData.writeType(compositeType);
   1.296 +                headerData.writeUInt(size() - 8);
   1.297 +                headerData.writeType(chunkType);
   1.298 +                for (Chunk child : children) {
   1.299 +                    child.finish();
   1.300 +                }
   1.301 +                seekRelative(pointer);
   1.302 +                if (size() % 2 == 1) {
   1.303 +                    out.writeByte(0); // write pad byte
   1.304 +                }
   1.305 +                finished = true;
   1.306 +            }
   1.307 +        }
   1.308 +
   1.309 +        @Override
   1.310 +        public long size() {
   1.311 +            long length = 12;
   1.312 +            for (Chunk child : children) {
   1.313 +                length += child.size() + child.size() % 2;
   1.314 +            }
   1.315 +            return length;
   1.316 +        }
   1.317 +    }
   1.318 +
   1.319 +    /**
   1.320 +     * Data Chunk.
   1.321 +     */
   1.322 +    private class DataChunk extends Chunk {
   1.323 +
   1.324 +        private DataChunkOutputStream data;
   1.325 +        private boolean finished;
   1.326 +
   1.327 +        /**
   1.328 +         * Creates a new DataChunk at the current position of the
   1.329 +         * ImageOutputStream.
   1.330 +         * @param chunkType The chunkType of the chunk.
   1.331 +         */
   1.332 +        public DataChunk(String name) throws IOException {
   1.333 +            super(name);
   1.334 +            out.writeLong(0); // make room for the chunk header
   1.335 +            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
   1.336 +        }
   1.337 +
   1.338 +        public DataChunkOutputStream getOutputStream() {
   1.339 +            if (finished) {
   1.340 +                throw new IllegalStateException("DataChunk is finished");
   1.341 +            }
   1.342 +            return data;
   1.343 +        }
   1.344 +
   1.345 +        @Override
   1.346 +        public void finish() throws IOException {
   1.347 +            if (!finished) {
   1.348 +                long sizeBefore = size();
   1.349 +
   1.350 +                if (size() > 0xffffffffL) {
   1.351 +                    throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size());
   1.352 +                }
   1.353 +
   1.354 +                long pointer = getRelativeStreamPosition();
   1.355 +                seekRelative(offset);
   1.356 +
   1.357 +                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
   1.358 +                headerData.writeType(chunkType);
   1.359 +                headerData.writeUInt(size() - 8);
   1.360 +                seekRelative(pointer);
   1.361 +                if (size() % 2 == 1) {
   1.362 +                    out.writeByte(0); // write pad byte
   1.363 +                }
   1.364 +                finished = true;
   1.365 +                long sizeAfter = size();
   1.366 +                if (sizeBefore != sizeAfter) {
   1.367 +                    System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter);
   1.368 +                }
   1.369 +            }
   1.370 +        }
   1.371 +
   1.372 +        @Override
   1.373 +        public long size() {
   1.374 +            return 8 + data.size();
   1.375 +        }
   1.376 +    }
   1.377 +
   1.378 +    /**
   1.379 +     * A DataChunk with a fixed size.
   1.380 +     */
   1.381 +    private class FixedSizeDataChunk extends Chunk {
   1.382 +
   1.383 +        private DataChunkOutputStream data;
   1.384 +        private boolean finished;
   1.385 +        private long fixedSize;
   1.386 +
   1.387 +        /**
   1.388 +         * Creates a new DataChunk at the current position of the
   1.389 +         * ImageOutputStream.
   1.390 +         * @param chunkType The chunkType of the chunk.
   1.391 +         */
   1.392 +        public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException {
   1.393 +            super(chunkType);
   1.394 +            this.fixedSize = fixedSize;
   1.395 +            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
   1.396 +            data.writeType(chunkType);
   1.397 +            data.writeUInt(fixedSize);
   1.398 +            data.clearCount();
   1.399 +
   1.400 +            // Fill fixed size with nulls
   1.401 +            byte[] buf = new byte[(int) Math.min(512, fixedSize)];
   1.402 +            long written = 0;
   1.403 +            while (written < fixedSize) {
   1.404 +                data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written));
   1.405 +                written += Math.min(buf.length, fixedSize - written);
   1.406 +            }
   1.407 +            if (fixedSize % 2 == 1) {
   1.408 +                out.writeByte(0); // write pad byte
   1.409 +            }
   1.410 +            seekToStartOfData();
   1.411 +        }
   1.412 +
   1.413 +        public DataChunkOutputStream getOutputStream() {
   1.414 +            /*if (finished) {
   1.415 +            throw new IllegalStateException("DataChunk is finished");
   1.416 +            }*/
   1.417 +            return data;
   1.418 +        }
   1.419 +
   1.420 +        public void seekToStartOfData() throws IOException {
   1.421 +            seekRelative(offset + 8);
   1.422 +            data.clearCount();
   1.423 +        }
   1.424 +
   1.425 +        public void seekToEndOfChunk() throws IOException {
   1.426 +            seekRelative(offset + 8 + fixedSize + fixedSize % 2);
   1.427 +        }
   1.428 +
   1.429 +        @Override
   1.430 +        public void finish() throws IOException {
   1.431 +            if (!finished) {
   1.432 +                finished = true;
   1.433 +            }
   1.434 +        }
   1.435 +
   1.436 +        @Override
   1.437 +        public long size() {
   1.438 +            return 8 + fixedSize;
   1.439 +        }
   1.440 +    }
   1.441 +
   1.442 +    /**
   1.443 +     * Creates a new AVI file with the specified video format and
   1.444 +     * frame rate. The video has 24 bits per pixel.
   1.445 +     *
   1.446 +     * @param file the output file
   1.447 +     * @param format Selects an encoder for the video format.
   1.448 +     * @param bitsPerPixel the number of bits per pixel.
   1.449 +     * @exception IllegalArgumentException if videoFormat is null or if
   1.450 +     * frame rate is &lt;= 0
   1.451 +     */
   1.452 +    public AVIOutputStream(File file, VideoFormat format) throws IOException {
   1.453 +        this(file,format,24);
   1.454 +    }
   1.455 +    /**
   1.456 +     * Creates a new AVI file with the specified video format and
   1.457 +     * frame rate.
   1.458 +     *
   1.459 +     * @param file the output file
   1.460 +     * @param format Selects an encoder for the video format.
   1.461 +     * @param bitsPerPixel the number of bits per pixel.
   1.462 +     * @exception IllegalArgumentException if videoFormat is null or if
   1.463 +     * frame rate is &lt;= 0
   1.464 +     */
   1.465 +    public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException {
   1.466 +        if (format == null) {
   1.467 +            throw new IllegalArgumentException("format must not be null");
   1.468 +        }
   1.469 +
   1.470 +        if (file.exists()) {
   1.471 +            file.delete();
   1.472 +        }
   1.473 +        this.out = new FileImageOutputStream(file);
   1.474 +        this.streamOffset = 0;
   1.475 +        this.videoFormat = format;
   1.476 +        this.videoFrames = new LinkedList<Sample>();
   1.477 +        this.imgDepth = bitsPerPixel;
   1.478 +        if (imgDepth == 4) {
   1.479 +            byte[] gray = new byte[16];
   1.480 +            for (int i = 0; i < gray.length; i++) {
   1.481 +                gray[i] = (byte) ((i << 4) | i);
   1.482 +            }
   1.483 +            palette = new IndexColorModel(4, 16, gray, gray, gray);
   1.484 +        } else if (imgDepth == 8) {
   1.485 +            byte[] gray = new byte[256];
   1.486 +            for (int i = 0; i < gray.length; i++) {
   1.487 +                gray[i] = (byte) i;
   1.488 +            }
   1.489 +            palette = new IndexColorModel(8, 256, gray, gray, gray);
   1.490 +        }
   1.491 +
   1.492 +    }
   1.493 +
   1.494 +    /**
   1.495 +     * Creates a new AVI output stream with the specified video format and
   1.496 +     * framerate.
   1.497 +     *
   1.498 +     * @param out the underlying output stream
   1.499 +     * @param format Selects an encoder for the video format.
   1.500 +     * @exception IllegalArgumentException if videoFormat is null or if
   1.501 +     * framerate is &lt;= 0
   1.502 +     */
   1.503 +    public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {
   1.504 +        if (format == null) {
   1.505 +            throw new IllegalArgumentException("format must not be null");
   1.506 +        }
   1.507 +        this.out = out;
   1.508 +        this.streamOffset = out.getStreamPosition();
   1.509 +        this.videoFormat = format;
   1.510 +        this.videoFrames = new LinkedList<Sample>();
   1.511 +    }
   1.512 +
   1.513 +    /**
   1.514 +     * Used with frameRate to specify the time scale that this stream will use.
   1.515 +     * Dividing frameRate by timeScale gives the number of samples per second.
   1.516 +     * For video streams, this is the frame rate. For audio streams, this rate
   1.517 +     * corresponds to the time needed to play nBlockAlign bytes of audio, which
   1.518 +     * for PCM audio is just the sample rate.
   1.519 +     * <p>
   1.520 +     * The default value is 1.
   1.521 +     *
   1.522 +     * @param newValue
   1.523 +     */
   1.524 +    public void setTimeScale(int newValue) {
   1.525 +        if (newValue <= 0) {
   1.526 +            throw new IllegalArgumentException("timeScale must be greater 0");
   1.527 +        }
   1.528 +        this.timeScale = newValue;
   1.529 +    }
   1.530 +
   1.531 +    /**
   1.532 +     * Returns the time scale of this media.
   1.533 +     *
   1.534 +     * @return time scale
   1.535 +     */
   1.536 +    public int getTimeScale() {
   1.537 +        return timeScale;
   1.538 +    }
   1.539 +
   1.540 +    /**
   1.541 +     * Sets the rate of video frames in time scale units.
   1.542 +     * <p>
   1.543 +     * The default value is 30. Together with the default value 1 of timeScale
   1.544 +     * this results in 30 frames per second.
   1.545 +     *
   1.546 +     * @param newValue
   1.547 +     */
   1.548 +    public void setFrameRate(int newValue) {
   1.549 +        if (newValue <= 0) {
   1.550 +            throw new IllegalArgumentException("frameDuration must be greater 0");
   1.551 +        }
   1.552 +        if (state == States.STARTED) {
   1.553 +            throw new IllegalStateException("frameDuration must be set before the first frame is written");
   1.554 +        }
   1.555 +        this.frameRate = newValue;
   1.556 +    }
   1.557 +
   1.558 +    /**
   1.559 +     * Returns the frame rate of this media.
   1.560 +     *
   1.561 +     * @return frame rate
   1.562 +     */
   1.563 +    public int getFrameRate() {
   1.564 +        return frameRate;
   1.565 +    }
   1.566 +
   1.567 +    /** Sets the global color palette. */
   1.568 +    public void setPalette(IndexColorModel palette) {
   1.569 +        this.palette = palette;
   1.570 +    }
   1.571 +
   1.572 +    /**
   1.573 +     * Sets the compression quality of the video track.
   1.574 +     * A value of 0 stands for "high compression is important", a value of
   1.575 +     * 1 for "high image quality is important".
   1.576 +     * <p>
   1.577 +     * Changing this value affects frames which are subsequently written
   1.578 +     * to the AVIOutputStream. Frames which have already been written
   1.579 +     * are not changed.
   1.580 +     * <p>
   1.581 +     * This value has only effect on videos encoded with JPG format.
   1.582 +     * <p>
   1.583 +     * The default value is 0.9.
   1.584 +     *
   1.585 +     * @param newValue
   1.586 +     */
   1.587 +    public void setVideoCompressionQuality(float newValue) {
   1.588 +        this.quality = newValue;
   1.589 +    }
   1.590 +
   1.591 +    /**
   1.592 +     * Returns the video compression quality.
   1.593 +     *
   1.594 +     * @return video compression quality
   1.595 +     */
   1.596 +    public float getVideoCompressionQuality() {
   1.597 +        return quality;
   1.598 +    }
   1.599 +
   1.600 +    /**
   1.601 +     * Sets the dimension of the video track.
   1.602 +     * <p>
   1.603 +     * You need to explicitly set the dimension, if you add all frames from
   1.604 +     * files or input streams.
   1.605 +     * <p>
   1.606 +     * If you add frames from buffered images, then AVIOutputStream
   1.607 +     * can determine the video dimension from the image width and height.
   1.608 +     *
   1.609 +     * @param width Must be greater than 0.
   1.610 +     * @param height Must be greater than 0.
   1.611 +     */
   1.612 +    public void setVideoDimension(int width, int height) {
   1.613 +        if (width < 1 || height < 1) {
   1.614 +            throw new IllegalArgumentException("width and height must be greater zero.");
   1.615 +        }
   1.616 +        this.imgWidth = width;
   1.617 +        this.imgHeight = height;
   1.618 +    }
   1.619 +
   1.620 +    /**
   1.621 +     * Gets the dimension of the video track.
   1.622 +     * <p>
   1.623 +     * Returns null if the dimension is not known.
   1.624 +     */
   1.625 +    public Dimension getVideoDimension() {
   1.626 +        if (imgWidth < 1 || imgHeight < 1) {
   1.627 +            return null;
   1.628 +        }
   1.629 +        return new Dimension(imgWidth, imgHeight);
   1.630 +    }
   1.631 +
   1.632 +    /**
   1.633 +     * Sets the state of the AVIOutputStream to started.
   1.634 +     * <p>
   1.635 +     * If the state is changed by this method, the prolog is
   1.636 +     * written.
   1.637 +     */
   1.638 +    private void ensureStarted() throws IOException {
   1.639 +        if (state != States.STARTED) {
   1.640 +            new Date();
   1.641 +            writeProlog();
   1.642 +            state = States.STARTED;
   1.643 +        }
   1.644 +    }
   1.645 +
   1.646 +    /**
   1.647 +     * Writes a frame to the video track.
   1.648 +     * <p>
   1.649 +     * If the dimension of the video track has not been specified yet, it
   1.650 +     * is derived from the first buffered image added to the AVIOutputStream.
   1.651 +     *
   1.652 +     * @param image The frame image.
   1.653 +     *
   1.654 +     * @throws IllegalArgumentException if the duration is less than 1, or
   1.655 +     * if the dimension of the frame does not match the dimension of the video
   1.656 +     * track.
   1.657 +     * @throws IOException if writing the image failed.
   1.658 +     */
   1.659 +    public void writeFrame(BufferedImage image) throws IOException {
   1.660 +        ensureOpen();
   1.661 +        ensureStarted();
   1.662 +
   1.663 +        // Get the dimensions of the first image
   1.664 +        if (imgWidth == -1) {
   1.665 +            imgWidth = image.getWidth();
   1.666 +            imgHeight = image.getHeight();
   1.667 +        } else {
   1.668 +            // The dimension of the image must match the dimension of the video track
   1.669 +            if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) {
   1.670 +                throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size()
   1.671 +                        + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
   1.672 +                        + ") differs from image[0] (width="
   1.673 +                        + imgWidth + ", height=" + imgHeight);
   1.674 +            }
   1.675 +        }
   1.676 +
   1.677 +        DataChunk videoFrameChunk;
   1.678 +        long offset = getRelativeStreamPosition();
   1.679 +        boolean isSync = true;
   1.680 +        switch (videoFormat) {
   1.681 +            case RAW: {
   1.682 +                switch (imgDepth) {
   1.683 +                    case 4: {
   1.684 +                        IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
   1.685 +                        int[] imgRGBs = new int[16];
   1.686 +                        imgPalette.getRGBs(imgRGBs);
   1.687 +                        int[] previousRGBs = new int[16];
   1.688 +                        if (previousPalette == null) {
   1.689 +                            previousPalette = palette;
   1.690 +                        }
   1.691 +                        previousPalette.getRGBs(previousRGBs);
   1.692 +                        if (!Arrays.equals(imgRGBs, previousRGBs)) {
   1.693 +                            previousPalette = imgPalette;
   1.694 +                            DataChunk paletteChangeChunk = new DataChunk("00pc");
   1.695 +                            /*
   1.696 +                            int first = imgPalette.getMapSize();
   1.697 +                            int last = -1;
   1.698 +                            for (int i = 0; i < 16; i++) {
   1.699 +                            if (previousRGBs[i] != imgRGBs[i] && i < first) {
   1.700 +                            first = i;
   1.701 +                            }
   1.702 +                            if (previousRGBs[i] != imgRGBs[i] && i > last) {
   1.703 +                            last = i;
   1.704 +                            }
   1.705 +                            }*/
   1.706 +                            int first = 0;
   1.707 +                            int last = imgPalette.getMapSize() - 1;
   1.708 +                            /*
   1.709 +                             * typedef struct {
   1.710 +                            BYTE         bFirstEntry;
   1.711 +                            BYTE         bNumEntries;
   1.712 +                            WORD         wFlags;
   1.713 +                            PALETTEENTRY peNew[];
   1.714 +                            } AVIPALCHANGE;
   1.715 +                             *
   1.716 +                             * typedef struct tagPALETTEENTRY {
   1.717 +                            BYTE peRed;
   1.718 +                            BYTE peGreen;
   1.719 +                            BYTE peBlue;
   1.720 +                            BYTE peFlags;
   1.721 +                            } PALETTEENTRY;
   1.722 +                             */
   1.723 +                            DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
   1.724 +                            pOut.writeByte(first);//bFirstEntry
   1.725 +                            pOut.writeByte(last - first + 1);//bNumEntries
   1.726 +                            pOut.writeShort(0);//wFlags
   1.727 +
   1.728 +                            for (int i = first; i <= last; i++) {
   1.729 +                                pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
   1.730 +                                pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
   1.731 +                                pOut.writeByte(imgRGBs[i] & 0xff); // blue
   1.732 +                                pOut.writeByte(0); // reserved*/
   1.733 +                            }
   1.734 +
   1.735 +                            moviChunk.add(paletteChangeChunk);
   1.736 +                            paletteChangeChunk.finish();
   1.737 +                            long length = getRelativeStreamPosition() - offset;
   1.738 +                            videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
   1.739 +                            offset = getRelativeStreamPosition();
   1.740 +                        }
   1.741 +
   1.742 +                        videoFrameChunk = new DataChunk("00db");
   1.743 +                        byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
   1.744 +                        byte[] rgb4 = new byte[imgWidth / 2];
   1.745 +                        for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
   1.746 +                            for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) {
   1.747 +                                rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf));
   1.748 +                            }
   1.749 +                            videoFrameChunk.getOutputStream().write(rgb4);
   1.750 +                        }
   1.751 +                        break;
   1.752 +                    }
   1.753 +                    case 8: {
   1.754 +                        IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
   1.755 +                        int[] imgRGBs = new int[256];
   1.756 +                        imgPalette.getRGBs(imgRGBs);
   1.757 +                        int[] previousRGBs = new int[256];
   1.758 +                        if (previousPalette == null) {
   1.759 +                            previousPalette = palette;
   1.760 +                        }
   1.761 +                        previousPalette.getRGBs(previousRGBs);
   1.762 +                        if (!Arrays.equals(imgRGBs, previousRGBs)) {
   1.763 +                            previousPalette = imgPalette;
   1.764 +                            DataChunk paletteChangeChunk = new DataChunk("00pc");
   1.765 +                            /*
   1.766 +                            int first = imgPalette.getMapSize();
   1.767 +                            int last = -1;
   1.768 +                            for (int i = 0; i < 16; i++) {
   1.769 +                            if (previousRGBs[i] != imgRGBs[i] && i < first) {
   1.770 +                            first = i;
   1.771 +                            }
   1.772 +                            if (previousRGBs[i] != imgRGBs[i] && i > last) {
   1.773 +                            last = i;
   1.774 +                            }
   1.775 +                            }*/
   1.776 +                            int first = 0;
   1.777 +                            int last = imgPalette.getMapSize() - 1;
   1.778 +                            /*
   1.779 +                             * typedef struct {
   1.780 +                            BYTE         bFirstEntry;
   1.781 +                            BYTE         bNumEntries;
   1.782 +                            WORD         wFlags;
   1.783 +                            PALETTEENTRY peNew[];
   1.784 +                            } AVIPALCHANGE;
   1.785 +                             *
   1.786 +                             * typedef struct tagPALETTEENTRY {
   1.787 +                            BYTE peRed;
   1.788 +                            BYTE peGreen;
   1.789 +                            BYTE peBlue;
   1.790 +                            BYTE peFlags;
   1.791 +                            } PALETTEENTRY;
   1.792 +                             */
   1.793 +                            DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
   1.794 +                            pOut.writeByte(first);//bFirstEntry
   1.795 +                            pOut.writeByte(last - first + 1);//bNumEntries
   1.796 +                            pOut.writeShort(0);//wFlags
   1.797 +
   1.798 +                            for (int i = first; i <= last; i++) {
   1.799 +                                pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
   1.800 +                                pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
   1.801 +                                pOut.writeByte(imgRGBs[i] & 0xff); // blue
   1.802 +                                pOut.writeByte(0); // reserved*/
   1.803 +                            }
   1.804 +
   1.805 +                            moviChunk.add(paletteChangeChunk);
   1.806 +                            paletteChangeChunk.finish();
   1.807 +                            long length = getRelativeStreamPosition() - offset;
   1.808 +                            videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
   1.809 +                            offset = getRelativeStreamPosition();
   1.810 +                        }
   1.811 +
   1.812 +                        videoFrameChunk = new DataChunk("00db");
   1.813 +                        byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
   1.814 +                        for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
   1.815 +                            videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth);
   1.816 +                        }
   1.817 +                        break;
   1.818 +                    }
   1.819 +                    default: {
   1.820 +                        videoFrameChunk = new DataChunk("00db");
   1.821 +                        WritableRaster raster = image.getRaster();
   1.822 +                        int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data
   1.823 +                        byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data
   1.824 +                        for (int y = imgHeight - 1; y >= 0; --y) { // Upside down
   1.825 +                            raster.getPixels(0, y, imgWidth, 1, raw);
   1.826 +                            for (int x = 0, n = imgWidth * 3; x < n; x += 3) {
   1.827 +                                bytes[x + 2] = (byte) raw[x]; // Blue
   1.828 +                                bytes[x + 1] = (byte) raw[x + 1]; // Green
   1.829 +                                bytes[x] = (byte) raw[x + 2]; // Red
   1.830 +                            }
   1.831 +                            videoFrameChunk.getOutputStream().write(bytes);
   1.832 +                        }
   1.833 +                        break;
   1.834 +                    }
   1.835 +                }
   1.836 +                break;
   1.837 +            }
   1.838 +            
   1.839 +            case JPG: {
   1.840 +                videoFrameChunk = new DataChunk("00dc");
   1.841 +                ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next();
   1.842 +                ImageWriteParam iwParam = iw.getDefaultWriteParam();
   1.843 +                iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
   1.844 +                iwParam.setCompressionQuality(quality);
   1.845 +                MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
   1.846 +                iw.setOutput(imgOut);
   1.847 +                IIOImage img = new IIOImage(image, null, null);
   1.848 +                iw.write(null, img, iwParam);
   1.849 +                iw.dispose();
   1.850 +                break;
   1.851 +            }
   1.852 +            case PNG:
   1.853 +            default: {
   1.854 +                videoFrameChunk = new DataChunk("00dc");
   1.855 +                ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next();
   1.856 +                ImageWriteParam iwParam = iw.getDefaultWriteParam();
   1.857 +                MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
   1.858 +                iw.setOutput(imgOut);
   1.859 +                IIOImage img = new IIOImage(image, null, null);
   1.860 +                iw.write(null, img, iwParam);
   1.861 +                iw.dispose();
   1.862 +                break;
   1.863 +            }
   1.864 +        }
   1.865 +        long length = getRelativeStreamPosition() - offset;
   1.866 +        moviChunk.add(videoFrameChunk);
   1.867 +        videoFrameChunk.finish();
   1.868 +
   1.869 +        videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync));
   1.870 +        if (getRelativeStreamPosition() > 1L << 32) {
   1.871 +            throw new IOException("AVI file is larger than 4 GB");
   1.872 +        }
   1.873 +    }
   1.874 +
   1.875 +    /**
   1.876 +     * Writes a frame from a file to the video track.
   1.877 +     * <p>
   1.878 +     * This method does not inspect the contents of the file.
   1.879 +     * For example, Its your responsibility to only add JPG files if you have
   1.880 +     * chosen the JPEG video format.
   1.881 +     * <p>
   1.882 +     * If you add all frames from files or from input streams, then you
   1.883 +     * have to explicitly set the dimension of the video track before you
   1.884 +     * call finish() or close().
   1.885 +     *
   1.886 +     * @param file The file which holds the image data.
   1.887 +     *
   1.888 +     * @throws IllegalStateException if the duration is less than 1.
   1.889 +     * @throws IOException if writing the image failed.
   1.890 +     */
   1.891 +    public void writeFrame(File file) throws IOException {
   1.892 +        FileInputStream in = null;
   1.893 +        try {
   1.894 +            in = new FileInputStream(file);
   1.895 +            writeFrame(in);
   1.896 +        } finally {
   1.897 +            if (in != null) {
   1.898 +                in.close();
   1.899 +            }
   1.900 +        }
   1.901 +    }
   1.902 +
   1.903 +    /**
   1.904 +     * Writes a frame to the video track.
   1.905 +     * <p>
   1.906 +     * This method does not inspect the contents of the file.
   1.907 +     * For example, its your responsibility to only add JPG files if you have
   1.908 +     * chosen the JPEG video format.
   1.909 +     * <p>
   1.910 +     * If you add all frames from files or from input streams, then you
   1.911 +     * have to explicitly set the dimension of the video track before you
   1.912 +     * call finish() or close().
   1.913 +     *
   1.914 +     * @param in The input stream which holds the image data.
   1.915 +     *
   1.916 +     * @throws IllegalArgumentException if the duration is less than 1.
   1.917 +     * @throws IOException if writing the image failed.
   1.918 +     */
   1.919 +    public void writeFrame(InputStream in) throws IOException {
   1.920 +        ensureOpen();
   1.921 +        ensureStarted();
   1.922 +
   1.923 +        DataChunk videoFrameChunk = new DataChunk(
   1.924 +                videoFormat == VideoFormat.RAW ? "00db" : "00dc");
   1.925 +        moviChunk.add(videoFrameChunk);
   1.926 +        OutputStream mdatOut = videoFrameChunk.getOutputStream();
   1.927 +        long offset = getRelativeStreamPosition();
   1.928 +        byte[] buf = new byte[512];
   1.929 +        int len;
   1.930 +        while ((len = in.read(buf)) != -1) {
   1.931 +            mdatOut.write(buf, 0, len);
   1.932 +        }
   1.933 +        long length = getRelativeStreamPosition() - offset;
   1.934 +        videoFrameChunk.finish();
   1.935 +        videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true));
   1.936 +        if (getRelativeStreamPosition() > 1L << 32) {
   1.937 +            throw new IOException("AVI file is larger than 4 GB");
   1.938 +        }
   1.939 +    }
   1.940 +
   1.941 +    /**
   1.942 +     * Closes the movie file as well as the stream being filtered.
   1.943 +     *
   1.944 +     * @exception IOException if an I/O error has occurred
   1.945 +     */
   1.946 +    public void close() throws IOException {
   1.947 +        if (state == States.STARTED) {
   1.948 +            finish();
   1.949 +        }
   1.950 +        if (state != States.CLOSED) {
   1.951 +            out.close();
   1.952 +            state = States.CLOSED;
   1.953 +        }
   1.954 +    }
   1.955 +
   1.956 +    /**
   1.957 +     * Finishes writing the contents of the AVI output stream without closing
   1.958 +     * the underlying stream. Use this method when applying multiple filters
   1.959 +     * in succession to the same output stream.
   1.960 +     *
   1.961 +     * @exception IllegalStateException if the dimension of the video track
   1.962 +     * has not been specified or determined yet.
   1.963 +     * @exception IOException if an I/O exception has occurred
   1.964 +     */
   1.965 +    public void finish() throws IOException {
   1.966 +        ensureOpen();
   1.967 +        if (state != States.FINISHED) {
   1.968 +            if (imgWidth == -1 || imgHeight == -1) {
   1.969 +                throw new IllegalStateException("image width and height must be specified");
   1.970 +            }
   1.971 +
   1.972 +            moviChunk.finish();
   1.973 +            writeEpilog();
   1.974 +            state = States.FINISHED;
   1.975 +            imgWidth = imgHeight = -1;
   1.976 +        }
   1.977 +    }
   1.978 +
   1.979 +    /**
   1.980 +     * Check to make sure that this stream has not been closed
   1.981 +     */
   1.982 +    private void ensureOpen() throws IOException {
   1.983 +        if (state == States.CLOSED) {
   1.984 +            throw new IOException("Stream closed");
   1.985 +        }
   1.986 +    }
   1.987 +
   1.988 +    /** Gets the position relative to the beginning of the QuickTime stream.
   1.989 +     * <p>
   1.990 +     * Usually this value is equal to the stream position of the underlying
   1.991 +     * ImageOutputStream, but can be larger if the underlying stream already
   1.992 +     * contained data.
   1.993 +     *
   1.994 +     * @return The relative stream position.
   1.995 +     * @throws IOException
   1.996 +     */
   1.997 +    private long getRelativeStreamPosition() throws IOException {
   1.998 +        return out.getStreamPosition() - streamOffset;
   1.999 +    }
  1.1000 +
  1.1001 +    /** Seeks relative to the beginning of the QuickTime stream.
  1.1002 +     * <p>
  1.1003 +     * Usually this equal to seeking in the underlying ImageOutputStream, but
  1.1004 +     * can be different if the underlying stream already contained data.
  1.1005 +     *
  1.1006 +     */
  1.1007 +    private void seekRelative(long newPosition) throws IOException {
  1.1008 +        out.seek(newPosition + streamOffset);
  1.1009 +    }
  1.1010 +
  1.1011 +    private void writeProlog() throws IOException {
  1.1012 +        // The file has the following structure:
  1.1013 +        //
  1.1014 +        // .RIFF AVI
  1.1015 +        // ..avih (AVI Header Chunk)
  1.1016 +        // ..LIST strl
  1.1017 +        // ...strh (Stream Header Chunk)
  1.1018 +        // ...strf (Stream Format Chunk)
  1.1019 +        // ..LIST movi
  1.1020 +        // ...00dc (Compressed video data chunk in Track 00, repeated for each frame)
  1.1021 +        // ..idx1 (List of video data chunks and their location in the file)
  1.1022 +
  1.1023 +        // The RIFF AVI Chunk holds the complete movie
  1.1024 +        aviChunk = new CompositeChunk("RIFF", "AVI ");
  1.1025 +        CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl");
  1.1026 +
  1.1027 +        // Write empty AVI Main Header Chunk - we fill the data in later
  1.1028 +        aviChunk.add(hdrlChunk);
  1.1029 +        avihChunk = new FixedSizeDataChunk("avih", 56);
  1.1030 +        avihChunk.seekToEndOfChunk();
  1.1031 +        hdrlChunk.add(avihChunk);
  1.1032 +
  1.1033 +        CompositeChunk strlChunk = new CompositeChunk("LIST", "strl");
  1.1034 +        hdrlChunk.add(strlChunk);
  1.1035 +
  1.1036 +        // Write empty AVI Stream Header Chunk - we fill the data in later
  1.1037 +        strhChunk = new FixedSizeDataChunk("strh", 56);
  1.1038 +        strhChunk.seekToEndOfChunk();
  1.1039 +        strlChunk.add(strhChunk);
  1.1040 +        strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4);
  1.1041 +        strfChunk.seekToEndOfChunk();
  1.1042 +        strlChunk.add(strfChunk);
  1.1043 +
  1.1044 +        moviChunk = new CompositeChunk("LIST", "movi");
  1.1045 +        aviChunk.add(moviChunk);
  1.1046 +
  1.1047 +
  1.1048 +    }
  1.1049 +
  1.1050 +    private void writeEpilog() throws IOException {
  1.1051 +       
  1.1052 +        long bufferSize = 0;
  1.1053 +        for (Sample s : videoFrames) {
  1.1054 +            if (s.length > bufferSize) {
  1.1055 +                bufferSize = s.length;
  1.1056 +            }
  1.1057 +        }
  1.1058 +
  1.1059 +
  1.1060 +        DataChunkOutputStream d;
  1.1061 +
  1.1062 +        /* Create Idx1 Chunk and write data
  1.1063 +         * -------------
  1.1064 +        typedef struct _avioldindex {
  1.1065 +        FOURCC  fcc;
  1.1066 +        DWORD   cb;
  1.1067 +        struct _avioldindex_entry {
  1.1068 +        DWORD   dwChunkId;
  1.1069 +        DWORD   dwFlags;
  1.1070 +        DWORD   dwOffset;
  1.1071 +        DWORD   dwSize;
  1.1072 +        } aIndex[];
  1.1073 +        } AVIOLDINDEX;
  1.1074 +         */
  1.1075 +        DataChunk idx1Chunk = new DataChunk("idx1");
  1.1076 +        aviChunk.add(idx1Chunk);
  1.1077 +        d = idx1Chunk.getOutputStream();
  1.1078 +        long moviListOffset = moviChunk.offset + 8;
  1.1079 +        //moviListOffset = 0;
  1.1080 +        for (Sample f : videoFrames) {
  1.1081 +
  1.1082 +            d.writeType(f.chunkType); // dwChunkId
  1.1083 +            // Specifies a FOURCC that identifies a stream in the AVI file. The
  1.1084 +            // FOURCC must have the form 'xxyy' where xx is the stream number and yy
  1.1085 +            // is a two-character code that identifies the contents of the stream:
  1.1086 +            //
  1.1087 +            // Two-character code   Description
  1.1088 +            //  db                  Uncompressed video frame
  1.1089 +            //  dc                  Compressed video frame
  1.1090 +            //  pc                  Palette change
  1.1091 +            //  wb                  Audio data
  1.1092 +
  1.1093 +            d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)//
  1.1094 +                    | (f.isSync ? 0x10 : 0x0)); // dwFlags
  1.1095 +            // Specifies a bitwise combination of zero or more of the following
  1.1096 +            // flags:
  1.1097 +            //
  1.1098 +            // Value    Name            Description
  1.1099 +            // 0x10     AVIIF_KEYFRAME  The data chunk is a key frame.
  1.1100 +            // 0x1      AVIIF_LIST      The data chunk is a 'rec ' list.
  1.1101 +            // 0x100    AVIIF_NO_TIME   The data chunk does not affect the timing of the
  1.1102 +            //                          stream. For example, this flag should be set for
  1.1103 +            //                          palette changes.
  1.1104 +
  1.1105 +            d.writeUInt(f.offset - moviListOffset); // dwOffset
  1.1106 +            // Specifies the location of the data chunk in the file. The value
  1.1107 +            // should be specified as an offset, in bytes, from the start of the
  1.1108 +            // 'movi' list; however, in some AVI files it is given as an offset from
  1.1109 +            // the start of the file.
  1.1110 +
  1.1111 +            d.writeUInt(f.length); // dwSize
  1.1112 +            // Specifies the size of the data chunk, in bytes.
  1.1113 +        }
  1.1114 +        idx1Chunk.finish();
  1.1115 +
  1.1116 +        /* Write Data into AVI Main Header Chunk
  1.1117 +         * -------------
  1.1118 +         * The AVIMAINHEADER structure defines global information in an AVI file.
  1.1119 +         * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx
  1.1120 +        typedef struct _avimainheader {
  1.1121 +        FOURCC fcc;
  1.1122 +        DWORD  cb;
  1.1123 +        DWORD  dwMicroSecPerFrame;
  1.1124 +        DWORD  dwMaxBytesPerSec;
  1.1125 +        DWORD  dwPaddingGranularity;
  1.1126 +        DWORD  dwFlags;
  1.1127 +        DWORD  dwTotalFrames;
  1.1128 +        DWORD  dwInitialFrames;
  1.1129 +        DWORD  dwStreams;
  1.1130 +        DWORD  dwSuggestedBufferSize;
  1.1131 +        DWORD  dwWidth;
  1.1132 +        DWORD  dwHeight;
  1.1133 +        DWORD  dwReserved[4];
  1.1134 +        } AVIMAINHEADER; */
  1.1135 +        avihChunk.seekToStartOfData();
  1.1136 +        d = avihChunk.getOutputStream();
  1.1137 +
  1.1138 +        d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame
  1.1139 +        // Specifies the number of microseconds between frames.
  1.1140 +        // This value indicates the overall timing for the file.
  1.1141 +
  1.1142 +        d.writeUInt(0); // dwMaxBytesPerSec
  1.1143 +        // Specifies the approximate maximum data rate of the file.
  1.1144 +        // This value indicates the number of bytes per second the system
  1.1145 +        // must handle to present an AVI sequence as specified by the other
  1.1146 +        // parameters contained in the main header and stream header chunks.
  1.1147 +
  1.1148 +        d.writeUInt(0); // dwPaddingGranularity
  1.1149 +        // Specifies the alignment for data, in bytes. Pad the data to multiples
  1.1150 +        // of this value.
  1.1151 +
  1.1152 +        d.writeUInt(0x10); // dwFlags (0x10 == hasIndex)
  1.1153 +        // Contains a bitwise combination of zero or more of the following
  1.1154 +        // flags:
  1.1155 +        //
  1.1156 +        // Value   Name         Description
  1.1157 +        // 0x10    AVIF_HASINDEX Indicates the AVI file has an index.
  1.1158 +        // 0x20    AVIF_MUSTUSEINDEX Indicates that application should use the
  1.1159 +        //                      index, rather than the physical ordering of the
  1.1160 +        //                      chunks in the file, to determine the order of
  1.1161 +        //                      presentation of the data. For example, this flag
  1.1162 +        //                      could be used to create a list of frames for
  1.1163 +        //                      editing.
  1.1164 +        // 0x100   AVIF_ISINTERLEAVED Indicates the AVI file is interleaved.
  1.1165 +        // 0x1000  AVIF_WASCAPTUREFILE Indicates the AVI file is a specially
  1.1166 +        //                      allocated file used for capturing real-time
  1.1167 +        //                      video. Applications should warn the user before
  1.1168 +        //                      writing over a file with this flag set because
  1.1169 +        //                      the user probably defragmented this file.
  1.1170 +        // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted
  1.1171 +        //                      data and software. When this flag is used,
  1.1172 +        //                      software should not permit the data to be
  1.1173 +        //                      duplicated.
  1.1174 +
  1.1175 +        d.writeUInt(videoFrames.size()); // dwTotalFrames
  1.1176 +        // Specifies the total number of frames of data in the file.
  1.1177 +
  1.1178 +        d.writeUInt(0); // dwInitialFrames
  1.1179 +        // Specifies the initial frame for interleaved files. Noninterleaved
  1.1180 +        // files should specify zero. If you are creating interleaved files,
  1.1181 +        // specify the number of frames in the file prior to the initial frame
  1.1182 +        // of the AVI sequence in this member.
  1.1183 +        // To give the audio driver enough audio to work with, the audio data in
  1.1184 +        // an interleaved file must be skewed from the video data. Typically,
  1.1185 +        // the audio data should be moved forward enough frames to allow
  1.1186 +        // approximately 0.75 seconds of audio data to be preloaded. The
  1.1187 +        // dwInitialRecords member should be set to the number of frames the
  1.1188 +        // audio is skewed. Also set the same value for the dwInitialFrames
  1.1189 +        // member of the AVISTREAMHEADER structure in the audio stream header
  1.1190 +
  1.1191 +        d.writeUInt(1); // dwStreams
  1.1192 +        // Specifies the number of streams in the file. For example, a file with
  1.1193 +        // audio and video has two streams.
  1.1194 +
  1.1195 +        d.writeUInt(bufferSize); // dwSuggestedBufferSize
  1.1196 +        // Specifies the suggested buffer size for reading the file. Generally,
  1.1197 +        // this size should be large enough to contain the largest chunk in the
  1.1198 +        // file. If set to zero, or if it is too small, the playback software
  1.1199 +        // will have to reallocate memory during playback, which will reduce
  1.1200 +        // performance. For an interleaved file, the buffer size should be large
  1.1201 +        // enough to read an entire record, and not just a chunk.
  1.1202 +
  1.1203 +
  1.1204 +        d.writeUInt(imgWidth); // dwWidth
  1.1205 +        // Specifies the width of the AVI file in pixels.
  1.1206 +
  1.1207 +        d.writeUInt(imgHeight); // dwHeight
  1.1208 +        // Specifies the height of the AVI file in pixels.
  1.1209 +
  1.1210 +        d.writeUInt(0); // dwReserved[0]
  1.1211 +        d.writeUInt(0); // dwReserved[1]
  1.1212 +        d.writeUInt(0); // dwReserved[2]
  1.1213 +        d.writeUInt(0); // dwReserved[3]
  1.1214 +        // Reserved. Set this array to zero.
  1.1215 +
  1.1216 +        /* Write Data into AVI Stream Header Chunk
  1.1217 +         * -------------
  1.1218 +         * The AVISTREAMHEADER structure contains information about one stream
  1.1219 +         * in an AVI file.
  1.1220 +         * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx
  1.1221 +        typedef struct _avistreamheader {
  1.1222 +        FOURCC fcc;
  1.1223 +        DWORD  cb;
  1.1224 +        FOURCC fccType;
  1.1225 +        FOURCC fccHandler;
  1.1226 +        DWORD  dwFlags;
  1.1227 +        WORD   wPriority;
  1.1228 +        WORD   wLanguage;
  1.1229 +        DWORD  dwInitialFrames;
  1.1230 +        DWORD  dwScale;
  1.1231 +        DWORD  dwRate;
  1.1232 +        DWORD  dwStart;
  1.1233 +        DWORD  dwLength;
  1.1234 +        DWORD  dwSuggestedBufferSize;
  1.1235 +        DWORD  dwQuality;
  1.1236 +        DWORD  dwSampleSize;
  1.1237 +        struct {
  1.1238 +        short int left;
  1.1239 +        short int top;
  1.1240 +        short int right;
  1.1241 +        short int bottom;
  1.1242 +        }  rcFrame;
  1.1243 +        } AVISTREAMHEADER;
  1.1244 +         */
  1.1245 +        strhChunk.seekToStartOfData();
  1.1246 +        d = strhChunk.getOutputStream();
  1.1247 +        d.writeType("vids"); // fccType - vids for video stream
  1.1248 +        // Contains a FOURCC that specifies the type of the data contained in
  1.1249 +        // the stream. The following standard AVI values for video and audio are
  1.1250 +        // defined:
  1.1251 +        //
  1.1252 +        // FOURCC   Description
  1.1253 +        // 'auds'   Audio stream
  1.1254 +        // 'mids'   MIDI stream
  1.1255 +        // 'txts'   Text stream
  1.1256 +        // 'vids'   Video stream
  1.1257 +
  1.1258 +        switch (videoFormat) {
  1.1259 +            case RAW:
  1.1260 +                d.writeType("DIB "); // fccHandler - DIB for Raw RGB
  1.1261 +                break;
  1.1262 +            case RLE:
  1.1263 +                d.writeType("RLE "); // fccHandler - Microsoft RLE
  1.1264 +                break;
  1.1265 +            case JPG:
  1.1266 +                d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG
  1.1267 +                break;
  1.1268 +            case PNG:
  1.1269 +            default:
  1.1270 +                d.writeType("png "); // fccHandler - png for PNG
  1.1271 +                break;
  1.1272 +        }
  1.1273 +        // Optionally, contains a FOURCC that identifies a specific data
  1.1274 +        // handler. The data handler is the preferred handler for the stream.
  1.1275 +        // For audio and video streams, this specifies the codec for decoding
  1.1276 +        // the stream.
  1.1277 +
  1.1278 +        if (imgDepth <= 8) {
  1.1279 +            d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES
  1.1280 +        } else {
  1.1281 +            d.writeUInt(0); // dwFlags
  1.1282 +        }
  1.1283 +
  1.1284 +        // Contains any flags for the data stream. The bits in the high-order
  1.1285 +        // word of these flags are specific to the type of data contained in the
  1.1286 +        // stream. The following standard flags are defined:
  1.1287 +        //
  1.1288 +        // Value    Name        Description
  1.1289 +        //          AVISF_DISABLED 0x00000001 Indicates this stream should not
  1.1290 +        //                      be enabled by default.
  1.1291 +        //          AVISF_VIDEO_PALCHANGES 0x00010000
  1.1292 +        //                      Indicates this video stream contains
  1.1293 +        //                      palette changes. This flag warns the playback
  1.1294 +        //                      software that it will need to animate the
  1.1295 +        //                      palette.
  1.1296 +
  1.1297 +        d.writeUShort(0); // wPriority
  1.1298 +        // Specifies priority of a stream type. For example, in a file with
  1.1299 +        // multiple audio streams, the one with the highest priority might be
  1.1300 +        // the default stream.
  1.1301 +
  1.1302 +        d.writeUShort(0); // wLanguage
  1.1303 +        // Language tag.
  1.1304 +
  1.1305 +        d.writeUInt(0); // dwInitialFrames
  1.1306 +        // Specifies how far audio data is skewed ahead of the video frames in
  1.1307 +        // interleaved files. Typically, this is about 0.75 seconds. If you are
  1.1308 +        // creating interleaved files, specify the number of frames in the file
  1.1309 +        // prior to the initial frame of the AVI sequence in this member. For
  1.1310 +        // more information, see the remarks for the dwInitialFrames member of
  1.1311 +        // the AVIMAINHEADER structure.
  1.1312 +
  1.1313 +        d.writeUInt(timeScale); // dwScale
  1.1314 +        // Used with dwRate to specify the time scale that this stream will use.
  1.1315 +        // Dividing dwRate by dwScale gives the number of samples per second.
  1.1316 +        // For video streams, this is the frame rate. For audio streams, this
  1.1317 +        // rate corresponds to the time needed to play nBlockAlign bytes of
  1.1318 +        // audio, which for PCM audio is the just the sample rate.
  1.1319 +
  1.1320 +        d.writeUInt(frameRate); // dwRate
  1.1321 +        // See dwScale.
  1.1322 +
  1.1323 +        d.writeUInt(0); // dwStart
  1.1324 +        // Specifies the starting time for this stream. The units are defined by
  1.1325 +        // the dwRate and dwScale members in the main file header. Usually, this
  1.1326 +        // is zero, but it can specify a delay time for a stream that does not
  1.1327 +        // start concurrently with the file.
  1.1328 +
  1.1329 +        d.writeUInt(videoFrames.size()); // dwLength
  1.1330 +        // Specifies the length of this stream. The units are defined by the
  1.1331 +        // dwRate and dwScale members of the stream's header.
  1.1332 +
  1.1333 +        d.writeUInt(bufferSize); // dwSuggestedBufferSize
  1.1334 +        // Specifies how large a buffer should be used to read this stream.
  1.1335 +        // Typically, this contains a value corresponding to the largest chunk
  1.1336 +        // present in the stream. Using the correct buffer size makes playback
  1.1337 +        // more efficient. Use zero if you do not know the correct buffer size.
  1.1338 +
  1.1339 +        d.writeInt(-1); // dwQuality
  1.1340 +        // Specifies an indicator of the quality of the data in the stream.
  1.1341 +        // Quality is represented as a number between 0 and 10,000.
  1.1342 +        // For compressed data, this typically represents the value of the
   1.1343 +        // quality parameter passed to the compression software. If set to -1,
  1.1344 +        // drivers use the default quality value.
  1.1345 +
  1.1346 +        d.writeUInt(0); // dwSampleSize
  1.1347 +        // Specifies the size of a single sample of data. This is set to zero
  1.1348 +        // if the samples can vary in size. If this number is nonzero, then
  1.1349 +        // multiple samples of data can be grouped into a single chunk within
  1.1350 +        // the file. If it is zero, each sample of data (such as a video frame)
  1.1351 +        // must be in a separate chunk. For video streams, this number is
  1.1352 +        // typically zero, although it can be nonzero if all video frames are
  1.1353 +        // the same size. For audio streams, this number should be the same as
  1.1354 +        // the nBlockAlign member of the WAVEFORMATEX structure describing the
  1.1355 +        // audio.
  1.1356 +
  1.1357 +        d.writeUShort(0); // rcFrame.left
  1.1358 +        d.writeUShort(0); // rcFrame.top
  1.1359 +        d.writeUShort(imgWidth); // rcFrame.right
  1.1360 +        d.writeUShort(imgHeight); // rcFrame.bottom
  1.1361 +        // Specifies the destination rectangle for a text or video stream within
  1.1362 +        // the movie rectangle specified by the dwWidth and dwHeight members of
  1.1363 +        // the AVI main header structure. The rcFrame member is typically used
  1.1364 +        // in support of multiple video streams. Set this rectangle to the
  1.1365 +        // coordinates corresponding to the movie rectangle to update the whole
  1.1366 +        // movie rectangle. Units for this member are pixels. The upper-left
  1.1367 +        // corner of the destination rectangle is relative to the upper-left
  1.1368 +        // corner of the movie rectangle.
  1.1369 +
   1.1370 +        /* Write BITMAPINFOHEADER Data into AVI Stream Format Chunk
   1.1371 +         * -------------
  1.1372 +         * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
  1.1373 +        typedef struct tagBITMAPINFOHEADER {
  1.1374 +        DWORD  biSize;
  1.1375 +        LONG   biWidth;
  1.1376 +        LONG   biHeight;
  1.1377 +        WORD   biPlanes;
  1.1378 +        WORD   biBitCount;
  1.1379 +        DWORD  biCompression;
  1.1380 +        DWORD  biSizeImage;
  1.1381 +        LONG   biXPelsPerMeter;
  1.1382 +        LONG   biYPelsPerMeter;
  1.1383 +        DWORD  biClrUsed;
  1.1384 +        DWORD  biClrImportant;
  1.1385 +        } BITMAPINFOHEADER;
  1.1386 +         */
  1.1387 +        strfChunk.seekToStartOfData();
  1.1388 +        d = strfChunk.getOutputStream();
  1.1389 +        d.writeUInt(40); // biSize
  1.1390 +        // Specifies the number of bytes required by the structure. This value
  1.1391 +        // does not include the size of the color table or the size of the color
  1.1392 +        // masks, if they are appended to the end of structure.
  1.1393 +
  1.1394 +        d.writeInt(imgWidth); // biWidth
  1.1395 +        // Specifies the width of the bitmap, in pixels.
  1.1396 +
  1.1397 +        d.writeInt(imgHeight); // biHeight
  1.1398 +        // Specifies the height of the bitmap, in pixels.
  1.1399 +        //
  1.1400 +        // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
  1.1401 +        // a bottom-up DIB with the origin at the lower left corner. If biHeight
  1.1402 +        // is negative, the bitmap is a top-down DIB with the origin at the
  1.1403 +        // upper left corner.
  1.1404 +        // For YUV bitmaps, the bitmap is always top-down, regardless of the
   1.1405 +        // sign of biHeight. Decoders should offer YUV formats with positive
  1.1406 +        // biHeight, but for backward compatibility they should accept YUV
  1.1407 +        // formats with either positive or negative biHeight.
  1.1408 +        // For compressed formats, biHeight must be positive, regardless of
  1.1409 +        // image orientation.
  1.1410 +
  1.1411 +        d.writeShort(1); // biPlanes
  1.1412 +        // Specifies the number of planes for the target device. This value must
  1.1413 +        // be set to 1.
  1.1414 +
  1.1415 +        d.writeShort(imgDepth); // biBitCount
  1.1416 +        // Specifies the number of bits per pixel (bpp).  For uncompressed
  1.1417 +        // formats, this value is the average number of bits per pixel. For
  1.1418 +        // compressed formats, this value is the implied bit depth of the
  1.1419 +        // uncompressed image, after the image has been decoded.
  1.1420 +
  1.1421 +        switch (videoFormat) {
  1.1422 +            case RAW:
  1.1423 +            default:
  1.1424 +                d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
  1.1425 +                break;
  1.1426 +            case RLE:
  1.1427 +                if (imgDepth == 8) {
  1.1428 +                    d.writeInt(1); // biCompression - BI_RLE8
  1.1429 +                } else if (imgDepth == 4) {
  1.1430 +                    d.writeInt(2); // biCompression - BI_RLE4
  1.1431 +                } else {
  1.1432 +                    throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
  1.1433 +                }
  1.1434 +                break;
  1.1435 +            case JPG:
  1.1436 +                d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
  1.1437 +                break;
  1.1438 +            case PNG:
  1.1439 +                d.writeType("png "); // biCompression - png for PNG
  1.1440 +                break;
  1.1441 +        }
  1.1442 +        // For compressed video and YUV formats, this member is a FOURCC code,
  1.1443 +        // specified as a DWORD in little-endian order. For example, YUYV video
  1.1444 +        // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
  1.1445 +        // Codes.
  1.1446 +        //
  1.1447 +        // For uncompressed RGB formats, the following values are possible:
  1.1448 +        //
  1.1449 +        // Value        Description
  1.1450 +        // BI_RGB       0x00000000 Uncompressed RGB.
  1.1451 +        // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks.
  1.1452 +        //                         Valid for 16-bpp and 32-bpp bitmaps.
  1.1453 +        //
  1.1454 +        // Note that BI_JPG and BI_PNG are not valid video formats.
  1.1455 +        //
  1.1456 +        // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
  1.1457 +        // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
  1.1458 +        // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
  1.1459 +        // structure to determine the specific RGB type.
  1.1460 +
  1.1461 +        switch (videoFormat) {
  1.1462 +            case RAW:
  1.1463 +                d.writeInt(0); // biSizeImage
  1.1464 +                break;
  1.1465 +            case RLE:
  1.1466 +            case JPG:
  1.1467 +            case PNG:
  1.1468 +            default:
  1.1469 +                if (imgDepth == 4) {
  1.1470 +                    d.writeInt(imgWidth * imgHeight / 2); // biSizeImage
  1.1471 +                } else {
  1.1472 +                    int bytesPerPixel = Math.max(1, imgDepth / 8);
  1.1473 +                    d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage
  1.1474 +                }
  1.1475 +                break;
  1.1476 +        }
  1.1477 +        // Specifies the size, in bytes, of the image. This can be set to 0 for
  1.1478 +        // uncompressed RGB bitmaps.
  1.1479 +
  1.1480 +        d.writeInt(0); // biXPelsPerMeter
  1.1481 +        // Specifies the horizontal resolution, in pixels per meter, of the
  1.1482 +        // target device for the bitmap.
  1.1483 +
  1.1484 +        d.writeInt(0); // biYPelsPerMeter
  1.1485 +        // Specifies the vertical resolution, in pixels per meter, of the target
  1.1486 +        // device for the bitmap.
  1.1487 +
  1.1488 +        d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed
  1.1489 +        // Specifies the number of color indices in the color table that are
  1.1490 +        // actually used by the bitmap.
  1.1491 +
  1.1492 +        d.writeInt(0); // biClrImportant
  1.1493 +        // Specifies the number of color indices that are considered important
  1.1494 +        // for displaying the bitmap. If this value is zero, all colors are
  1.1495 +        // important.
  1.1496 +
  1.1497 +        if (palette != null) {
  1.1498 +            for (int i = 0, n = palette.getMapSize(); i < n; ++i) {
  1.1499 +                /*
  1.1500 +                 * typedef struct tagRGBQUAD {
  1.1501 +                BYTE rgbBlue;
  1.1502 +                BYTE rgbGreen;
  1.1503 +                BYTE rgbRed;
  1.1504 +                BYTE rgbReserved; // This member is reserved and must be zero.
  1.1505 +                } RGBQUAD;
  1.1506 +                 */
  1.1507 +                d.write(palette.getBlue(i));
  1.1508 +                d.write(palette.getGreen(i));
  1.1509 +                d.write(palette.getRed(i));
  1.1510 +                d.write(0);
  1.1511 +            }
  1.1512 +        }
  1.1513 +
  1.1514 +
  1.1515 +        // -----------------
  1.1516 +        aviChunk.finish();
  1.1517 +    }
  1.1518 +}