diff src/com/aurellem/capture/AVIOutputStream.java @ 3:a92de00f0414

migrating files
author Robert McIntyre <rlm@mit.edu>
date Tue, 25 Oct 2011 11:55:55 -0700
parents
children
line wrap: on
line diff
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/src/com/aurellem/capture/AVIOutputStream.java	Tue Oct 25 11:55:55 2011 -0700
     1.3 @@ -0,0 +1,1548 @@
     1.4 +/**
     1.5 + * @(#)AVIOutputStream.java  1.5.1  2011-01-17
     1.6 + *
     1.7 + * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
     1.8 + * All rights reserved.
     1.9 + *
    1.10 + * You may not use, copy or modify this file, except in compliance with the
    1.11 + * license agreement you entered into with Werner Randelshofer.
    1.12 + * For details see accompanying license terms.
    1.13 + */
    1.14 +package com.aurellem.capture;
    1.15 +
    1.16 +import java.awt.Dimension;
    1.17 +import java.awt.image.BufferedImage;
    1.18 +import java.awt.image.DataBufferByte;
    1.19 +import java.awt.image.IndexColorModel;
    1.20 +import java.awt.image.WritableRaster;
    1.21 +import java.io.File;
    1.22 +import java.io.FileInputStream;
    1.23 +import java.io.IOException;
    1.24 +import java.io.InputStream;
    1.25 +import java.io.OutputStream;
    1.26 +import java.util.Arrays;
    1.27 +import java.util.Date;
    1.28 +import java.util.LinkedList;
    1.29 +
    1.30 +import javax.imageio.IIOImage;
    1.31 +import javax.imageio.ImageIO;
    1.32 +import javax.imageio.ImageWriteParam;
    1.33 +import javax.imageio.ImageWriter;
    1.34 +import javax.imageio.stream.FileImageOutputStream;
    1.35 +import javax.imageio.stream.ImageOutputStream;
    1.36 +import javax.imageio.stream.MemoryCacheImageOutputStream;
    1.37 +
    1.38 +/**
    1.39 + * This class supports writing of images into an AVI 1.0 video file.
    1.40 + * <p>
    1.41 + * The images are written as video frames.
    1.42 + * <p>
    1.43 + * Video frames can be encoded with one of the following formats:
    1.44 + * <ul>
    1.45 + * <li>JPEG</li>
    1.46 + * <li>PNG</li>
    1.47 + * <li>RAW</li>
    1.48 + * <li>RLE</li>
    1.49 + * </ul>
    1.50 + * All frames must have the same format.
     1.51 + * When JPG is used, each frame can have an individual encoding quality.
    1.52 + * <p>
    1.53 + * All frames in an AVI file must have the same duration. The duration can
    1.54 + * be set by setting an appropriate pair of values using methods
    1.55 + * {@link #setFrameRate} and {@link #setTimeScale}.
    1.56 + * <p>
    1.57 + * The length of an AVI 1.0 file is limited to 1 GB.
    1.58 + * This class supports lengths of up to 4 GB, but such files may not work on
    1.59 + * all players.
    1.60 + * <p>
    1.61 + * For detailed information about the AVI RIFF file format see:<br>
    1.62 + * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br>
    1.63 + * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br>
    1.64 + * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br>
    1.65 + *
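          + * A minimal usage sketch (the output file name and the {@code frames}
          + * collection below are placeholders, not part of this API):
          + * <pre>
          + *   AVIOutputStream out = new AVIOutputStream(new File("video.avi"),
          + *           AVIOutputStream.VideoFormat.JPG);
          + *   out.setVideoCompressionQuality(0.9f);
          + *   out.setTimeScale(1);
          + *   out.setFrameRate(30);        // 30 frames per second
          + *   for (BufferedImage frame : frames) {
          + *       out.writeFrame(frame);   // dimension is taken from the first frame
          + *   }
          + *   out.close();
          + * </pre>
          + *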
    1.66 + * @author Werner Randelshofer
     1.67 + * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream.
    1.68 + * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format.
    1.69 + * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets
    1.70 + * in "idx1" chunk.
    1.71 + * <br>1.3.2 2010-12-27 File size limit is 1 GB.
    1.72 + * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets.
    1.73 + * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream.
    1.74 + * Added method getVideoDimension().
    1.75 + * <br>1.2 2009-08-29 Adds support for RAW video format.
    1.76 + * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih
    1.77 + * chunk. Changed the API to reflect that AVI works with frame rates instead of
    1.78 + * with frame durations.
    1.79 + * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG
    1.80 + * encoded video.
    1.81 + * <br>1.0 2008-08-11 Created.
    1.82 + */
    1.83 +public class AVIOutputStream {
    1.84 +
    1.85 +    /**
    1.86 +     * Underlying output stream.
    1.87 +     */
    1.88 +    private ImageOutputStream out;
     1.89 +    /** The offset of the AVI stream in the underlying ImageOutputStream.
    1.90 +     * Normally this is 0 unless the underlying stream already contained data
    1.91 +     * when it was passed to the constructor.
    1.92 +     */
    1.93 +    private long streamOffset;
    1.94 +    /** Previous frame for delta compression. */
    1.95 +    private Object previousData;
    1.96 +
    1.97 +    /**
    1.98 +     * Supported video encodings.
    1.99 +     */
   1.100 +    public static enum VideoFormat {
   1.101 +
   1.102 +        RAW, RLE, JPG, PNG;
   1.103 +    }
   1.104 +    /**
    1.105 +     * Current video format.
   1.106 +     */
   1.107 +    private VideoFormat videoFormat;
   1.108 +    /**
   1.109 +     * Quality of JPEG encoded video frames.
   1.110 +     */
   1.111 +    private float quality = 0.9f;
   1.112 +    /**
   1.113 +     * Creation time of the movie output stream.
   1.114 +     */
   1.115 +    private Date creationTime;
   1.116 +    /**
   1.117 +     * Width of the video frames. All frames must have the same width.
   1.118 +     * The value -1 is used to mark unspecified width.
   1.119 +     */
   1.120 +    private int imgWidth = -1;
   1.121 +    /**
   1.122 +     * Height of the video frames. All frames must have the same height.
   1.123 +     * The value -1 is used to mark unspecified height.
   1.124 +     */
   1.125 +    private int imgHeight = -1;
   1.126 +    /** Number of bits per pixel. */
   1.127 +    private int imgDepth = 24;
   1.128 +    /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */
   1.129 +    private IndexColorModel palette;
   1.130 +    private IndexColorModel previousPalette;
   1.131 +    /** Video encoder. */
   1.132 +    
   1.133 +    /**
   1.134 +     * The timeScale of the movie.
   1.135 +     * <p>
   1.136 +     * Used with frameRate to specify the time scale that this stream will use.
   1.137 +     * Dividing frameRate by timeScale gives the number of samples per second.
   1.138 +     * For video streams, this is the frame rate. For audio streams, this rate
   1.139 +     * corresponds to the time needed to play nBlockAlign bytes of audio, which
    1.140 +     * for PCM audio is just the sample rate.
   1.141 +     */
   1.142 +    private int timeScale = 1;
   1.143 +    /**
   1.144 +     * The frameRate of the movie in timeScale units.
   1.145 +     * <p>
   1.146 +     * @see timeScale
   1.147 +     */
   1.148 +    private int frameRate = 30;
   1.149 +    /** Interval between keyframes. */
   1.150 +    private int syncInterval = 30;
   1.151 +
   1.152 +    /**
   1.153 +     * The states of the movie output stream.
   1.154 +     */
   1.155 +    private static enum States {
   1.156 +
   1.157 +        STARTED, FINISHED, CLOSED;
   1.158 +    }
   1.159 +    /**
   1.160 +     * The current state of the movie output stream.
   1.161 +     */
   1.162 +    private States state = States.FINISHED;
   1.163 +
   1.164 +    /**
   1.165 +     * AVI stores media data in samples.
   1.166 +     * A sample is a single element in a sequence of time-ordered data.
   1.167 +     */
   1.168 +    private static class Sample {
   1.169 +
   1.170 +        String chunkType;
   1.171 +        /** Offset of the sample relative to the start of the AVI file.
   1.172 +         */
   1.173 +        long offset;
   1.174 +        /** Data length of the sample. */
   1.175 +        long length;
   1.176 +        /**
   1.177 +         * The duration of the sample in time scale units.
   1.178 +         */
   1.179 +        int duration;
   1.180 +        /** Whether the sample is a sync-sample. */
   1.181 +        boolean isSync;
   1.182 +
   1.183 +        /**
   1.184 +         * Creates a new sample.
   1.185 +         * @param duration
   1.186 +         * @param offset
   1.187 +         * @param length
   1.188 +         */
   1.189 +        public Sample(String chunkId, int duration, long offset, long length, boolean isSync) {
   1.190 +            this.chunkType = chunkId;
   1.191 +            this.duration = duration;
   1.192 +            this.offset = offset;
   1.193 +            this.length = length;
   1.194 +            this.isSync = isSync;
   1.195 +        }
   1.196 +    }
   1.197 +    /**
   1.198 +     * List of video frames.
   1.199 +     */
   1.200 +    private LinkedList<Sample> videoFrames;
   1.201 +    /**
   1.202 +     * This chunk holds the whole AVI content.
   1.203 +     */
   1.204 +    private CompositeChunk aviChunk;
   1.205 +    /**
   1.206 +     * This chunk holds the movie frames.
   1.207 +     */
   1.208 +    private CompositeChunk moviChunk;
   1.209 +    /**
   1.210 +     * This chunk holds the AVI Main Header.
   1.211 +     */
   1.212 +    FixedSizeDataChunk avihChunk;
   1.213 +    /**
   1.214 +     * This chunk holds the AVI Stream Header.
   1.215 +     */
   1.216 +    FixedSizeDataChunk strhChunk;
   1.217 +    /**
   1.218 +     * This chunk holds the AVI Stream Format Header.
   1.219 +     */
   1.220 +    FixedSizeDataChunk strfChunk;
   1.221 +
   1.222 +    /**
   1.223 +     * Chunk base class.
   1.224 +     */
   1.225 +    private abstract class Chunk {
   1.226 +
   1.227 +        /**
    1.228 +         * The chunkType of the chunk. A String with a length of 4 characters.
   1.229 +         */
   1.230 +        protected String chunkType;
   1.231 +        /**
   1.232 +         * The offset of the chunk relative to the start of the
   1.233 +         * ImageOutputStream.
   1.234 +         */
   1.235 +        protected long offset;
   1.236 +
   1.237 +        /**
   1.238 +         * Creates a new Chunk at the current position of the ImageOutputStream.
   1.239 +         * @param chunkType The chunkType of the chunk. A string with a length of 4 characters.
   1.240 +         */
   1.241 +        public Chunk(String chunkType) throws IOException {
   1.242 +            this.chunkType = chunkType;
   1.243 +            offset = getRelativeStreamPosition();
   1.244 +        }
   1.245 +
   1.246 +        /**
   1.247 +         * Writes the chunk to the ImageOutputStream and disposes it.
   1.248 +         */
   1.249 +        public abstract void finish() throws IOException;
   1.250 +
   1.251 +        /**
   1.252 +         * Returns the size of the chunk including the size of the chunk header.
   1.253 +         * @return The size of the chunk.
   1.254 +         */
   1.255 +        public abstract long size();
   1.256 +    }
   1.257 +
   1.258 +    /**
   1.259 +     * A CompositeChunk contains an ordered list of Chunks.
   1.260 +     */
   1.261 +    private class CompositeChunk extends Chunk {
   1.262 +
   1.263 +        /**
    1.264 +         * The type of the composite. A String with a length of 4 characters.
   1.265 +         */
   1.266 +        protected String compositeType;
   1.267 +        private LinkedList<Chunk> children;
   1.268 +        private boolean finished;
   1.269 +
   1.270 +        /**
   1.271 +         * Creates a new CompositeChunk at the current position of the
   1.272 +         * ImageOutputStream.
   1.273 +         * @param compositeType The type of the composite.
   1.274 +         * @param chunkType The type of the chunk.
   1.275 +         */
   1.276 +        public CompositeChunk(String compositeType, String chunkType) throws IOException {
   1.277 +            super(chunkType);
   1.278 +            this.compositeType = compositeType;
   1.279 +            //out.write
   1.280 +            out.writeLong(0); // make room for the chunk header
   1.281 +            out.writeInt(0); // make room for the chunk header
   1.282 +            children = new LinkedList<Chunk>();
   1.283 +        }
   1.284 +
   1.285 +        public void add(Chunk child) throws IOException {
   1.286 +            if (children.size() > 0) {
   1.287 +                children.getLast().finish();
   1.288 +            }
   1.289 +            children.add(child);
   1.290 +        }
   1.291 +
   1.292 +        /**
   1.293 +         * Writes the chunk and all its children to the ImageOutputStream
   1.294 +         * and disposes of all resources held by the chunk.
   1.295 +         * @throws java.io.IOException
   1.296 +         */
   1.297 +        @Override
   1.298 +        public void finish() throws IOException {
   1.299 +            if (!finished) {
   1.300 +                if (size() > 0xffffffffL) {
   1.301 +                    throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size());
   1.302 +                }
   1.303 +
   1.304 +                long pointer = getRelativeStreamPosition();
   1.305 +                seekRelative(offset);
   1.306 +
   1.307 +                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
   1.308 +                headerData.writeType(compositeType);
   1.309 +                headerData.writeUInt(size() - 8);
   1.310 +                headerData.writeType(chunkType);
   1.311 +                for (Chunk child : children) {
   1.312 +                    child.finish();
   1.313 +                }
   1.314 +                seekRelative(pointer);
   1.315 +                if (size() % 2 == 1) {
   1.316 +                    out.writeByte(0); // write pad byte
   1.317 +                }
   1.318 +                finished = true;
   1.319 +            }
   1.320 +        }
   1.321 +
   1.322 +        @Override
   1.323 +        public long size() {
   1.324 +            long length = 12;
   1.325 +            for (Chunk child : children) {
   1.326 +                length += child.size() + child.size() % 2;
   1.327 +            }
   1.328 +            return length;
   1.329 +        }
   1.330 +    }
   1.331 +
   1.332 +    /**
   1.333 +     * Data Chunk.
   1.334 +     */
   1.335 +    private class DataChunk extends Chunk {
   1.336 +
   1.337 +        private DataChunkOutputStream data;
   1.338 +        private boolean finished;
   1.339 +
   1.340 +        /**
   1.341 +         * Creates a new DataChunk at the current position of the
   1.342 +         * ImageOutputStream.
   1.343 +         * @param chunkType The chunkType of the chunk.
   1.344 +         */
   1.345 +        public DataChunk(String name) throws IOException {
   1.346 +            super(name);
   1.347 +            out.writeLong(0); // make room for the chunk header
   1.348 +            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
   1.349 +        }
   1.350 +
   1.351 +        public DataChunkOutputStream getOutputStream() {
   1.352 +            if (finished) {
   1.353 +                throw new IllegalStateException("DataChunk is finished");
   1.354 +            }
   1.355 +            return data;
   1.356 +        }
   1.357 +
   1.358 +        /**
    1.359 +         * Returns the offset of this chunk relative to the beginning of the random access file.
    1.360 +         * @return the offset of this chunk
   1.361 +         */
   1.362 +        public long getOffset() {
   1.363 +            return offset;
   1.364 +        }
   1.365 +
   1.366 +        @Override
   1.367 +        public void finish() throws IOException {
   1.368 +            if (!finished) {
   1.369 +                long sizeBefore = size();
   1.370 +
   1.371 +                if (size() > 0xffffffffL) {
   1.372 +                    throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size());
   1.373 +                }
   1.374 +
   1.375 +                long pointer = getRelativeStreamPosition();
   1.376 +                seekRelative(offset);
   1.377 +
   1.378 +                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
   1.379 +                headerData.writeType(chunkType);
   1.380 +                headerData.writeUInt(size() - 8);
   1.381 +                seekRelative(pointer);
   1.382 +                if (size() % 2 == 1) {
   1.383 +                    out.writeByte(0); // write pad byte
   1.384 +                }
   1.385 +                finished = true;
   1.386 +                long sizeAfter = size();
   1.387 +                if (sizeBefore != sizeAfter) {
   1.388 +                    System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter);
   1.389 +                }
   1.390 +            }
   1.391 +        }
   1.392 +
   1.393 +        @Override
   1.394 +        public long size() {
   1.395 +            return 8 + data.size();
   1.396 +        }
   1.397 +    }
   1.398 +
   1.399 +    /**
   1.400 +     * A DataChunk with a fixed size.
   1.401 +     */
   1.402 +    private class FixedSizeDataChunk extends Chunk {
   1.403 +
   1.404 +        private DataChunkOutputStream data;
   1.405 +        private boolean finished;
   1.406 +        private long fixedSize;
   1.407 +
   1.408 +        /**
   1.409 +         * Creates a new DataChunk at the current position of the
   1.410 +         * ImageOutputStream.
   1.411 +         * @param chunkType The chunkType of the chunk.
   1.412 +         */
   1.413 +        public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException {
   1.414 +            super(chunkType);
   1.415 +            this.fixedSize = fixedSize;
   1.416 +            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
   1.417 +            data.writeType(chunkType);
   1.418 +            data.writeUInt(fixedSize);
   1.419 +            data.clearCount();
   1.420 +
   1.421 +            // Fill fixed size with nulls
   1.422 +            byte[] buf = new byte[(int) Math.min(512, fixedSize)];
   1.423 +            long written = 0;
   1.424 +            while (written < fixedSize) {
   1.425 +                data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written));
   1.426 +                written += Math.min(buf.length, fixedSize - written);
   1.427 +            }
   1.428 +            if (fixedSize % 2 == 1) {
   1.429 +                out.writeByte(0); // write pad byte
   1.430 +            }
   1.431 +            seekToStartOfData();
   1.432 +        }
   1.433 +
   1.434 +        public DataChunkOutputStream getOutputStream() {
   1.435 +            /*if (finished) {
   1.436 +            throw new IllegalStateException("DataChunk is finished");
   1.437 +            }*/
   1.438 +            return data;
   1.439 +        }
   1.440 +
   1.441 +        /**
    1.442 +         * Returns the offset of this chunk relative to the beginning of the random access file.
    1.443 +         * @return the offset of this chunk
   1.444 +         */
   1.445 +        public long getOffset() {
   1.446 +            return offset;
   1.447 +        }
   1.448 +
   1.449 +        public void seekToStartOfData() throws IOException {
   1.450 +            seekRelative(offset + 8);
   1.451 +            data.clearCount();
   1.452 +        }
   1.453 +
   1.454 +        public void seekToEndOfChunk() throws IOException {
   1.455 +            seekRelative(offset + 8 + fixedSize + fixedSize % 2);
   1.456 +        }
   1.457 +
   1.458 +        @Override
   1.459 +        public void finish() throws IOException {
   1.460 +            if (!finished) {
   1.461 +                finished = true;
   1.462 +            }
   1.463 +        }
   1.464 +
   1.465 +        @Override
   1.466 +        public long size() {
   1.467 +            return 8 + fixedSize;
   1.468 +        }
   1.469 +    }
   1.470 +
   1.471 +    /**
   1.472 +     * Creates a new AVI file with the specified video format and
   1.473 +     * frame rate. The video has 24 bits per pixel.
   1.474 +     *
   1.475 +     * @param file the output file
   1.476 +     * @param format Selects an encoder for the video format.
   1.478 +     * @exception IllegalArgumentException if videoFormat is null or if
   1.479 +     * frame rate is <= 0
   1.480 +     */
   1.481 +    public AVIOutputStream(File file, VideoFormat format) throws IOException {
   1.482 +        this(file,format,24);
   1.483 +    }
   1.484 +    /**
   1.485 +     * Creates a new AVI file with the specified video format and
   1.486 +     * frame rate.
   1.487 +     *
   1.488 +     * @param file the output file
   1.489 +     * @param format Selects an encoder for the video format.
   1.490 +     * @param bitsPerPixel the number of bits per pixel.
   1.491 +     * @exception IllegalArgumentException if videoFormat is null or if
   1.492 +     * frame rate is <= 0
   1.493 +     */
   1.494 +    public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException {
   1.495 +        if (format == null) {
   1.496 +            throw new IllegalArgumentException("format must not be null");
   1.497 +        }
   1.498 +
   1.499 +        if (file.exists()) {
   1.500 +            file.delete();
   1.501 +        }
   1.502 +        this.out = new FileImageOutputStream(file);
   1.503 +        this.streamOffset = 0;
   1.504 +        this.videoFormat = format;
   1.505 +        this.videoFrames = new LinkedList<Sample>();
   1.506 +        this.imgDepth = bitsPerPixel;
   1.507 +        if (imgDepth == 4) {
   1.508 +            byte[] gray = new byte[16];
   1.509 +            for (int i = 0; i < gray.length; i++) {
   1.510 +                gray[i] = (byte) ((i << 4) | i);
   1.511 +            }
   1.512 +            palette = new IndexColorModel(4, 16, gray, gray, gray);
   1.513 +        } else if (imgDepth == 8) {
   1.514 +            byte[] gray = new byte[256];
   1.515 +            for (int i = 0; i < gray.length; i++) {
   1.516 +                gray[i] = (byte) i;
   1.517 +            }
   1.518 +            palette = new IndexColorModel(8, 256, gray, gray, gray);
   1.519 +        }
   1.520 +
   1.521 +    }
   1.522 +
   1.523 +    /**
   1.524 +     * Creates a new AVI output stream with the specified video format and
    1.525 +     * frame rate.
   1.526 +     *
   1.527 +     * @param out the underlying output stream
   1.528 +     * @param format Selects an encoder for the video format.
   1.529 +     * @exception IllegalArgumentException if videoFormat is null or if
    1.530 +     * frame rate is <= 0
   1.531 +     */
   1.532 +    public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {
   1.533 +        if (format == null) {
   1.534 +            throw new IllegalArgumentException("format must not be null");
   1.535 +        }
   1.536 +        this.out = out;
   1.537 +        this.streamOffset = out.getStreamPosition();
   1.538 +        this.videoFormat = format;
   1.539 +        this.videoFrames = new LinkedList<Sample>();
   1.540 +    }
   1.541 +
   1.542 +    /**
   1.543 +     * Used with frameRate to specify the time scale that this stream will use.
   1.544 +     * Dividing frameRate by timeScale gives the number of samples per second.
   1.545 +     * For video streams, this is the frame rate. For audio streams, this rate
   1.546 +     * corresponds to the time needed to play nBlockAlign bytes of audio, which
    1.547 +     * for PCM audio is just the sample rate.
   1.548 +     * <p>
   1.549 +     * The default value is 1.
   1.550 +     *
   1.551 +     * @param newValue
   1.552 +     */
   1.553 +    public void setTimeScale(int newValue) {
   1.554 +        if (newValue <= 0) {
   1.555 +            throw new IllegalArgumentException("timeScale must be greater 0");
   1.556 +        }
   1.557 +        this.timeScale = newValue;
   1.558 +    }
   1.559 +
   1.560 +    /**
   1.561 +     * Returns the time scale of this media.
   1.562 +     *
   1.563 +     * @return time scale
   1.564 +     */
   1.565 +    public int getTimeScale() {
   1.566 +        return timeScale;
   1.567 +    }
   1.568 +
   1.569 +    /**
   1.570 +     * Sets the rate of video frames in time scale units.
   1.571 +     * <p>
   1.572 +     * The default value is 30. Together with the default value 1 of timeScale
    1.573 +     * this results in 30 frames per second.
   1.574 +     *
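          +     * For example, an approximate NTSC rate of 29.97 frames per second can be
          +     * expressed as follows (a sketch; {@code out} is an AVIOutputStream):
          +     * <pre>
          +     *   out.setTimeScale(1001);
          +     *   out.setFrameRate(30000);   // 30000 / 1001 is roughly 29.97 frames per second
          +     * </pre>
          +     *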
   1.575 +     * @param newValue
   1.576 +     */
   1.577 +    public void setFrameRate(int newValue) {
   1.578 +        if (newValue <= 0) {
   1.579 +            throw new IllegalArgumentException("frameDuration must be greater 0");
   1.580 +        }
   1.581 +        if (state == States.STARTED) {
   1.582 +            throw new IllegalStateException("frameDuration must be set before the first frame is written");
   1.583 +        }
   1.584 +        this.frameRate = newValue;
   1.585 +    }
   1.586 +
   1.587 +    /**
   1.588 +     * Returns the frame rate of this media.
   1.589 +     *
   1.590 +     * @return frame rate
   1.591 +     */
   1.592 +    public int getFrameRate() {
   1.593 +        return frameRate;
   1.594 +    }
   1.595 +
   1.596 +    /** Sets the global color palette. */
   1.597 +    public void setPalette(IndexColorModel palette) {
   1.598 +        this.palette = palette;
   1.599 +    }
   1.600 +
   1.601 +    /**
   1.602 +     * Sets the compression quality of the video track.
    1.603 +     * A value of 0 stands for "high compression is important", a value of
   1.604 +     * 1 for "high image quality is important".
   1.605 +     * <p>
   1.606 +     * Changing this value affects frames which are subsequently written
   1.607 +     * to the AVIOutputStream. Frames which have already been written
   1.608 +     * are not changed.
   1.609 +     * <p>
    1.610 +     * This value only has an effect on videos encoded with the JPG format.
   1.611 +     * <p>
   1.612 +     * The default value is 0.9.
   1.613 +     *
   1.614 +     * @param newValue
   1.615 +     */
   1.616 +    public void setVideoCompressionQuality(float newValue) {
   1.617 +        this.quality = newValue;
   1.618 +    }
   1.619 +
   1.620 +    /**
   1.621 +     * Returns the video compression quality.
   1.622 +     *
   1.623 +     * @return video compression quality
   1.624 +     */
   1.625 +    public float getVideoCompressionQuality() {
   1.626 +        return quality;
   1.627 +    }
   1.628 +
   1.629 +    /**
   1.630 +     * Sets the dimension of the video track.
   1.631 +     * <p>
    1.632 +     * You need to explicitly set the dimension if you add all frames from
   1.633 +     * files or input streams.
   1.634 +     * <p>
   1.635 +     * If you add frames from buffered images, then AVIOutputStream
   1.636 +     * can determine the video dimension from the image width and height.
   1.637 +     *
   1.638 +     * @param width Must be greater than 0.
   1.639 +     * @param height Must be greater than 0.
   1.640 +     */
   1.641 +    public void setVideoDimension(int width, int height) {
   1.642 +        if (width < 1 || height < 1) {
   1.643 +            throw new IllegalArgumentException("width and height must be greater zero.");
   1.644 +        }
   1.645 +        this.imgWidth = width;
   1.646 +        this.imgHeight = height;
   1.647 +    }
   1.648 +
   1.649 +    /**
   1.650 +     * Gets the dimension of the video track.
   1.651 +     * <p>
   1.652 +     * Returns null if the dimension is not known.
   1.653 +     */
   1.654 +    public Dimension getVideoDimension() {
   1.655 +        if (imgWidth < 1 || imgHeight < 1) {
   1.656 +            return null;
   1.657 +        }
   1.658 +        return new Dimension(imgWidth, imgHeight);
   1.659 +    }
   1.660 +
   1.661 +    /**
    1.662 +     * Sets the state of this AVIOutputStream to started.
   1.663 +     * <p>
   1.664 +     * If the state is changed by this method, the prolog is
   1.665 +     * written.
   1.666 +     */
   1.667 +    private void ensureStarted() throws IOException {
   1.668 +        if (state != States.STARTED) {
   1.669 +            creationTime = new Date();
   1.670 +            writeProlog();
   1.671 +            state = States.STARTED;
   1.672 +        }
   1.673 +    }
   1.674 +
   1.675 +    /**
   1.676 +     * Writes a frame to the video track.
   1.677 +     * <p>
   1.678 +     * If the dimension of the video track has not been specified yet, it
   1.679 +     * is derived from the first buffered image added to the AVIOutputStream.
   1.680 +     *
   1.681 +     * @param image The frame image.
   1.682 +     *
   1.683 +     * @throws IllegalArgumentException if the duration is less than 1, or
   1.684 +     * if the dimension of the frame does not match the dimension of the video
   1.685 +     * track.
   1.686 +     * @throws IOException if writing the image failed.
   1.687 +     */
   1.688 +    public void writeFrame(BufferedImage image) throws IOException {
   1.689 +        ensureOpen();
   1.690 +        ensureStarted();
   1.691 +
   1.692 +        // Get the dimensions of the first image
   1.693 +        if (imgWidth == -1) {
   1.694 +            imgWidth = image.getWidth();
   1.695 +            imgHeight = image.getHeight();
   1.696 +        } else {
   1.697 +            // The dimension of the image must match the dimension of the video track
   1.698 +            if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) {
   1.699 +                throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size()
   1.700 +                        + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
   1.701 +                        + ") differs from image[0] (width="
   1.702 +                        + imgWidth + ", height=" + imgHeight);
   1.703 +            }
   1.704 +        }
   1.705 +
   1.706 +        DataChunk videoFrameChunk;
   1.707 +        long offset = getRelativeStreamPosition();
   1.708 +        boolean isSync = true;
   1.709 +        switch (videoFormat) {
   1.710 +            case RAW: {
   1.711 +                switch (imgDepth) {
   1.712 +                    case 4: {
   1.713 +                        IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
   1.714 +                        int[] imgRGBs = new int[16];
   1.715 +                        imgPalette.getRGBs(imgRGBs);
   1.716 +                        int[] previousRGBs = new int[16];
   1.717 +                        if (previousPalette == null) {
   1.718 +                            previousPalette = palette;
   1.719 +                        }
   1.720 +                        previousPalette.getRGBs(previousRGBs);
   1.721 +                        if (!Arrays.equals(imgRGBs, previousRGBs)) {
   1.722 +                            previousPalette = imgPalette;
   1.723 +                            DataChunk paletteChangeChunk = new DataChunk("00pc");
   1.724 +                            /*
   1.725 +                            int first = imgPalette.getMapSize();
   1.726 +                            int last = -1;
   1.727 +                            for (int i = 0; i < 16; i++) {
   1.728 +                            if (previousRGBs[i] != imgRGBs[i] && i < first) {
   1.729 +                            first = i;
   1.730 +                            }
   1.731 +                            if (previousRGBs[i] != imgRGBs[i] && i > last) {
   1.732 +                            last = i;
   1.733 +                            }
   1.734 +                            }*/
   1.735 +                            int first = 0;
   1.736 +                            int last = imgPalette.getMapSize() - 1;
   1.737 +                            /*
   1.738 +                             * typedef struct {
   1.739 +                            BYTE         bFirstEntry;
   1.740 +                            BYTE         bNumEntries;
   1.741 +                            WORD         wFlags;
   1.742 +                            PALETTEENTRY peNew[];
   1.743 +                            } AVIPALCHANGE;
   1.744 +                             *
   1.745 +                             * typedef struct tagPALETTEENTRY {
   1.746 +                            BYTE peRed;
   1.747 +                            BYTE peGreen;
   1.748 +                            BYTE peBlue;
   1.749 +                            BYTE peFlags;
   1.750 +                            } PALETTEENTRY;
   1.751 +                             */
   1.752 +                            DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
   1.753 +                            pOut.writeByte(first);//bFirstEntry
   1.754 +                            pOut.writeByte(last - first + 1);//bNumEntries
   1.755 +                            pOut.writeShort(0);//wFlags
   1.756 +
   1.757 +                            for (int i = first; i <= last; i++) {
   1.758 +                                pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
   1.759 +                                pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
   1.760 +                                pOut.writeByte(imgRGBs[i] & 0xff); // blue
    1.761 +                                pOut.writeByte(0); // reserved
   1.762 +                            }
   1.763 +
   1.764 +                            moviChunk.add(paletteChangeChunk);
   1.765 +                            paletteChangeChunk.finish();
   1.766 +                            long length = getRelativeStreamPosition() - offset;
   1.767 +                            videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
   1.768 +                            offset = getRelativeStreamPosition();
   1.769 +                        }
   1.770 +
   1.771 +                        videoFrameChunk = new DataChunk("00db");
   1.772 +                        byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
   1.773 +                        byte[] rgb4 = new byte[imgWidth / 2];
   1.774 +                        for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
   1.775 +                            for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) {
   1.776 +                                rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf));
   1.777 +                            }
   1.778 +                            videoFrameChunk.getOutputStream().write(rgb4);
   1.779 +                        }
   1.780 +                        break;
   1.781 +                    }
   1.782 +                    case 8: {
   1.783 +                        IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
   1.784 +                        int[] imgRGBs = new int[256];
   1.785 +                        imgPalette.getRGBs(imgRGBs);
   1.786 +                        int[] previousRGBs = new int[256];
   1.787 +                        if (previousPalette == null) {
   1.788 +                            previousPalette = palette;
   1.789 +                        }
   1.790 +                        previousPalette.getRGBs(previousRGBs);
   1.791 +                        if (!Arrays.equals(imgRGBs, previousRGBs)) {
   1.792 +                            previousPalette = imgPalette;
   1.793 +                            DataChunk paletteChangeChunk = new DataChunk("00pc");
   1.794 +                            /*
   1.795 +                            int first = imgPalette.getMapSize();
   1.796 +                            int last = -1;
   1.797 +                            for (int i = 0; i < 16; i++) {
   1.798 +                            if (previousRGBs[i] != imgRGBs[i] && i < first) {
   1.799 +                            first = i;
   1.800 +                            }
   1.801 +                            if (previousRGBs[i] != imgRGBs[i] && i > last) {
   1.802 +                            last = i;
   1.803 +                            }
   1.804 +                            }*/
   1.805 +                            int first = 0;
   1.806 +                            int last = imgPalette.getMapSize() - 1;
   1.807 +                            /*
   1.808 +                             * typedef struct {
   1.809 +                            BYTE         bFirstEntry;
   1.810 +                            BYTE         bNumEntries;
   1.811 +                            WORD         wFlags;
   1.812 +                            PALETTEENTRY peNew[];
   1.813 +                            } AVIPALCHANGE;
   1.814 +                             *
   1.815 +                             * typedef struct tagPALETTEENTRY {
   1.816 +                            BYTE peRed;
   1.817 +                            BYTE peGreen;
   1.818 +                            BYTE peBlue;
   1.819 +                            BYTE peFlags;
   1.820 +                            } PALETTEENTRY;
   1.821 +                             */
   1.822 +                            DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
   1.823 +                            pOut.writeByte(first);//bFirstEntry
   1.824 +                            pOut.writeByte(last - first + 1);//bNumEntries
   1.825 +                            pOut.writeShort(0);//wFlags
   1.826 +
   1.827 +                            for (int i = first; i <= last; i++) {
   1.828 +                                pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
   1.829 +                                pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
   1.830 +                                pOut.writeByte(imgRGBs[i] & 0xff); // blue
    1.831 +                                pOut.writeByte(0); // reserved
   1.832 +                            }
   1.833 +
   1.834 +                            moviChunk.add(paletteChangeChunk);
   1.835 +                            paletteChangeChunk.finish();
   1.836 +                            long length = getRelativeStreamPosition() - offset;
   1.837 +                            videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
   1.838 +                            offset = getRelativeStreamPosition();
   1.839 +                        }
   1.840 +
   1.841 +                        videoFrameChunk = new DataChunk("00db");
   1.842 +                        byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
   1.843 +                        for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
   1.844 +                            videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth);
   1.845 +                        }
   1.846 +                        break;
   1.847 +                    }
   1.848 +                    default: {
   1.849 +                        videoFrameChunk = new DataChunk("00db");
   1.850 +                        WritableRaster raster = image.getRaster();
   1.851 +                        int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data
   1.852 +                        byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data
   1.853 +                        for (int y = imgHeight - 1; y >= 0; --y) { // Upside down
   1.854 +                            raster.getPixels(0, y, imgWidth, 1, raw);
   1.855 +                            for (int x = 0, n = imgWidth * 3; x < n; x += 3) {
   1.856 +                                bytes[x + 2] = (byte) raw[x]; // Blue
   1.857 +                                bytes[x + 1] = (byte) raw[x + 1]; // Green
   1.858 +                                bytes[x] = (byte) raw[x + 2]; // Red
   1.859 +                            }
   1.860 +                            videoFrameChunk.getOutputStream().write(bytes);
   1.861 +                        }
   1.862 +                        break;
   1.863 +                    }
   1.864 +                }
   1.865 +                break;
   1.866 +            }
   1.867 +            
   1.868 +            case JPG: {
   1.869 +                videoFrameChunk = new DataChunk("00dc");
   1.870 +                ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next();
   1.871 +                ImageWriteParam iwParam = iw.getDefaultWriteParam();
   1.872 +                iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
   1.873 +                iwParam.setCompressionQuality(quality);
   1.874 +                MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
   1.875 +                iw.setOutput(imgOut);
   1.876 +                IIOImage img = new IIOImage(image, null, null);
   1.877 +                iw.write(null, img, iwParam);
   1.878 +                iw.dispose();
   1.879 +                break;
   1.880 +            }
   1.881 +            case PNG:
   1.882 +            default: {
   1.883 +                videoFrameChunk = new DataChunk("00dc");
   1.884 +                ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next();
   1.885 +                ImageWriteParam iwParam = iw.getDefaultWriteParam();
   1.886 +                MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
   1.887 +                iw.setOutput(imgOut);
   1.888 +                IIOImage img = new IIOImage(image, null, null);
   1.889 +                iw.write(null, img, iwParam);
   1.890 +                iw.dispose();
   1.891 +                break;
   1.892 +            }
   1.893 +        }
   1.894 +        long length = getRelativeStreamPosition() - offset;
   1.895 +        moviChunk.add(videoFrameChunk);
   1.896 +        videoFrameChunk.finish();
   1.897 +
   1.898 +        videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync));
   1.899 +        if (getRelativeStreamPosition() > 1L << 32) {
   1.900 +            throw new IOException("AVI file is larger than 4 GB");
   1.901 +        }
   1.902 +    }
   1.903 +
   1.904 +    /**
   1.905 +     * Writes a frame from a file to the video track.
   1.906 +     * <p>
   1.907 +     * This method does not inspect the contents of the file.
    1.908 +     * For example, it is your responsibility to only add JPG files if you have
   1.909 +     * chosen the JPEG video format.
   1.910 +     * <p>
   1.911 +     * If you add all frames from files or from input streams, then you
   1.912 +     * have to explicitly set the dimension of the video track before you
   1.913 +     * call finish() or close().
   1.914 +     *
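          +     * An illustrative sketch (file names and the {@code jpegFiles} list are
          +     * placeholders) for adding pre-encoded JPEG frames:
          +     * <pre>
          +     *   AVIOutputStream out = new AVIOutputStream(new File("video.avi"),
          +     *           AVIOutputStream.VideoFormat.JPG);
          +     *   out.setVideoDimension(640, 480);   // must be set explicitly for file-based frames
          +     *   for (File f : jpegFiles) {
          +     *       out.writeFrame(f);
          +     *   }
          +     *   out.close();
          +     * </pre>
          +     *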
   1.915 +     * @param file The file which holds the image data.
   1.916 +     *
   1.917 +     * @throws IllegalStateException if the duration is less than 1.
   1.918 +     * @throws IOException if writing the image failed.
   1.919 +     */
   1.920 +    public void writeFrame(File file) throws IOException {
   1.921 +        FileInputStream in = null;
   1.922 +        try {
   1.923 +            in = new FileInputStream(file);
   1.924 +            writeFrame(in);
   1.925 +        } finally {
   1.926 +            if (in != null) {
   1.927 +                in.close();
   1.928 +            }
   1.929 +        }
   1.930 +    }
   1.931 +
   1.932 +    /**
   1.933 +     * Writes a frame to the video track.
   1.934 +     * <p>
    1.935 +     * This method does not inspect the contents of the stream.
    1.936 +     * For example, it is your responsibility to only add JPG files if you have
   1.937 +     * chosen the JPEG video format.
   1.938 +     * <p>
   1.939 +     * If you add all frames from files or from input streams, then you
   1.940 +     * have to explicitly set the dimension of the video track before you
   1.941 +     * call finish() or close().
   1.942 +     *
   1.943 +     * @param in The input stream which holds the image data.
   1.944 +     *
   1.945 +     * @throws IllegalArgumentException if the duration is less than 1.
   1.946 +     * @throws IOException if writing the image failed.
   1.947 +     */
   1.948 +    public void writeFrame(InputStream in) throws IOException {
   1.949 +        ensureOpen();
   1.950 +        ensureStarted();
   1.951 +
   1.952 +        DataChunk videoFrameChunk = new DataChunk(
   1.953 +                videoFormat == VideoFormat.RAW ? "00db" : "00dc");
   1.954 +        moviChunk.add(videoFrameChunk);
   1.955 +        OutputStream mdatOut = videoFrameChunk.getOutputStream();
   1.956 +        long offset = getRelativeStreamPosition();
   1.957 +        byte[] buf = new byte[512];
   1.958 +        int len;
   1.959 +        while ((len = in.read(buf)) != -1) {
   1.960 +            mdatOut.write(buf, 0, len);
   1.961 +        }
   1.962 +        long length = getRelativeStreamPosition() - offset;
   1.963 +        videoFrameChunk.finish();
   1.964 +        videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true));
   1.965 +        if (getRelativeStreamPosition() > 1L << 32) {
   1.966 +            throw new IOException("AVI file is larger than 4 GB");
   1.967 +        }
   1.968 +    }
   1.969 +
   1.970 +    /**
   1.971 +     * Closes the movie file as well as the stream being filtered.
   1.972 +     *
   1.973 +     * @exception IOException if an I/O error has occurred
   1.974 +     */
   1.975 +    public void close() throws IOException {
   1.976 +        if (state == States.STARTED) {
   1.977 +            finish();
   1.978 +        }
   1.979 +        if (state != States.CLOSED) {
   1.980 +            out.close();
   1.981 +            state = States.CLOSED;
   1.982 +        }
   1.983 +    }
   1.984 +
   1.985 +    /**
   1.986 +     * Finishes writing the contents of the AVI output stream without closing
   1.987 +     * the underlying stream. Use this method when applying multiple filters
   1.988 +     * in succession to the same output stream.
   1.989 +     *
   1.990 +     * @exception IllegalStateException if the dimension of the video track
   1.991 +     * has not been specified or determined yet.
   1.992 +     * @exception IOException if an I/O exception has occurred
   1.993 +     */
   1.994 +    public void finish() throws IOException {
   1.995 +        ensureOpen();
   1.996 +        if (state != States.FINISHED) {
   1.997 +            if (imgWidth == -1 || imgHeight == -1) {
   1.998 +                throw new IllegalStateException("image width and height must be specified");
   1.999 +            }
  1.1000 +
  1.1001 +            moviChunk.finish();
  1.1002 +            writeEpilog();
  1.1003 +            state = States.FINISHED;
  1.1004 +            imgWidth = imgHeight = -1;
  1.1005 +        }
  1.1006 +    }
  1.1007 +
  1.1008 +    /**
  1.1009 +     * Check to make sure that this stream has not been closed
  1.1010 +     */
  1.1011 +    private void ensureOpen() throws IOException {
  1.1012 +        if (state == States.CLOSED) {
  1.1013 +            throw new IOException("Stream closed");
  1.1014 +        }
  1.1015 +    }
  1.1016 +
   1.1017 +    /** Gets the position relative to the beginning of the AVI stream.
  1.1018 +     * <p>
  1.1019 +     * Usually this value is equal to the stream position of the underlying
  1.1020 +     * ImageOutputStream, but can be larger if the underlying stream already
  1.1021 +     * contained data.
  1.1022 +     *
  1.1023 +     * @return The relative stream position.
  1.1024 +     * @throws IOException
  1.1025 +     */
  1.1026 +    private long getRelativeStreamPosition() throws IOException {
  1.1027 +        return out.getStreamPosition() - streamOffset;
  1.1028 +    }
  1.1029 +
   1.1030 +    /** Seeks relative to the beginning of the AVI stream.
  1.1031 +     * <p>
   1.1032 +     * Usually this is equal to seeking in the underlying ImageOutputStream, but
  1.1033 +     * can be different if the underlying stream already contained data.
  1.1034 +     *
  1.1035 +     */
  1.1036 +    private void seekRelative(long newPosition) throws IOException {
  1.1037 +        out.seek(newPosition + streamOffset);
  1.1038 +    }
  1.1039 +
  1.1040 +    private void writeProlog() throws IOException {
  1.1041 +        // The file has the following structure:
  1.1042 +        //
  1.1043 +        // .RIFF AVI
  1.1044 +        // ..avih (AVI Header Chunk)
  1.1045 +        // ..LIST strl
  1.1046 +        // ...strh (Stream Header Chunk)
  1.1047 +        // ...strf (Stream Format Chunk)
  1.1048 +        // ..LIST movi
  1.1049 +        // ...00dc (Compressed video data chunk in Track 00, repeated for each frame)
  1.1050 +        // ..idx1 (List of video data chunks and their location in the file)
  1.1051 +
  1.1052 +        // The RIFF AVI Chunk holds the complete movie
  1.1053 +        aviChunk = new CompositeChunk("RIFF", "AVI ");
  1.1054 +        CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl");
  1.1055 +
  1.1056 +        // Write empty AVI Main Header Chunk - we fill the data in later
  1.1057 +        aviChunk.add(hdrlChunk);
  1.1058 +        avihChunk = new FixedSizeDataChunk("avih", 56);
  1.1059 +        avihChunk.seekToEndOfChunk();
  1.1060 +        hdrlChunk.add(avihChunk);
  1.1061 +
  1.1062 +        CompositeChunk strlChunk = new CompositeChunk("LIST", "strl");
  1.1063 +        hdrlChunk.add(strlChunk);
  1.1064 +
  1.1065 +        // Write empty AVI Stream Header Chunk - we fill the data in later
  1.1066 +        strhChunk = new FixedSizeDataChunk("strh", 56);
  1.1067 +        strhChunk.seekToEndOfChunk();
  1.1068 +        strlChunk.add(strhChunk);
  1.1069 +        strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4);
  1.1070 +        strfChunk.seekToEndOfChunk();
  1.1071 +        strlChunk.add(strfChunk);
  1.1072 +
  1.1073 +        moviChunk = new CompositeChunk("LIST", "movi");
  1.1074 +        aviChunk.add(moviChunk);
  1.1075 +
  1.1076 +
  1.1077 +    }
  1.1078 +
  1.1079 +    private void writeEpilog() throws IOException {
  1.1080 +        // Compute values
  1.1081 +        int duration = 0;
  1.1082 +        for (Sample s : videoFrames) {
  1.1083 +            duration += s.duration;
  1.1084 +        }
  1.1085 +        long bufferSize = 0;
  1.1086 +        for (Sample s : videoFrames) {
  1.1087 +            if (s.length > bufferSize) {
  1.1088 +                bufferSize = s.length;
  1.1089 +            }
  1.1090 +        }
  1.1091 +
  1.1092 +
  1.1093 +        DataChunkOutputStream d;
  1.1094 +
  1.1095 +        /* Create Idx1 Chunk and write data
  1.1096 +         * -------------
  1.1097 +        typedef struct _avioldindex {
  1.1098 +        FOURCC  fcc;
  1.1099 +        DWORD   cb;
  1.1100 +        struct _avioldindex_entry {
  1.1101 +        DWORD   dwChunkId;
  1.1102 +        DWORD   dwFlags;
  1.1103 +        DWORD   dwOffset;
  1.1104 +        DWORD   dwSize;
  1.1105 +        } aIndex[];
  1.1106 +        } AVIOLDINDEX;
  1.1107 +         */
  1.1108 +        DataChunk idx1Chunk = new DataChunk("idx1");
  1.1109 +        aviChunk.add(idx1Chunk);
  1.1110 +        d = idx1Chunk.getOutputStream();
  1.1111 +        long moviListOffset = moviChunk.offset + 8;
  1.1112 +        //moviListOffset = 0;
  1.1113 +        for (Sample f : videoFrames) {
  1.1114 +
  1.1115 +            d.writeType(f.chunkType); // dwChunkId
  1.1116 +            // Specifies a FOURCC that identifies a stream in the AVI file. The
  1.1117 +            // FOURCC must have the form 'xxyy' where xx is the stream number and yy
  1.1118 +            // is a two-character code that identifies the contents of the stream:
  1.1119 +            //
  1.1120 +            // Two-character code   Description
  1.1121 +            //  db                  Uncompressed video frame
  1.1122 +            //  dc                  Compressed video frame
  1.1123 +            //  pc                  Palette change
  1.1124 +            //  wb                  Audio data
  1.1125 +
  1.1126 +            d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)//
  1.1127 +                    | (f.isSync ? 0x10 : 0x0)); // dwFlags
  1.1128 +            // Specifies a bitwise combination of zero or more of the following
  1.1129 +            // flags:
  1.1130 +            //
  1.1131 +            // Value    Name            Description
  1.1132 +            // 0x10     AVIIF_KEYFRAME  The data chunk is a key frame.
  1.1133 +            // 0x1      AVIIF_LIST      The data chunk is a 'rec ' list.
  1.1134 +            // 0x100    AVIIF_NO_TIME   The data chunk does not affect the timing of the
  1.1135 +            //                          stream. For example, this flag should be set for
  1.1136 +            //                          palette changes.
  1.1137 +
  1.1138 +            d.writeUInt(f.offset - moviListOffset); // dwOffset
  1.1139 +            // Specifies the location of the data chunk in the file. The value
  1.1140 +            // should be specified as an offset, in bytes, from the start of the
  1.1141 +            // 'movi' list; however, in some AVI files it is given as an offset from
  1.1142 +            // the start of the file.
  1.1143 +
  1.1144 +            d.writeUInt(f.length); // dwSize
  1.1145 +            // Specifies the size of the data chunk, in bytes.
  1.1146 +        }
  1.1147 +        idx1Chunk.finish();
  1.1148 +
  1.1149 +        /* Write Data into AVI Main Header Chunk
  1.1150 +         * -------------
  1.1151 +         * The AVIMAINHEADER structure defines global information in an AVI file.
  1.1152 +         * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx
  1.1153 +        typedef struct _avimainheader {
  1.1154 +        FOURCC fcc;
  1.1155 +        DWORD  cb;
  1.1156 +        DWORD  dwMicroSecPerFrame;
  1.1157 +        DWORD  dwMaxBytesPerSec;
  1.1158 +        DWORD  dwPaddingGranularity;
  1.1159 +        DWORD  dwFlags;
  1.1160 +        DWORD  dwTotalFrames;
  1.1161 +        DWORD  dwInitialFrames;
  1.1162 +        DWORD  dwStreams;
  1.1163 +        DWORD  dwSuggestedBufferSize;
  1.1164 +        DWORD  dwWidth;
  1.1165 +        DWORD  dwHeight;
  1.1166 +        DWORD  dwReserved[4];
  1.1167 +        } AVIMAINHEADER; */
  1.1168 +        avihChunk.seekToStartOfData();
  1.1169 +        d = avihChunk.getOutputStream();
  1.1170 +
  1.1171 +        d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame
  1.1172 +        // Specifies the number of microseconds between frames.
  1.1173 +        // This value indicates the overall timing for the file.
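          +        // For example, with the defaults timeScale = 1 and frameRate = 30,
          +        // this writes 1000000 / 30 = 33333 microseconds per frame.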
  1.1174 +
  1.1175 +        d.writeUInt(0); // dwMaxBytesPerSec
  1.1176 +        // Specifies the approximate maximum data rate of the file.
  1.1177 +        // This value indicates the number of bytes per second the system
  1.1178 +        // must handle to present an AVI sequence as specified by the other
  1.1179 +        // parameters contained in the main header and stream header chunks.
  1.1180 +
  1.1181 +        d.writeUInt(0); // dwPaddingGranularity
  1.1182 +        // Specifies the alignment for data, in bytes. Pad the data to multiples
  1.1183 +        // of this value.
  1.1184 +
  1.1185 +        d.writeUInt(0x10); // dwFlags (0x10 == hasIndex)
  1.1186 +        // Contains a bitwise combination of zero or more of the following
  1.1187 +        // flags:
  1.1188 +        //
  1.1189 +        // Value   Name         Description
  1.1190 +        // 0x10    AVIF_HASINDEX Indicates the AVI file has an index.
  1.1191 +        // 0x20    AVIF_MUSTUSEINDEX Indicates that application should use the
  1.1192 +        //                      index, rather than the physical ordering of the
  1.1193 +        //                      chunks in the file, to determine the order of
  1.1194 +        //                      presentation of the data. For example, this flag
  1.1195 +        //                      could be used to create a list of frames for
  1.1196 +        //                      editing.
  1.1197 +        // 0x100   AVIF_ISINTERLEAVED Indicates the AVI file is interleaved.
  1.1198 +        // 0x1000  AVIF_WASCAPTUREFILE Indicates the AVI file is a specially
  1.1199 +        //                      allocated file used for capturing real-time
  1.1200 +        //                      video. Applications should warn the user before
  1.1201 +        //                      writing over a file with this flag set because
  1.1202 +        //                      the user probably defragmented this file.
  1.1203 +        // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted
  1.1204 +        //                      data and software. When this flag is used,
  1.1205 +        //                      software should not permit the data to be
  1.1206 +        //                      duplicated.
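                  // These flags can be combined bitwise; for instance, an
                  // interleaved file with an index would use 0x10 | 0x100 = 0x110.
                  // This writer only sets AVIF_HASINDEX (0x10).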
  1.1207 +
  1.1208 +        d.writeUInt(videoFrames.size()); // dwTotalFrames
  1.1209 +        // Specifies the total number of frames of data in the file.
  1.1210 +
  1.1211 +        d.writeUInt(0); // dwInitialFrames
  1.1212 +        // Specifies the initial frame for interleaved files. Noninterleaved
  1.1213 +        // files should specify zero. If you are creating interleaved files,
  1.1214 +        // specify the number of frames in the file prior to the initial frame
  1.1215 +        // of the AVI sequence in this member.
  1.1216 +        // To give the audio driver enough audio to work with, the audio data in
  1.1217 +        // an interleaved file must be skewed from the video data. Typically,
  1.1218 +        // the audio data should be moved forward enough frames to allow
  1.1219 +        // approximately 0.75 seconds of audio data to be preloaded. The
  1.1220 +        // dwInitialRecords member should be set to the number of frames the
  1.1221 +        // audio is skewed. Also set the same value for the dwInitialFrames
  1.1222 +        // member of the AVISTREAMHEADER structure in the audio stream header.
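                  // For example, at 30 frames per second a 0.75 second skew is
                  // about 0.75 * 30 = 22.5, i.e. roughly 23 frames. This writer
                  // writes zero here (it does not set AVIF_ISINTERLEAVED above).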
  1.1223 +
  1.1224 +        d.writeUInt(1); // dwStreams
  1.1225 +        // Specifies the number of streams in the file. For example, a file with
  1.1226 +        // audio and video has two streams.
  1.1227 +
  1.1228 +        d.writeUInt(bufferSize); // dwSuggestedBufferSize
  1.1229 +        // Specifies the suggested buffer size for reading the file. Generally,
  1.1230 +        // this size should be large enough to contain the largest chunk in the
  1.1231 +        // file. If set to zero, or if it is too small, the playback software
  1.1232 +        // will have to reallocate memory during playback, which will reduce
  1.1233 +        // performance. For an interleaved file, the buffer size should be large
  1.1234 +        // enough to read an entire record, and not just a chunk.
  1.1235 +
  1.1236 +
  1.1237 +        d.writeUInt(imgWidth); // dwWidth
  1.1238 +        // Specifies the width of the AVI file in pixels.
  1.1239 +
  1.1240 +        d.writeUInt(imgHeight); // dwHeight
  1.1241 +        // Specifies the height of the AVI file in pixels.
  1.1242 +
  1.1243 +        d.writeUInt(0); // dwReserved[0]
  1.1244 +        d.writeUInt(0); // dwReserved[1]
  1.1245 +        d.writeUInt(0); // dwReserved[2]
  1.1246 +        d.writeUInt(0); // dwReserved[3]
  1.1247 +        // Reserved. Set this array to zero.
  1.1248 +
  1.1249 +        /* Write Data into AVI Stream Header Chunk
  1.1250 +         * -------------
  1.1251 +         * The AVISTREAMHEADER structure contains information about one stream
  1.1252 +         * in an AVI file.
  1.1253 +         * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx
  1.1254 +        typedef struct _avistreamheader {
  1.1255 +        FOURCC fcc;
  1.1256 +        DWORD  cb;
  1.1257 +        FOURCC fccType;
  1.1258 +        FOURCC fccHandler;
  1.1259 +        DWORD  dwFlags;
  1.1260 +        WORD   wPriority;
  1.1261 +        WORD   wLanguage;
  1.1262 +        DWORD  dwInitialFrames;
  1.1263 +        DWORD  dwScale;
  1.1264 +        DWORD  dwRate;
  1.1265 +        DWORD  dwStart;
  1.1266 +        DWORD  dwLength;
  1.1267 +        DWORD  dwSuggestedBufferSize;
  1.1268 +        DWORD  dwQuality;
  1.1269 +        DWORD  dwSampleSize;
  1.1270 +        struct {
  1.1271 +        short int left;
  1.1272 +        short int top;
  1.1273 +        short int right;
  1.1274 +        short int bottom;
  1.1275 +        }  rcFrame;
  1.1276 +        } AVISTREAMHEADER;
  1.1277 +         */
  1.1278 +        strhChunk.seekToStartOfData();
  1.1279 +        d = strhChunk.getOutputStream();
  1.1280 +        d.writeType("vids"); // fccType - vids for video stream
  1.1281 +        // Contains a FOURCC that specifies the type of the data contained in
  1.1282 +        // the stream. The following standard AVI values for video and audio are
  1.1283 +        // defined:
  1.1284 +        //
  1.1285 +        // FOURCC   Description
  1.1286 +        // 'auds'   Audio stream
  1.1287 +        // 'mids'   MIDI stream
  1.1288 +        // 'txts'   Text stream
  1.1289 +        // 'vids'   Video stream
  1.1290 +
  1.1291 +        switch (videoFormat) {
  1.1292 +            case RAW:
  1.1293 +                d.writeType("DIB "); // fccHandler - DIB for Raw RGB
  1.1294 +                break;
  1.1295 +            case RLE:
  1.1296 +                d.writeType("RLE "); // fccHandler - Microsoft RLE
  1.1297 +                break;
  1.1298 +            case JPG:
  1.1299 +                d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG
  1.1300 +                break;
  1.1301 +            case PNG:
  1.1302 +            default:
  1.1303 +                d.writeType("png "); // fccHandler - png for PNG
  1.1304 +                break;
  1.1305 +        }
  1.1306 +        // Optionally, contains a FOURCC that identifies a specific data
  1.1307 +        // handler. The data handler is the preferred handler for the stream.
  1.1308 +        // For audio and video streams, this specifies the codec for decoding
  1.1309 +        // the stream.
  1.1310 +
  1.1311 +        if (imgDepth <= 8) {
  1.1312 +            d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES
  1.1313 +        } else {
  1.1314 +            d.writeUInt(0); // dwFlags
  1.1315 +        }
  1.1316 +
  1.1317 +        // Contains any flags for the data stream. The bits in the high-order
  1.1318 +        // word of these flags are specific to the type of data contained in the
  1.1319 +        // stream. The following standard flags are defined:
  1.1320 +        //
  1.1321 +        // Value      Name                   Description
  1.1322 +        // 0x00000001 AVISF_DISABLED         Indicates this stream should not
  1.1323 +        //                                   be enabled by default.
  1.1324 +        // 0x00010000 AVISF_VIDEO_PALCHANGES Indicates this video stream
  1.1325 +        //                                   contains palette changes. This
  1.1326 +        //                                   flag warns the playback software
  1.1327 +        //                                   that it will need to animate the
  1.1328 +        //                                   palette.
  1.1329 +
  1.1330 +        d.writeUShort(0); // wPriority
  1.1331 +        // Specifies priority of a stream type. For example, in a file with
  1.1332 +        // multiple audio streams, the one with the highest priority might be
  1.1333 +        // the default stream.
  1.1334 +
  1.1335 +        d.writeUShort(0); // wLanguage
  1.1336 +        // Language tag.
  1.1337 +
  1.1338 +        d.writeUInt(0); // dwInitialFrames
  1.1339 +        // Specifies how far audio data is skewed ahead of the video frames in
  1.1340 +        // interleaved files. Typically, this is about 0.75 seconds. If you are
  1.1341 +        // creating interleaved files, specify the number of frames in the file
  1.1342 +        // prior to the initial frame of the AVI sequence in this member. For
  1.1343 +        // more information, see the remarks for the dwInitialFrames member of
  1.1344 +        // the AVIMAINHEADER structure.
  1.1345 +
  1.1346 +        d.writeUInt(timeScale); // dwScale
  1.1347 +        // Used with dwRate to specify the time scale that this stream will use.
  1.1348 +        // Dividing dwRate by dwScale gives the number of samples per second.
  1.1349 +        // For video streams, this is the frame rate. For audio streams, this
  1.1350 +        // rate corresponds to the time needed to play nBlockAlign bytes of
  1.1351 +        // audio, which for PCM audio is just the sample rate.
  1.1352 +
  1.1353 +        d.writeUInt(frameRate); // dwRate
  1.1354 +        // See dwScale.
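                  // For example, dwScale = 1 and dwRate = 30 describe 30 frames
                  // per second; dwScale = 1001 and dwRate = 30000 describe the
                  // NTSC rate of approximately 29.97 frames per second.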
  1.1355 +
  1.1356 +        d.writeUInt(0); // dwStart
  1.1357 +        // Specifies the starting time for this stream. The units are defined by
  1.1358 +        // the dwRate and dwScale members in the main file header. Usually, this
  1.1359 +        // is zero, but it can specify a delay time for a stream that does not
  1.1360 +        // start concurrently with the file.
  1.1361 +
  1.1362 +        d.writeUInt(videoFrames.size()); // dwLength
  1.1363 +        // Specifies the length of this stream. The units are defined by the
  1.1364 +        // dwRate and dwScale members of the stream's header.
  1.1365 +
  1.1366 +        d.writeUInt(bufferSize); // dwSuggestedBufferSize
  1.1367 +        // Specifies how large a buffer should be used to read this stream.
  1.1368 +        // Typically, this contains a value corresponding to the largest chunk
  1.1369 +        // present in the stream. Using the correct buffer size makes playback
  1.1370 +        // more efficient. Use zero if you do not know the correct buffer size.
  1.1371 +
  1.1372 +        d.writeInt(-1); // dwQuality
  1.1373 +        // Specifies an indicator of the quality of the data in the stream.
  1.1374 +        // Quality is represented as a number between 0 and 10,000.
  1.1375 +        // For compressed data, this typically represents the value of the
  1.1376 +        // quality parameter passed to the compression software. If set to -1,
  1.1377 +        // drivers use the default quality value.
  1.1378 +
  1.1379 +        d.writeUInt(0); // dwSampleSize
  1.1380 +        // Specifies the size of a single sample of data. This is set to zero
  1.1381 +        // if the samples can vary in size. If this number is nonzero, then
  1.1382 +        // multiple samples of data can be grouped into a single chunk within
  1.1383 +        // the file. If it is zero, each sample of data (such as a video frame)
  1.1384 +        // must be in a separate chunk. For video streams, this number is
  1.1385 +        // typically zero, although it can be nonzero if all video frames are
  1.1386 +        // the same size. For audio streams, this number should be the same as
  1.1387 +        // the nBlockAlign member of the WAVEFORMATEX structure describing the
  1.1388 +        // audio.
  1.1389 +
  1.1390 +        d.writeUShort(0); // rcFrame.left
  1.1391 +        d.writeUShort(0); // rcFrame.top
  1.1392 +        d.writeUShort(imgWidth); // rcFrame.right
  1.1393 +        d.writeUShort(imgHeight); // rcFrame.bottom
  1.1394 +        // Specifies the destination rectangle for a text or video stream within
  1.1395 +        // the movie rectangle specified by the dwWidth and dwHeight members of
  1.1396 +        // the AVI main header structure. The rcFrame member is typically used
  1.1397 +        // in support of multiple video streams. Set this rectangle to the
  1.1398 +        // coordinates corresponding to the movie rectangle to update the whole
  1.1399 +        // movie rectangle. Units for this member are pixels. The upper-left
  1.1400 +        // corner of the destination rectangle is relative to the upper-left
  1.1401 +        // corner of the movie rectangle.
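                  // For example, with a 640x480 movie this writes the rectangle
                  // (left=0, top=0, right=640, bottom=480), i.e. the full movie
                  // rectangle, since only a single video stream is written.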
  1.1402 +
  1.1403 +        /* Write BITMAPINFOHEADER Data into AVI Stream Format Chunk
  1.1404 +         * -------------
  1.1405 +         * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
  1.1406 +        typedef struct tagBITMAPINFOHEADER {
  1.1407 +        DWORD  biSize;
  1.1408 +        LONG   biWidth;
  1.1409 +        LONG   biHeight;
  1.1410 +        WORD   biPlanes;
  1.1411 +        WORD   biBitCount;
  1.1412 +        DWORD  biCompression;
  1.1413 +        DWORD  biSizeImage;
  1.1414 +        LONG   biXPelsPerMeter;
  1.1415 +        LONG   biYPelsPerMeter;
  1.1416 +        DWORD  biClrUsed;
  1.1417 +        DWORD  biClrImportant;
  1.1418 +        } BITMAPINFOHEADER;
  1.1419 +         */
  1.1420 +        strfChunk.seekToStartOfData();
  1.1421 +        d = strfChunk.getOutputStream();
  1.1422 +        d.writeUInt(40); // biSize
  1.1423 +        // Specifies the number of bytes required by the structure. This value
  1.1424 +        // does not include the size of the color table or the size of the color
  1.1425 +        // masks, if they are appended to the end of structure.
  1.1426 +
  1.1427 +        d.writeInt(imgWidth); // biWidth
  1.1428 +        // Specifies the width of the bitmap, in pixels.
  1.1429 +
  1.1430 +        d.writeInt(imgHeight); // biHeight
  1.1431 +        // Specifies the height of the bitmap, in pixels.
  1.1432 +        //
  1.1433 +        // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
  1.1434 +        // a bottom-up DIB with the origin at the lower left corner. If biHeight
  1.1435 +        // is negative, the bitmap is a top-down DIB with the origin at the
  1.1436 +        // upper left corner.
  1.1437 +        // For YUV bitmaps, the bitmap is always top-down, regardless of the
  1.1438 +        // sign of biHeight. Decoders should offer YUV formats with positive
  1.1439 +        // biHeight, but for backward compatibility they should accept YUV
  1.1440 +        // formats with either positive or negative biHeight.
  1.1441 +        // For compressed formats, biHeight must be positive, regardless of
  1.1442 +        // image orientation.
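                  // Note: imgHeight is written unchanged (a positive value), so a
                  // RAW stream declared by this header is, per the rule above,
                  // expected to store its pixel rows bottom-up.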
  1.1443 +
  1.1444 +        d.writeShort(1); // biPlanes
  1.1445 +        // Specifies the number of planes for the target device. This value must
  1.1446 +        // be set to 1.
  1.1447 +
  1.1448 +        d.writeShort(imgDepth); // biBitCount
  1.1449 +        // Specifies the number of bits per pixel (bpp).  For uncompressed
  1.1450 +        // formats, this value is the average number of bits per pixel. For
  1.1451 +        // compressed formats, this value is the implied bit depth of the
  1.1452 +        // uncompressed image, after the image has been decoded.
  1.1453 +
  1.1454 +        switch (videoFormat) {
  1.1455 +            case RAW:
  1.1456 +            default:
  1.1457 +                d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
  1.1458 +                break;
  1.1459 +            case RLE:
  1.1460 +                if (imgDepth == 8) {
  1.1461 +                    d.writeInt(1); // biCompression - BI_RLE8
  1.1462 +                } else if (imgDepth == 4) {
  1.1463 +                    d.writeInt(2); // biCompression - BI_RLE4
  1.1464 +                } else {
  1.1465 +                    throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
  1.1466 +                }
  1.1467 +                break;
  1.1468 +            case JPG:
  1.1469 +                d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
  1.1470 +                break;
  1.1471 +            case PNG:
  1.1472 +                d.writeType("png "); // biCompression - png for PNG
  1.1473 +                break;
  1.1474 +        }
  1.1475 +        // For compressed video and YUV formats, this member is a FOURCC code,
  1.1476 +        // specified as a DWORD in little-endian order. For example, YUYV video
  1.1477 +        // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
  1.1478 +        // Codes.
  1.1479 +        //
  1.1480 +        // For uncompressed RGB formats, the following values are possible:
  1.1481 +        //
  1.1482 +        // Value        Description
  1.1483 +        // BI_RGB       0x00000000 Uncompressed RGB.
  1.1484 +        // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks.
  1.1485 +        //                         Valid for 16-bpp and 32-bpp bitmaps.
  1.1486 +        //
  1.1487 +        // Note that BI_JPG and BI_PNG are not valid video formats.
  1.1488 +        //
  1.1489 +        // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
  1.1490 +        // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
  1.1491 +        // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
  1.1492 +        // structure to determine the specific RGB type.
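                  // For example, the FOURCC 'MJPG' written above is stored as the
                  // byte sequence 'M','J','P','G' (0x4D 0x4A 0x50 0x47), which read
                  // as a little-endian DWORD is 0x47504A4D.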
  1.1493 +
  1.1494 +        switch (videoFormat) {
  1.1495 +            case RAW:
  1.1496 +                d.writeInt(0); // biSizeImage
  1.1497 +                break;
  1.1498 +            case RLE:
  1.1499 +            case JPG:
  1.1500 +            case PNG:
  1.1501 +            default:
  1.1502 +                if (imgDepth == 4) {
  1.1503 +                    d.writeInt(imgWidth * imgHeight / 2); // biSizeImage
  1.1504 +                } else {
  1.1505 +                    int bytesPerPixel = Math.max(1, imgDepth / 8);
  1.1506 +                    d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage
  1.1507 +                }
  1.1508 +                break;
  1.1509 +        }
  1.1510 +        // Specifies the size, in bytes, of the image. This can be set to 0 for
  1.1511 +        // uncompressed RGB bitmaps.
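                  // For example, a 640x480 frame at 24 bpp gives 640 * 480 * 3
                  // = 921600 bytes, and at 4 bpp gives 640 * 480 / 2 = 153600 bytes.
                  // For MJPG and PNG this is only an estimate based on the
                  // uncompressed size; the actual chunk sizes vary per frame.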
  1.1512 +
  1.1513 +        d.writeInt(0); // biXPelsPerMeter
  1.1514 +        // Specifies the horizontal resolution, in pixels per meter, of the
  1.1515 +        // target device for the bitmap.
  1.1516 +
  1.1517 +        d.writeInt(0); // biYPelsPerMeter
  1.1518 +        // Specifies the vertical resolution, in pixels per meter, of the target
  1.1519 +        // device for the bitmap.
  1.1520 +
  1.1521 +        d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed
  1.1522 +        // Specifies the number of color indices in the color table that are
  1.1523 +        // actually used by the bitmap.
  1.1524 +
  1.1525 +        d.writeInt(0); // biClrImportant
  1.1526 +        // Specifies the number of color indices that are considered important
  1.1527 +        // for displaying the bitmap. If this value is zero, all colors are
  1.1528 +        // important.
  1.1529 +
  1.1530 +        if (palette != null) {
  1.1531 +            for (int i = 0, n = palette.getMapSize(); i < n; ++i) {
  1.1532 +                /*
  1.1533 +                 * typedef struct tagRGBQUAD {
  1.1534 +                BYTE rgbBlue;
  1.1535 +                BYTE rgbGreen;
  1.1536 +                BYTE rgbRed;
  1.1537 +                BYTE rgbReserved; // This member is reserved and must be zero.
  1.1538 +                } RGBQUAD;
  1.1539 +                 */
  1.1540 +                d.write(palette.getBlue(i));
  1.1541 +                d.write(palette.getGreen(i));
  1.1542 +                d.write(palette.getRed(i));
  1.1543 +                d.write(0);
  1.1544 +            }
  1.1545 +        }
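                  // For example, a palette entry with RGB (255, 0, 0), i.e. pure
                  // red, is written as the four bytes 0x00 0x00 0xFF 0x00
                  // (blue, green, red, reserved).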
  1.1546 +
  1.1547 +
  1.1548 +        // -----------------
  1.1549 +        aviChunk.finish();
  1.1550 +    }
  1.1551 +}