annotate src/ca/randelshofer/AVIOutputStream.java @ 30:be37291c62b8

propagated AudioFormat to other classes.
author Robert McIntyre <rlm@mit.edu>
date Sun, 30 Oct 2011 10:11:21 -0700
parents 4c5fc53778c1
children 784a3f4e6202
rev   line source
rlm@10 1 /**
rlm@10 2 * @(#)AVIOutputStream.java 1.5.1 2011-01-17
rlm@10 3 *
rlm@10 4 * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
rlm@10 5 * All rights reserved.
rlm@10 6 *
rlm@10 7 * You may not use, copy or modify this file, except in compliance with the
rlm@10 8 * license agreement you entered into with Werner Randelshofer.
rlm@10 9 * For details see accompanying license terms.
rlm@10 10 */
rlm@10 11 package ca.randelshofer;
rlm@10 12
rlm@10 13 import java.awt.Dimension;
rlm@10 14 import java.awt.image.BufferedImage;
rlm@10 15 import java.awt.image.DataBufferByte;
rlm@10 16 import java.awt.image.IndexColorModel;
rlm@10 17 import java.awt.image.WritableRaster;
rlm@10 18 import java.io.File;
rlm@10 19 import java.io.FileInputStream;
rlm@10 20 import java.io.IOException;
rlm@10 21 import java.io.InputStream;
rlm@10 22 import java.io.OutputStream;
rlm@10 23 import java.util.Arrays;
rlm@10 24 import java.util.Date;
rlm@10 25 import java.util.LinkedList;
rlm@10 26
rlm@10 27 import javax.imageio.IIOImage;
rlm@10 28 import javax.imageio.ImageIO;
rlm@10 29 import javax.imageio.ImageWriteParam;
rlm@10 30 import javax.imageio.ImageWriter;
rlm@10 31 import javax.imageio.stream.FileImageOutputStream;
rlm@10 32 import javax.imageio.stream.ImageOutputStream;
rlm@10 33 import javax.imageio.stream.MemoryCacheImageOutputStream;
rlm@10 34
rlm@10 35 /**
rlm@10 36 * This class supports writing of images into an AVI 1.0 video file.
rlm@10 37 * <p>
rlm@10 38 * The images are written as video frames.
rlm@10 39 * <p>
rlm@10 40 * Video frames can be encoded with one of the following formats:
rlm@10 41 * <ul>
rlm@10 42 * <li>JPEG</li>
rlm@10 43 * <li>PNG</li>
rlm@10 44 * <li>RAW</li>
rlm@10 45 * <li>RLE</li>
rlm@10 46 * </ul>
rlm@10 47 * All frames must have the same format.
rlm@10 48 * When JPG is used, each frame can have an individual encoding quality.
rlm@10 49 * <p>
rlm@10 50 * All frames in an AVI file must have the same duration. The duration can
rlm@10 51 * be set by setting an appropriate pair of values using methods
rlm@10 52 * {@link #setFrameRate} and {@link #setTimeScale}.
rlm@10 53 * <p>
rlm@10 54 * The length of an AVI 1.0 file is limited to 1 GB.
rlm@10 55 * This class supports lengths of up to 4 GB, but such files may not work on
rlm@10 56 * all players.
rlm@10 57 * <p>
rlm@10 58 * For detailed information about the AVI RIFF file format see:<br>
rlm@10 59 * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br>
rlm@10 60 * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br>
rlm@10 61 * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br>
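 * <p>
 * A minimal usage sketch (the output file name and the {@code frames}
 * collection of BufferedImage objects are placeholders):
 * <pre>
 * AVIOutputStream out = new AVIOutputStream(
 *         new File("video.avi"), AVIOutputStream.VideoFormat.JPG);
 * out.setVideoCompressionQuality(0.9f);
 * out.setFrameRate(30);
 * for (BufferedImage frame : frames) {
 *     out.writeFrame(frame);
 * }
 * out.close();
 * </pre>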
rlm@10 62 *
rlm@10 63 * @author Werner Randelshofer
rlm@10 64 * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream.
rlm@10 65 * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format.
rlm@10 66 * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets
rlm@10 67 * in "idx1" chunk.
rlm@10 68 * <br>1.3.2 2010-12-27 File size limit is 1 GB.
rlm@10 69 * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets.
rlm@10 70 * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream.
rlm@10 71 * Added method getVideoDimension().
rlm@10 72 * <br>1.2 2009-08-29 Adds support for RAW video format.
rlm@10 73 * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih
rlm@10 74 * chunk. Changed the API to reflect that AVI works with frame rates instead of
rlm@10 75 * with frame durations.
rlm@10 76 * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG
rlm@10 77 * encoded video.
rlm@10 78 * <br>1.0 2008-08-11 Created.
rlm@10 79 */
rlm@10 80 public class AVIOutputStream {
rlm@10 81
rlm@10 82 /**
rlm@10 83 * Underlying output stream.
rlm@10 84 */
rlm@10 85 private ImageOutputStream out;
rlm@10 86 /** The offset of the AVI stream in the underlying ImageOutputStream.
rlm@10 87 * Normally this is 0 unless the underlying stream already contained data
rlm@10 88 * when it was passed to the constructor.
rlm@10 89 */
rlm@10 90 private long streamOffset;
rlm@10 91 /** Previous frame for delta compression. */
rlm@10 92
rlm@10 93 /**
rlm@10 94 * Supported video encodings.
rlm@10 95 */
rlm@10 96 public static enum VideoFormat {
rlm@10 97
rlm@10 98 RAW, RLE, JPG, PNG;
rlm@10 99 }
rlm@10 100 /**
rlm@10 101 * Current video format.
rlm@10 102 */
rlm@10 103 private VideoFormat videoFormat;
rlm@10 104 /**
rlm@10 105 * Quality of JPEG encoded video frames.
rlm@10 106 */
rlm@10 107 private float quality = 0.9f;
rlm@10 108 /**
rlm@10 109 * Width of the video frames. All frames must have the same width.
rlm@10 110 * The value -1 is used to mark unspecified width.
rlm@10 111 */
rlm@10 112 private int imgWidth = -1;
rlm@10 113 /**
rlm@10 114 * Height of the video frames. All frames must have the same height.
rlm@10 115 * The value -1 is used to mark unspecified height.
rlm@10 116 */
rlm@10 117 private int imgHeight = -1;
rlm@10 118 /** Number of bits per pixel. */
rlm@10 119 private int imgDepth = 24;
rlm@10 120 /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */
rlm@10 121 private IndexColorModel palette;
rlm@10 122 private IndexColorModel previousPalette;
rlm@10 123 /** Video encoder. */
rlm@10 124
rlm@10 125 /**
rlm@10 126 * The timeScale of the movie.
rlm@10 127 * <p>
rlm@10 128 * Used with frameRate to specify the time scale that this stream will use.
rlm@10 129 * Dividing frameRate by timeScale gives the number of samples per second.
rlm@10 130 * For video streams, this is the frame rate. For audio streams, this rate
rlm@10 131 * corresponds to the time needed to play nBlockAlign bytes of audio, which
rlm@10 132 * for PCM audio is just the sample rate.
rlm@10 133 */
rlm@10 134 private int timeScale = 1;
rlm@10 135 /**
rlm@10 136 * The frameRate of the movie in timeScale units.
rlm@10 137 * <p>
rlm@10 138 * @see timeScale
rlm@10 139 */
rlm@10 140 private int frameRate = 30;
rlm@10 141 /**
rlm@10 142 * The states of the movie output stream.
rlm@10 143 */
rlm@10 144 private static enum States {
rlm@10 145
rlm@10 146 STARTED, FINISHED, CLOSED;
rlm@10 147 }
rlm@10 148 /**
rlm@10 149 * The current state of the movie output stream.
rlm@10 150 */
rlm@10 151 private States state = States.FINISHED;
rlm@10 152
rlm@10 153 /**
rlm@10 154 * AVI stores media data in samples.
rlm@10 155 * A sample is a single element in a sequence of time-ordered data.
rlm@10 156 */
rlm@10 157 private static class Sample {
rlm@10 158
rlm@10 159 String chunkType;
rlm@10 160 /** Offset of the sample relative to the start of the AVI file.
rlm@10 161 */
rlm@10 162 long offset;
rlm@10 163 /** Data length of the sample. */
rlm@10 164 long length;
rlm@10 165 /** Whether the sample is a sync-sample. */
rlm@10 166 boolean isSync;
rlm@10 167
rlm@10 168 /**
rlm@10 169 * Creates a new sample.
rlm@10 170 * @param duration
rlm@10 171 * @param offset
rlm@10 172 * @param length
rlm@10 173 */
rlm@10 174 public Sample(String chunkId, int duration, long offset, long length, boolean isSync) {
rlm@10 175 this.chunkType = chunkId;
rlm@10 176 this.offset = offset;
rlm@10 177 this.length = length;
rlm@10 178 this.isSync = isSync;
rlm@10 179 }
rlm@10 180 }
rlm@10 181 /**
rlm@10 182 * List of video frames.
rlm@10 183 */
rlm@10 184 private LinkedList<Sample> videoFrames;
rlm@10 185 /**
rlm@10 186 * This chunk holds the whole AVI content.
rlm@10 187 */
rlm@10 188 private CompositeChunk aviChunk;
rlm@10 189 /**
rlm@10 190 * This chunk holds the movie frames.
rlm@10 191 */
rlm@10 192 private CompositeChunk moviChunk;
rlm@10 193 /**
rlm@10 194 * This chunk holds the AVI Main Header.
rlm@10 195 */
rlm@10 196 FixedSizeDataChunk avihChunk;
rlm@10 197 /**
rlm@10 198 * This chunk holds the AVI Stream Header.
rlm@10 199 */
rlm@10 200 FixedSizeDataChunk strhChunk;
rlm@10 201 /**
rlm@10 202 * This chunk holds the AVI Stream Format Header.
rlm@10 203 */
rlm@10 204 FixedSizeDataChunk strfChunk;
rlm@10 205
rlm@10 206 /**
rlm@10 207 * Chunk base class.
rlm@10 208 */
rlm@10 209 private abstract class Chunk {
rlm@10 210
rlm@10 211 /**
rlm@10 212 * The chunkType of the chunk. A String with a length of 4 characters.
rlm@10 213 */
rlm@10 214 protected String chunkType;
rlm@10 215 /**
rlm@10 216 * The offset of the chunk relative to the start of the
rlm@10 217 * ImageOutputStream.
rlm@10 218 */
rlm@10 219 protected long offset;
rlm@10 220
rlm@10 221 /**
rlm@10 222 * Creates a new Chunk at the current position of the ImageOutputStream.
rlm@10 223 * @param chunkType The chunkType of the chunk. A string with a length of 4 characters.
rlm@10 224 */
rlm@10 225 public Chunk(String chunkType) throws IOException {
rlm@10 226 this.chunkType = chunkType;
rlm@10 227 offset = getRelativeStreamPosition();
rlm@10 228 }
rlm@10 229
rlm@10 230 /**
rlm@10 231 * Writes the chunk to the ImageOutputStream and disposes it.
rlm@10 232 */
rlm@10 233 public abstract void finish() throws IOException;
rlm@10 234
rlm@10 235 /**
rlm@10 236 * Returns the size of the chunk including the size of the chunk header.
rlm@10 237 * @return The size of the chunk.
rlm@10 238 */
rlm@10 239 public abstract long size();
rlm@10 240 }
rlm@10 241
rlm@10 242 /**
rlm@10 243 * A CompositeChunk contains an ordered list of Chunks.
rlm@10 244 */
rlm@10 245 private class CompositeChunk extends Chunk {
rlm@10 246
rlm@10 247 /**
rlm@10 248 * The type of the composite. A String with a length of 4 characters.
rlm@10 249 */
rlm@10 250 protected String compositeType;
rlm@10 251 private LinkedList<Chunk> children;
rlm@10 252 private boolean finished;
rlm@10 253
rlm@10 254 /**
rlm@10 255 * Creates a new CompositeChunk at the current position of the
rlm@10 256 * ImageOutputStream.
rlm@10 257 * @param compositeType The type of the composite.
rlm@10 258 * @param chunkType The type of the chunk.
rlm@10 259 */
rlm@10 260 public CompositeChunk(String compositeType, String chunkType) throws IOException {
rlm@10 261 super(chunkType);
rlm@10 262 this.compositeType = compositeType;
rlm@10 263 //out.write
rlm@10 264 out.writeLong(0); // make room for the chunk header
rlm@10 265 out.writeInt(0); // make room for the chunk header
rlm@10 266 children = new LinkedList<Chunk>();
rlm@10 267 }
rlm@10 268
rlm@10 269 public void add(Chunk child) throws IOException {
rlm@10 270 if (children.size() > 0) {
rlm@10 271 children.getLast().finish();
rlm@10 272 }
rlm@10 273 children.add(child);
rlm@10 274 }
rlm@10 275
rlm@10 276 /**
rlm@10 277 * Writes the chunk and all its children to the ImageOutputStream
rlm@10 278 * and disposes of all resources held by the chunk.
rlm@10 279 * @throws java.io.IOException
rlm@10 280 */
rlm@10 281 @Override
rlm@10 282 public void finish() throws IOException {
rlm@10 283 if (!finished) {
rlm@10 284 if (size() > 0xffffffffL) {
rlm@10 285 throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size());
rlm@10 286 }
rlm@10 287
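// Remember where the chunk data ends, seek back to the twelve header
// bytes reserved in the constructor, and patch in the composite type
// ("RIFF" or "LIST"), the chunk size, and the chunk type now that the
// final size is known; then finish the children and return to the end.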
rlm@10 288 long pointer = getRelativeStreamPosition();
rlm@10 289 seekRelative(offset);
rlm@10 290
rlm@10 291 DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
rlm@10 292 headerData.writeType(compositeType);
rlm@10 293 headerData.writeUInt(size() - 8);
rlm@10 294 headerData.writeType(chunkType);
rlm@10 295 for (Chunk child : children) {
rlm@10 296 child.finish();
rlm@10 297 }
rlm@10 298 seekRelative(pointer);
rlm@10 299 if (size() % 2 == 1) {
rlm@10 300 out.writeByte(0); // write pad byte
rlm@10 301 }
rlm@10 302 finished = true;
rlm@10 303 }
rlm@10 304 }
rlm@10 305
rlm@10 306 @Override
rlm@10 307 public long size() {
rlm@10 308 long length = 12;
rlm@10 309 for (Chunk child : children) {
rlm@10 310 length += child.size() + child.size() % 2;
rlm@10 311 }
rlm@10 312 return length;
rlm@10 313 }
rlm@10 314 }
rlm@10 315
rlm@10 316 /**
rlm@10 317 * Data Chunk.
rlm@10 318 */
rlm@10 319 private class DataChunk extends Chunk {
rlm@10 320
rlm@10 321 private DataChunkOutputStream data;
rlm@10 322 private boolean finished;
rlm@10 323
rlm@10 324 /**
rlm@10 325 * Creates a new DataChunk at the current position of the
rlm@10 326 * ImageOutputStream.
rlm@10 327 * @param name The chunkType of the chunk.
rlm@10 328 */
rlm@10 329 public DataChunk(String name) throws IOException {
rlm@10 330 super(name);
rlm@10 331 out.writeLong(0); // make room for the chunk header
rlm@10 332 data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
rlm@10 333 }
rlm@10 334
rlm@10 335 public DataChunkOutputStream getOutputStream() {
rlm@10 336 if (finished) {
rlm@10 337 throw new IllegalStateException("DataChunk is finished");
rlm@10 338 }
rlm@10 339 return data;
rlm@10 340 }
rlm@10 341
rlm@10 342 @Override
rlm@10 343 public void finish() throws IOException {
rlm@10 344 if (!finished) {
rlm@10 345 long sizeBefore = size();
rlm@10 346
rlm@10 347 if (size() > 0xffffffffL) {
rlm@10 348 throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size());
rlm@10 349 }
rlm@10 350
rlm@10 351 long pointer = getRelativeStreamPosition();
rlm@10 352 seekRelative(offset);
rlm@10 353
rlm@10 354 DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
rlm@10 355 headerData.writeType(chunkType);
rlm@10 356 headerData.writeUInt(size() - 8);
rlm@10 357 seekRelative(pointer);
rlm@10 358 if (size() % 2 == 1) {
rlm@10 359 out.writeByte(0); // write pad byte
rlm@10 360 }
rlm@10 361 finished = true;
rlm@10 362 long sizeAfter = size();
rlm@10 363 if (sizeBefore != sizeAfter) {
rlm@10 364 System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter);
rlm@10 365 }
rlm@10 366 }
rlm@10 367 }
rlm@10 368
rlm@10 369 @Override
rlm@10 370 public long size() {
rlm@10 371 return 8 + data.size();
rlm@10 372 }
rlm@10 373 }
rlm@10 374
rlm@10 375 /**
rlm@10 376 * A DataChunk with a fixed size.
rlm@10 377 */
rlm@10 378 private class FixedSizeDataChunk extends Chunk {
rlm@10 379
rlm@10 380 private DataChunkOutputStream data;
rlm@10 381 private boolean finished;
rlm@10 382 private long fixedSize;
rlm@10 383
rlm@10 384 /**
rlm@10 385 * Creates a new FixedSizeDataChunk at the current position of the
rlm@10 386 * ImageOutputStream.
rlm@10 387 * @param chunkType The chunkType of the chunk.
rlm@10 388 */
rlm@10 389 public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException {
rlm@10 390 super(chunkType);
rlm@10 391 this.fixedSize = fixedSize;
rlm@10 392 data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
rlm@10 393 data.writeType(chunkType);
rlm@10 394 data.writeUInt(fixedSize);
rlm@10 395 data.clearCount();
rlm@10 396
rlm@10 397 // Fill fixed size with nulls
rlm@10 398 byte[] buf = new byte[(int) Math.min(512, fixedSize)];
rlm@10 399 long written = 0;
rlm@10 400 while (written < fixedSize) {
rlm@10 401 data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written));
rlm@10 402 written += Math.min(buf.length, fixedSize - written);
rlm@10 403 }
rlm@10 404 if (fixedSize % 2 == 1) {
rlm@10 405 out.writeByte(0); // write pad byte
rlm@10 406 }
rlm@10 407 seekToStartOfData();
rlm@10 408 }
rlm@10 409
rlm@10 410 public DataChunkOutputStream getOutputStream() {
rlm@10 411 /*if (finished) {
rlm@10 412 throw new IllegalStateException("DataChunk is finished");
rlm@10 413 }*/
rlm@10 414 return data;
rlm@10 415 }
rlm@10 416
rlm@10 417 public void seekToStartOfData() throws IOException {
rlm@10 418 seekRelative(offset + 8);
rlm@10 419 data.clearCount();
rlm@10 420 }
rlm@10 421
rlm@10 422 public void seekToEndOfChunk() throws IOException {
rlm@10 423 seekRelative(offset + 8 + fixedSize + fixedSize % 2);
rlm@10 424 }
rlm@10 425
rlm@10 426 @Override
rlm@10 427 public void finish() throws IOException {
rlm@10 428 if (!finished) {
rlm@10 429 finished = true;
rlm@10 430 }
rlm@10 431 }
rlm@10 432
rlm@10 433 @Override
rlm@10 434 public long size() {
rlm@10 435 return 8 + fixedSize;
rlm@10 436 }
rlm@10 437 }
rlm@10 438
rlm@10 439 /**
rlm@10 440 * Creates a new AVI file with the specified video format.
rlm@10 441 * The video has 24 bits per pixel.
rlm@10 442 *
rlm@10 443 * @param file the output file
rlm@10 444 * @param format Selects an encoder for the video format.
rlm@10 446 * @exception IllegalArgumentException if format is null.
rlm@10 448 */
rlm@10 449 public AVIOutputStream(File file, VideoFormat format) throws IOException {
rlm@10 450 this(file,format,24);
rlm@10 451 }
rlm@10 452 /**
rlm@10 453 * Creates a new AVI file with the specified video format and
rlm@10 454 * number of bits per pixel.
rlm@10 455 *
rlm@10 456 * @param file the output file
rlm@10 457 * @param format Selects an encoder for the video format.
rlm@10 458 * @param bitsPerPixel the number of bits per pixel.
rlm@10 459 * @exception IllegalArgumentException if format is null.
rlm@10 461 */
rlm@10 462 public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException {
rlm@10 463 if (format == null) {
rlm@10 464 throw new IllegalArgumentException("format must not be null");
rlm@10 465 }
rlm@10 466
rlm@10 467 if (file.exists()) {
rlm@10 468 file.delete();
rlm@10 469 }
rlm@10 470 this.out = new FileImageOutputStream(file);
rlm@10 471 this.streamOffset = 0;
rlm@10 472 this.videoFormat = format;
rlm@10 473 this.videoFrames = new LinkedList<Sample>();
rlm@10 474 this.imgDepth = bitsPerPixel;
rlm@10 475 if (imgDepth == 4) {
rlm@10 476 byte[] gray = new byte[16];
rlm@10 477 for (int i = 0; i < gray.length; i++) {
rlm@10 478 gray[i] = (byte) ((i << 4) | i);
rlm@10 479 }
rlm@10 480 palette = new IndexColorModel(4, 16, gray, gray, gray);
rlm@10 481 } else if (imgDepth == 8) {
rlm@10 482 byte[] gray = new byte[256];
rlm@10 483 for (int i = 0; i < gray.length; i++) {
rlm@10 484 gray[i] = (byte) i;
rlm@10 485 }
rlm@10 486 palette = new IndexColorModel(8, 256, gray, gray, gray);
rlm@10 487 }
rlm@10 488
rlm@10 489 }
rlm@10 490
rlm@10 491 /**
rlm@10 492 * Creates a new AVI output stream with the specified video format.
rlm@10 494 *
rlm@10 495 * @param out the underlying output stream
rlm@10 496 * @param format Selects an encoder for the video format.
rlm@10 497 * @exception IllegalArgumentException if format is null.
rlm@10 499 */
rlm@10 500 public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {
rlm@10 501 if (format == null) {
rlm@10 502 throw new IllegalArgumentException("format must not be null");
rlm@10 503 }
rlm@10 504 this.out = out;
rlm@10 505 this.streamOffset = out.getStreamPosition();
rlm@10 506 this.videoFormat = format;
rlm@10 507 this.videoFrames = new LinkedList<Sample>();
rlm@10 508 }
rlm@10 509
rlm@10 510 /**
rlm@10 511 * Used with frameRate to specify the time scale that this stream will use.
rlm@10 512 * Dividing frameRate by timeScale gives the number of samples per second.
rlm@10 513 * For video streams, this is the frame rate. For audio streams, this rate
rlm@10 514 * corresponds to the time needed to play nBlockAlign bytes of audio, which
rlm@10 515 * for PCM audio is just the sample rate.
rlm@10 516 * <p>
rlm@10 517 * The default value is 1.
rlm@10 518 *
rlm@10 519 * @param newValue
rlm@10 520 */
rlm@10 521 public void setTimeScale(int newValue) {
rlm@10 522 if (newValue <= 0) {
rlm@10 523 throw new IllegalArgumentException("timeScale must be greater than 0");
rlm@10 524 }
rlm@10 525 this.timeScale = newValue;
rlm@10 526 }
rlm@10 527
rlm@10 528 /**
rlm@10 529 * Returns the time scale of this media.
rlm@10 530 *
rlm@10 531 * @return time scale
rlm@10 532 */
rlm@10 533 public int getTimeScale() {
rlm@10 534 return timeScale;
rlm@10 535 }
rlm@10 536
rlm@10 537 /**
rlm@10 538 * Sets the rate of video frames in time scale units.
rlm@10 539 * <p>
rlm@10 540 * The default value is 30. Together with the default value 1 of timeScale
rlm@10 541 * this results in 30 frames per second.
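 * <p>
 * For example, {@code setFrameRate(30000)} combined with
 * {@code setTimeScale(1001)} gives the NTSC rate of roughly
 * 29.97 frames per second.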
rlm@10 542 *
rlm@10 543 * @param newValue
rlm@10 544 */
rlm@10 545 public void setFrameRate(int newValue) {
rlm@10 546 if (newValue <= 0) {
rlm@10 547 throw new IllegalArgumentException("frameRate must be greater than 0");
rlm@10 548 }
rlm@10 549 if (state == States.STARTED) {
rlm@10 550 throw new IllegalStateException("frameRate must be set before the first frame is written");
rlm@10 551 }
rlm@10 552 this.frameRate = newValue;
rlm@10 553 }
rlm@10 554
rlm@10 555 /**
rlm@10 556 * Returns the frame rate of this media.
rlm@10 557 *
rlm@10 558 * @return frame rate
rlm@10 559 */
rlm@10 560 public int getFrameRate() {
rlm@10 561 return frameRate;
rlm@10 562 }
rlm@10 563
rlm@10 564 /** Sets the global color palette. */
rlm@10 565 public void setPalette(IndexColorModel palette) {
rlm@10 566 this.palette = palette;
rlm@10 567 }
rlm@10 568
rlm@10 569 /**
rlm@10 570 * Sets the compression quality of the video track.
rlm@10 571 * A value of 0 stands for "high compression is important", a value of
rlm@10 572 * 1 for "high image quality is important".
rlm@10 573 * <p>
rlm@10 574 * Changing this value affects frames which are subsequently written
rlm@10 575 * to the AVIOutputStream. Frames which have already been written
rlm@10 576 * are not changed.
rlm@10 577 * <p>
rlm@10 578 * This value only has an effect on videos encoded with the JPG format.
rlm@10 579 * <p>
rlm@10 580 * The default value is 0.9.
rlm@10 581 *
rlm@10 582 * @param newValue
rlm@10 583 */
rlm@10 584 public void setVideoCompressionQuality(float newValue) {
rlm@10 585 this.quality = newValue;
rlm@10 586 }
rlm@10 587
rlm@10 588 /**
rlm@10 589 * Returns the video compression quality.
rlm@10 590 *
rlm@10 591 * @return video compression quality
rlm@10 592 */
rlm@10 593 public float getVideoCompressionQuality() {
rlm@10 594 return quality;
rlm@10 595 }
rlm@10 596
rlm@10 597 /**
rlm@10 598 * Sets the dimension of the video track.
rlm@10 599 * <p>
rlm@10 600 * You need to explicitly set the dimension if you add all frames from
rlm@10 601 * files or input streams.
rlm@10 602 * <p>
rlm@10 603 * If you add frames from buffered images, then AVIOutputStream
rlm@10 604 * can determine the video dimension from the image width and height.
rlm@10 605 *
rlm@10 606 * @param width Must be greater than 0.
rlm@10 607 * @param height Must be greater than 0.
rlm@10 608 */
rlm@10 609 public void setVideoDimension(int width, int height) {
rlm@10 610 if (width < 1 || height < 1) {
rlm@10 611 throw new IllegalArgumentException("width and height must be greater than zero.");
rlm@10 612 }
rlm@10 613 this.imgWidth = width;
rlm@10 614 this.imgHeight = height;
rlm@10 615 }
rlm@10 616
rlm@10 617 /**
rlm@10 618 * Gets the dimension of the video track.
rlm@10 619 * <p>
rlm@10 620 * Returns null if the dimension is not known.
rlm@10 621 */
rlm@10 622 public Dimension getVideoDimension() {
rlm@10 623 if (imgWidth < 1 || imgHeight < 1) {
rlm@10 624 return null;
rlm@10 625 }
rlm@10 626 return new Dimension(imgWidth, imgHeight);
rlm@10 627 }
rlm@10 628
rlm@10 629 /**
rlm@10 630 * Sets the state of the AVIOutputStream to started.
rlm@10 631 * <p>
rlm@10 632 * If the state is changed by this method, the prolog is
rlm@10 633 * written.
rlm@10 634 */
rlm@10 635 private void ensureStarted() throws IOException {
rlm@10 636 if (state != States.STARTED) {
rlm@10 637 new Date();
rlm@10 638 writeProlog();
rlm@10 639 state = States.STARTED;
rlm@10 640 }
rlm@10 641 }
rlm@10 642
rlm@10 643 /**
rlm@10 644 * Writes a frame to the video track.
rlm@10 645 * <p>
rlm@10 646 * If the dimension of the video track has not been specified yet, it
rlm@10 647 * is derived from the first buffered image added to the AVIOutputStream.
rlm@10 648 *
rlm@10 649 * @param image The frame image.
rlm@10 650 *
rlm@10 651 * @throws IllegalArgumentException if the dimension of the frame does
rlm@10 652 * not match the dimension of the video track.
rlm@10 654 * @throws IOException if writing the image failed.
rlm@10 655 */
rlm@10 656 public void writeFrame(BufferedImage image) throws IOException {
rlm@10 657 ensureOpen();
rlm@10 658 ensureStarted();
rlm@10 659
rlm@10 660 // Get the dimensions of the first image
rlm@10 661 if (imgWidth == -1) {
rlm@10 662 imgWidth = image.getWidth();
rlm@10 663 imgHeight = image.getHeight();
rlm@10 664 } else {
rlm@10 665 // The dimension of the image must match the dimension of the video track
rlm@10 666 if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) {
rlm@10 667 throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size()
rlm@10 668 + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
rlm@10 669 + ") differ from image[0] (width="
rlm@10 670 + imgWidth + ", height=" + imgHeight + ")");
rlm@10 671 }
rlm@10 672 }
rlm@10 673
rlm@10 674 DataChunk videoFrameChunk;
rlm@10 675 long offset = getRelativeStreamPosition();
rlm@10 676 boolean isSync = true;
rlm@10 677 switch (videoFormat) {
rlm@10 678 case RAW: {
rlm@10 679 switch (imgDepth) {
rlm@10 680 case 4: {
rlm@10 681 IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
rlm@10 682 int[] imgRGBs = new int[16];
rlm@10 683 imgPalette.getRGBs(imgRGBs);
rlm@10 684 int[] previousRGBs = new int[16];
rlm@10 685 if (previousPalette == null) {
rlm@10 686 previousPalette = palette;
rlm@10 687 }
rlm@10 688 previousPalette.getRGBs(previousRGBs);
rlm@10 689 if (!Arrays.equals(imgRGBs, previousRGBs)) {
rlm@10 690 previousPalette = imgPalette;
rlm@10 691 DataChunk paletteChangeChunk = new DataChunk("00pc");
rlm@10 692 /*
rlm@10 693 int first = imgPalette.getMapSize();
rlm@10 694 int last = -1;
rlm@10 695 for (int i = 0; i < 16; i++) {
rlm@10 696 if (previousRGBs[i] != imgRGBs[i] && i < first) {
rlm@10 697 first = i;
rlm@10 698 }
rlm@10 699 if (previousRGBs[i] != imgRGBs[i] && i > last) {
rlm@10 700 last = i;
rlm@10 701 }
rlm@10 702 }*/
rlm@10 703 int first = 0;
rlm@10 704 int last = imgPalette.getMapSize() - 1;
rlm@10 705 /*
rlm@10 706 * typedef struct {
rlm@10 707 BYTE bFirstEntry;
rlm@10 708 BYTE bNumEntries;
rlm@10 709 WORD wFlags;
rlm@10 710 PALETTEENTRY peNew[];
rlm@10 711 } AVIPALCHANGE;
rlm@10 712 *
rlm@10 713 * typedef struct tagPALETTEENTRY {
rlm@10 714 BYTE peRed;
rlm@10 715 BYTE peGreen;
rlm@10 716 BYTE peBlue;
rlm@10 717 BYTE peFlags;
rlm@10 718 } PALETTEENTRY;
rlm@10 719 */
rlm@10 720 DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
rlm@10 721 pOut.writeByte(first);//bFirstEntry
rlm@10 722 pOut.writeByte(last - first + 1);//bNumEntries
rlm@10 723 pOut.writeShort(0);//wFlags
rlm@10 724
rlm@10 725 for (int i = first; i <= last; i++) {
rlm@10 726 pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
rlm@10 727 pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
rlm@10 728 pOut.writeByte(imgRGBs[i] & 0xff); // blue
rlm@10 729 pOut.writeByte(0); // reserved
rlm@10 730 }
rlm@10 731
rlm@10 732 moviChunk.add(paletteChangeChunk);
rlm@10 733 paletteChangeChunk.finish();
rlm@10 734 long length = getRelativeStreamPosition() - offset;
rlm@10 735 videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
rlm@10 736 offset = getRelativeStreamPosition();
rlm@10 737 }
rlm@10 738
rlm@10 739 videoFrameChunk = new DataChunk("00db");
rlm@10 740 byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
rlm@10 741 byte[] rgb4 = new byte[imgWidth / 2];
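// Pack two 4-bit palette indices into each output byte and write the
// scanlines bottom-up, as required for a bottom-up DIB.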
rlm@10 742 for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
rlm@10 743 for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) {
rlm@10 744 rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf));
rlm@10 745 }
rlm@10 746 videoFrameChunk.getOutputStream().write(rgb4);
rlm@10 747 }
rlm@10 748 break;
rlm@10 749 }
rlm@10 750 case 8: {
rlm@10 751 IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
rlm@10 752 int[] imgRGBs = new int[256];
rlm@10 753 imgPalette.getRGBs(imgRGBs);
rlm@10 754 int[] previousRGBs = new int[256];
rlm@10 755 if (previousPalette == null) {
rlm@10 756 previousPalette = palette;
rlm@10 757 }
rlm@10 758 previousPalette.getRGBs(previousRGBs);
rlm@10 759 if (!Arrays.equals(imgRGBs, previousRGBs)) {
rlm@10 760 previousPalette = imgPalette;
rlm@10 761 DataChunk paletteChangeChunk = new DataChunk("00pc");
rlm@10 762 /*
rlm@10 763 int first = imgPalette.getMapSize();
rlm@10 764 int last = -1;
rlm@10 765 for (int i = 0; i < 16; i++) {
rlm@10 766 if (previousRGBs[i] != imgRGBs[i] && i < first) {
rlm@10 767 first = i;
rlm@10 768 }
rlm@10 769 if (previousRGBs[i] != imgRGBs[i] && i > last) {
rlm@10 770 last = i;
rlm@10 771 }
rlm@10 772 }*/
rlm@10 773 int first = 0;
rlm@10 774 int last = imgPalette.getMapSize() - 1;
rlm@10 775 /*
rlm@10 776 * typedef struct {
rlm@10 777 BYTE bFirstEntry;
rlm@10 778 BYTE bNumEntries;
rlm@10 779 WORD wFlags;
rlm@10 780 PALETTEENTRY peNew[];
rlm@10 781 } AVIPALCHANGE;
rlm@10 782 *
rlm@10 783 * typedef struct tagPALETTEENTRY {
rlm@10 784 BYTE peRed;
rlm@10 785 BYTE peGreen;
rlm@10 786 BYTE peBlue;
rlm@10 787 BYTE peFlags;
rlm@10 788 } PALETTEENTRY;
rlm@10 789 */
rlm@10 790 DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
rlm@10 791 pOut.writeByte(first);//bFirstEntry
rlm@10 792 pOut.writeByte(last - first + 1);//bNumEntries
rlm@10 793 pOut.writeShort(0);//wFlags
rlm@10 794
rlm@10 795 for (int i = first; i <= last; i++) {
rlm@10 796 pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
rlm@10 797 pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
rlm@10 798 pOut.writeByte(imgRGBs[i] & 0xff); // blue
rlm@10 799 pOut.writeByte(0); // reserved
rlm@10 800 }
rlm@10 801
rlm@10 802 moviChunk.add(paletteChangeChunk);
rlm@10 803 paletteChangeChunk.finish();
rlm@10 804 long length = getRelativeStreamPosition() - offset;
rlm@10 805 videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
rlm@10 806 offset = getRelativeStreamPosition();
rlm@10 807 }
rlm@10 808
rlm@10 809 videoFrameChunk = new DataChunk("00db");
rlm@10 810 byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
rlm@10 811 for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
rlm@10 812 videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth);
rlm@10 813 }
rlm@10 814 break;
rlm@10 815 }
rlm@10 816 default: {
rlm@10 817 videoFrameChunk = new DataChunk("00db");
rlm@10 818 WritableRaster raster = image.getRaster();
rlm@10 819 int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data
rlm@10 820 byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data
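// Write the scanlines bottom-up, swapping the first and third samples of
// each pixel so the bytes end up in the BGR order of a 24-bit DIB
// (assuming the raster supplies samples in RGB order).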
rlm@10 821 for (int y = imgHeight - 1; y >= 0; --y) { // Upside down
rlm@10 822 raster.getPixels(0, y, imgWidth, 1, raw);
rlm@10 823 for (int x = 0, n = imgWidth * 3; x < n; x += 3) {
rlm@10 824 bytes[x + 2] = (byte) raw[x]; // Blue
rlm@10 825 bytes[x + 1] = (byte) raw[x + 1]; // Green
rlm@10 826 bytes[x] = (byte) raw[x + 2]; // Red
rlm@10 827 }
rlm@10 828 videoFrameChunk.getOutputStream().write(bytes);
rlm@10 829 }
rlm@10 830 break;
rlm@10 831 }
rlm@10 832 }
rlm@10 833 break;
rlm@10 834 }
rlm@10 835
rlm@10 836 case JPG: {
rlm@10 837 videoFrameChunk = new DataChunk("00dc");
rlm@10 838 ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next();
rlm@10 839 ImageWriteParam iwParam = iw.getDefaultWriteParam();
rlm@10 840 iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
rlm@10 841 iwParam.setCompressionQuality(quality);
rlm@10 842 MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
rlm@10 843 iw.setOutput(imgOut);
rlm@10 844 IIOImage img = new IIOImage(image, null, null);
rlm@10 845 iw.write(null, img, iwParam);
rlm@10 846 iw.dispose();
rlm@10 847 break;
rlm@10 848 }
rlm@10 849 case PNG:
rlm@10 850 default: {
rlm@10 851 videoFrameChunk = new DataChunk("00dc");
rlm@10 852 ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next();
rlm@10 853 ImageWriteParam iwParam = iw.getDefaultWriteParam();
rlm@10 854 MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
rlm@10 855 iw.setOutput(imgOut);
rlm@10 856 IIOImage img = new IIOImage(image, null, null);
rlm@10 857 iw.write(null, img, iwParam);
rlm@10 858 iw.dispose();
rlm@10 859 break;
rlm@10 860 }
rlm@10 861 }
rlm@10 862 long length = getRelativeStreamPosition() - offset;
rlm@10 863 moviChunk.add(videoFrameChunk);
rlm@10 864 videoFrameChunk.finish();
rlm@10 865
rlm@10 866 videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync));
rlm@10 867 if (getRelativeStreamPosition() > 1L << 32) {
rlm@10 868 throw new IOException("AVI file is larger than 4 GB");
rlm@10 869 }
rlm@10 870 }
rlm@10 871
rlm@10 872 /**
rlm@10 873 * Writes a frame from a file to the video track.
rlm@10 874 * <p>
rlm@10 875 * This method does not inspect the contents of the file.
rlm@10 876 * For example, it is your responsibility to only add JPG files if you have
rlm@10 877 * chosen the JPEG video format.
rlm@10 878 * <p>
rlm@10 879 * If you add all frames from files or from input streams, then you
rlm@10 880 * have to explicitly set the dimension of the video track before you
rlm@10 881 * call finish() or close().
rlm@10 882 *
rlm@10 883 * @param file The file which holds the image data.
rlm@10 884 *
rlm@10 886 * @throws IOException if writing the image failed.
rlm@10 887 */
rlm@10 888 public void writeFrame(File file) throws IOException {
rlm@10 889 FileInputStream in = null;
rlm@10 890 try {
rlm@10 891 in = new FileInputStream(file);
rlm@10 892 writeFrame(in);
rlm@10 893 } finally {
rlm@10 894 if (in != null) {
rlm@10 895 in.close();
rlm@10 896 }
rlm@10 897 }
rlm@10 898 }
rlm@10 899
rlm@10 900 /**
rlm@10 901 * Writes a frame to the video track.
rlm@10 902 * <p>
rlm@10 903 * This method does not inspect the contents of the file.
rlm@10 904 * For example, it is your responsibility to only add JPG files if you have
rlm@10 905 * chosen the JPEG video format.
rlm@10 906 * <p>
rlm@10 907 * If you add all frames from files or from input streams, then you
rlm@10 908 * have to explicitly set the dimension of the video track before you
rlm@10 909 * call finish() or close().
rlm@10 910 *
rlm@10 911 * @param in The input stream which holds the image data.
rlm@10 912 *
rlm@10 914 * @throws IOException if writing the image failed.
rlm@10 915 */
rlm@10 916 public void writeFrame(InputStream in) throws IOException {
rlm@10 917 ensureOpen();
rlm@10 918 ensureStarted();
rlm@10 919
rlm@10 920 long offset = getRelativeStreamPosition();
rlm@10 921 DataChunk videoFrameChunk = new DataChunk(
rlm@10 922 videoFormat == VideoFormat.RAW ? "00db" : "00dc");
rlm@10 923 moviChunk.add(videoFrameChunk);
rlm@10 924 OutputStream mdatOut = videoFrameChunk.getOutputStream();
rlm@10 925 byte[] buf = new byte[512];
rlm@10 926 int len;
rlm@10 927 while ((len = in.read(buf)) != -1) {
rlm@10 928 mdatOut.write(buf, 0, len);
rlm@10 929 }
rlm@10 930 long length = getRelativeStreamPosition() - offset;
rlm@10 931 videoFrameChunk.finish();
rlm@10 932 videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true));
rlm@10 933 if (getRelativeStreamPosition() > 1L << 32) {
rlm@10 934 throw new IOException("AVI file is larger than 4 GB");
rlm@10 935 }
rlm@10 936 }
rlm@10 937
rlm@10 938 /**
rlm@10 939 * Closes the movie file as well as the stream being filtered.
rlm@10 940 *
rlm@10 941 * @exception IOException if an I/O error has occurred
rlm@10 942 */
rlm@10 943 public void close() throws IOException {
rlm@10 944 if (state == States.STARTED) {
rlm@10 945 finish();
rlm@10 946 }
rlm@10 947 if (state != States.CLOSED) {
rlm@10 948 out.close();
rlm@10 949 state = States.CLOSED;
rlm@10 950 }
rlm@10 951 }
rlm@10 952
rlm@10 953 /**
rlm@10 954 * Finishes writing the contents of the AVI output stream without closing
rlm@10 955 * the underlying stream. Use this method when applying multiple filters
rlm@10 956 * in succession to the same output stream.
rlm@10 957 *
rlm@10 958 * @exception IllegalStateException if the dimension of the video track
rlm@10 959 * has not been specified or determined yet.
rlm@10 960 * @exception IOException if an I/O exception has occurred
rlm@10 961 */
rlm@10 962 public void finish() throws IOException {
rlm@10 963 ensureOpen();
rlm@10 964 if (state != States.FINISHED) {
rlm@10 965 if (imgWidth == -1 || imgHeight == -1) {
rlm@10 966 throw new IllegalStateException("image width and height must be specified");
rlm@10 967 }
rlm@10 968
rlm@10 969 moviChunk.finish();
rlm@10 970 writeEpilog();
rlm@10 971 state = States.FINISHED;
rlm@10 972 imgWidth = imgHeight = -1;
rlm@10 973 }
rlm@10 974 }
rlm@10 975
rlm@10 976 /**
rlm@10 977 * Check to make sure that this stream has not been closed
rlm@10 978 */
rlm@10 979 private void ensureOpen() throws IOException {
rlm@10 980 if (state == States.CLOSED) {
rlm@10 981 throw new IOException("Stream closed");
rlm@10 982 }
rlm@10 983 }
rlm@10 984
rlm@10 985 /** Gets the position relative to the beginning of the AVI stream.
rlm@10 986 * <p>
rlm@10 987 * Usually this value is equal to the stream position of the underlying
rlm@10 988 * ImageOutputStream, but can be larger if the underlying stream already
rlm@10 989 * contained data.
rlm@10 990 *
rlm@10 991 * @return The relative stream position.
rlm@10 992 * @throws IOException
rlm@10 993 */
rlm@10 994 private long getRelativeStreamPosition() throws IOException {
rlm@10 995 return out.getStreamPosition() - streamOffset;
rlm@10 996 }
rlm@10 997
rlm@10 998 /** Seeks relative to the beginning of the AVI stream.
rlm@10 999 * <p>
rlm@10 1000 * Usually this is equal to seeking in the underlying ImageOutputStream, but
rlm@10 1001 * can be different if the underlying stream already contained data.
rlm@10 1002 *
rlm@10 1003 */
rlm@10 1004 private void seekRelative(long newPosition) throws IOException {
rlm@10 1005 out.seek(newPosition + streamOffset);
rlm@10 1006 }
rlm@10 1007
rlm@10 1008 private void writeProlog() throws IOException {
rlm@10 1009 // The file has the following structure:
rlm@10 1010 //
rlm@10 1011 // .RIFF AVI
rlm@10 1012 // ..avih (AVI Header Chunk)
rlm@10 1013 // ..LIST strl
rlm@10 1014 // ...strh (Stream Header Chunk)
rlm@10 1015 // ...strf (Stream Format Chunk)
rlm@10 1016 // ..LIST movi
rlm@10 1017 // ...00dc (Compressed video data chunk in Track 00, repeated for each frame)
rlm@10 1018 // ..idx1 (List of video data chunks and their location in the file)
rlm@10 1019
rlm@10 1020 // The RIFF AVI Chunk holds the complete movie
rlm@10 1021 aviChunk = new CompositeChunk("RIFF", "AVI ");
rlm@10 1022 CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl");
rlm@10 1023
rlm@10 1024 // Write empty AVI Main Header Chunk - we fill the data in later
rlm@10 1025 aviChunk.add(hdrlChunk);
rlm@10 1026 avihChunk = new FixedSizeDataChunk("avih", 56);
rlm@10 1027 avihChunk.seekToEndOfChunk();
rlm@10 1028 hdrlChunk.add(avihChunk);
rlm@10 1029
rlm@10 1030 CompositeChunk strlChunk = new CompositeChunk("LIST", "strl");
rlm@10 1031 hdrlChunk.add(strlChunk);
rlm@10 1032
rlm@10 1033 // Write empty AVI Stream Header Chunk - we fill the data in later
rlm@10 1034 strhChunk = new FixedSizeDataChunk("strh", 56);
rlm@10 1035 strhChunk.seekToEndOfChunk();
rlm@10 1036 strlChunk.add(strhChunk);
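// The strf chunk holds a 40-byte BITMAPINFOHEADER, followed by one
// 4-byte RGBQUAD entry per palette color when a palette is present.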
rlm@10 1037 strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4);
rlm@10 1038 strfChunk.seekToEndOfChunk();
rlm@10 1039 strlChunk.add(strfChunk);
rlm@10 1040
rlm@10 1041 moviChunk = new CompositeChunk("LIST", "movi");
rlm@10 1042 aviChunk.add(moviChunk);
rlm@10 1043
rlm@10 1044
rlm@10 1045 }
rlm@10 1046
rlm@10 1047 private void writeEpilog() throws IOException {
rlm@10 1048
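// Find the largest sample; it is written below as dwSuggestedBufferSize
// in both the main header and the stream header.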
rlm@10 1049 long bufferSize = 0;
rlm@10 1050 for (Sample s : videoFrames) {
rlm@10 1051 if (s.length > bufferSize) {
rlm@10 1052 bufferSize = s.length;
rlm@10 1053 }
rlm@10 1054 }
rlm@10 1055
rlm@10 1056
rlm@10 1057 DataChunkOutputStream d;
rlm@10 1058
rlm@10 1059 /* Create Idx1 Chunk and write data
rlm@10 1060 * -------------
rlm@10 1061 typedef struct _avioldindex {
rlm@10 1062 FOURCC fcc;
rlm@10 1063 DWORD cb;
rlm@10 1064 struct _avioldindex_entry {
rlm@10 1065 DWORD dwChunkId;
rlm@10 1066 DWORD dwFlags;
rlm@10 1067 DWORD dwOffset;
rlm@10 1068 DWORD dwSize;
rlm@10 1069 } aIndex[];
rlm@10 1070 } AVIOLDINDEX;
rlm@10 1071 */
rlm@10 1072 DataChunk idx1Chunk = new DataChunk("idx1");
rlm@10 1073 aviChunk.add(idx1Chunk);
rlm@10 1074 d = idx1Chunk.getOutputStream();
rlm@10 1075 long moviListOffset = moviChunk.offset + 8;
rlm@10 1076 //moviListOffset = 0;
rlm@10 1077 for (Sample f : videoFrames) {
rlm@10 1078
rlm@10 1079 d.writeType(f.chunkType); // dwChunkId
rlm@10 1080 // Specifies a FOURCC that identifies a stream in the AVI file. The
rlm@10 1081 // FOURCC must have the form 'xxyy' where xx is the stream number and yy
rlm@10 1082 // is a two-character code that identifies the contents of the stream:
rlm@10 1083 //
rlm@10 1084 // Two-character code Description
rlm@10 1085 // db Uncompressed video frame
rlm@10 1086 // dc Compressed video frame
rlm@10 1087 // pc Palette change
rlm@10 1088 // wb Audio data
rlm@10 1089
rlm@10 1090 d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)//
rlm@10 1091 | (f.isSync ? 0x10 : 0x0)); // dwFlags
rlm@10 1092 // Specifies a bitwise combination of zero or more of the following
rlm@10 1093 // flags:
rlm@10 1094 //
rlm@10 1095 // Value Name Description
rlm@10 1096 // 0x10 AVIIF_KEYFRAME The data chunk is a key frame.
rlm@10 1097 // 0x1 AVIIF_LIST The data chunk is a 'rec ' list.
rlm@10 1098 // 0x100 AVIIF_NO_TIME The data chunk does not affect the timing of the
rlm@10 1099 // stream. For example, this flag should be set for
rlm@10 1100 // palette changes.
rlm@10 1101
rlm@10 1102 d.writeUInt(f.offset - moviListOffset); // dwOffset
rlm@10 1103 // Specifies the location of the data chunk in the file. The value
rlm@10 1104 // should be specified as an offset, in bytes, from the start of the
rlm@10 1105 // 'movi' list; however, in some AVI files it is given as an offset from
rlm@10 1106 // the start of the file.
rlm@10 1107
rlm@10 1108 d.writeUInt(f.length); // dwSize
rlm@10 1109 // Specifies the size of the data chunk, in bytes.
rlm@10 1110 }
rlm@10 1111 idx1Chunk.finish();
rlm@10 1112
rlm@10 1113 /* Write Data into AVI Main Header Chunk
rlm@10 1114 * -------------
rlm@10 1115 * The AVIMAINHEADER structure defines global information in an AVI file.
rlm@10 1116 * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx
rlm@10 1117 typedef struct _avimainheader {
rlm@10 1118 FOURCC fcc;
rlm@10 1119 DWORD cb;
rlm@10 1120 DWORD dwMicroSecPerFrame;
rlm@10 1121 DWORD dwMaxBytesPerSec;
rlm@10 1122 DWORD dwPaddingGranularity;
rlm@10 1123 DWORD dwFlags;
rlm@10 1124 DWORD dwTotalFrames;
rlm@10 1125 DWORD dwInitialFrames;
rlm@10 1126 DWORD dwStreams;
rlm@10 1127 DWORD dwSuggestedBufferSize;
rlm@10 1128 DWORD dwWidth;
rlm@10 1129 DWORD dwHeight;
rlm@10 1130 DWORD dwReserved[4];
rlm@10 1131 } AVIMAINHEADER; */
rlm@10 1132 avihChunk.seekToStartOfData();
rlm@10 1133 d = avihChunk.getOutputStream();
rlm@10 1134
rlm@10 1135 d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame
rlm@10 1136 // Specifies the number of microseconds between frames.
rlm@10 1137 // This value indicates the overall timing for the file.
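// With the defaults timeScale = 1 and frameRate = 30 this writes
// 1000000 * 1 / 30 = 33333 microseconds per frame.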
rlm@10 1138
rlm@10 1139 d.writeUInt(0); // dwMaxBytesPerSec
rlm@10 1140 // Specifies the approximate maximum data rate of the file.
rlm@10 1141 // This value indicates the number of bytes per second the system
rlm@10 1142 // must handle to present an AVI sequence as specified by the other
rlm@10 1143 // parameters contained in the main header and stream header chunks.
rlm@10 1144
rlm@10 1145 d.writeUInt(0); // dwPaddingGranularity
rlm@10 1146 // Specifies the alignment for data, in bytes. Pad the data to multiples
rlm@10 1147 // of this value.
rlm@10 1148
rlm@10 1149 d.writeUInt(0x10); // dwFlags (0x10 == hasIndex)
rlm@10 1150 // Contains a bitwise combination of zero or more of the following
rlm@10 1151 // flags:
rlm@10 1152 //
rlm@10 1153 // Value Name Description
rlm@10 1154 // 0x10 AVIF_HASINDEX Indicates the AVI file has an index.
rlm@10 1155 // 0x20 AVIF_MUSTUSEINDEX Indicates that application should use the
rlm@10 1156 // index, rather than the physical ordering of the
rlm@10 1157 // chunks in the file, to determine the order of
rlm@10 1158 // presentation of the data. For example, this flag
rlm@10 1159 // could be used to create a list of frames for
rlm@10 1160 // editing.
rlm@10 1161 // 0x100 AVIF_ISINTERLEAVED Indicates the AVI file is interleaved.
rlm@10 1162 // 0x1000 AVIF_WASCAPTUREFILE Indicates the AVI file is a specially
rlm@10 1163 // allocated file used for capturing real-time
rlm@10 1164 // video. Applications should warn the user before
rlm@10 1165 // writing over a file with this flag set because
rlm@10 1166 // the user probably defragmented this file.
rlm@10 1167 // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted
rlm@10 1168 // data and software. When this flag is used,
rlm@10 1169 // software should not permit the data to be
rlm@10 1170 // duplicated.
rlm@10 1171
rlm@10 1172 d.writeUInt(videoFrames.size()); // dwTotalFrames
rlm@10 1173 // Specifies the total number of frames of data in the file.
rlm@10 1174
rlm@10 1175 d.writeUInt(0); // dwInitialFrames
rlm@10 1176 // Specifies the initial frame for interleaved files. Noninterleaved
rlm@10 1177 // files should specify zero. If you are creating interleaved files,
rlm@10 1178 // specify the number of frames in the file prior to the initial frame
rlm@10 1179 // of the AVI sequence in this member.
rlm@10 1180 // To give the audio driver enough audio to work with, the audio data in
rlm@10 1181 // an interleaved file must be skewed from the video data. Typically,
rlm@10 1182 // the audio data should be moved forward enough frames to allow
rlm@10 1183 // approximately 0.75 seconds of audio data to be preloaded. The
rlm@10 1184 // dwInitialRecords member should be set to the number of frames the
rlm@10 1185 // audio is skewed. Also set the same value for the dwInitialFrames
rlm@10 1186 // member of the AVISTREAMHEADER structure in the audio stream header
rlm@10 1187
rlm@10 1188 d.writeUInt(1); // dwStreams
rlm@10 1189 // Specifies the number of streams in the file. For example, a file with
rlm@10 1190 // audio and video has two streams.
rlm@10 1191
rlm@10 1192 d.writeUInt(bufferSize); // dwSuggestedBufferSize
rlm@10 1193 // Specifies the suggested buffer size for reading the file. Generally,
rlm@10 1194 // this size should be large enough to contain the largest chunk in the
rlm@10 1195 // file. If set to zero, or if it is too small, the playback software
rlm@10 1196 // will have to reallocate memory during playback, which will reduce
rlm@10 1197 // performance. For an interleaved file, the buffer size should be large
rlm@10 1198 // enough to read an entire record, and not just a chunk.
rlm@10 1199
rlm@10 1200
rlm@10 1201 d.writeUInt(imgWidth); // dwWidth
rlm@10 1202 // Specifies the width of the AVI file in pixels.
rlm@10 1203
rlm@10 1204 d.writeUInt(imgHeight); // dwHeight
rlm@10 1205 // Specifies the height of the AVI file in pixels.
rlm@10 1206
rlm@10 1207 d.writeUInt(0); // dwReserved[0]
rlm@10 1208 d.writeUInt(0); // dwReserved[1]
rlm@10 1209 d.writeUInt(0); // dwReserved[2]
rlm@10 1210 d.writeUInt(0); // dwReserved[3]
rlm@10 1211 // Reserved. Set this array to zero.
rlm@10 1212
rlm@10 1213 /* Write Data into AVI Stream Header Chunk
rlm@10 1214 * -------------
rlm@10 1215 * The AVISTREAMHEADER structure contains information about one stream
rlm@10 1216 * in an AVI file.
rlm@10 1217 * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx
rlm@10 1218 typedef struct _avistreamheader {
rlm@10 1219 FOURCC fcc;
rlm@10 1220 DWORD cb;
rlm@10 1221 FOURCC fccType;
rlm@10 1222 FOURCC fccHandler;
rlm@10 1223 DWORD dwFlags;
rlm@10 1224 WORD wPriority;
rlm@10 1225 WORD wLanguage;
rlm@10 1226 DWORD dwInitialFrames;
rlm@10 1227 DWORD dwScale;
rlm@10 1228 DWORD dwRate;
rlm@10 1229 DWORD dwStart;
rlm@10 1230 DWORD dwLength;
rlm@10 1231 DWORD dwSuggestedBufferSize;
rlm@10 1232 DWORD dwQuality;
rlm@10 1233 DWORD dwSampleSize;
rlm@10 1234 struct {
rlm@10 1235 short int left;
rlm@10 1236 short int top;
rlm@10 1237 short int right;
rlm@10 1238 short int bottom;
rlm@10 1239 } rcFrame;
rlm@10 1240 } AVISTREAMHEADER;
rlm@10 1241 */
rlm@10 1242 strhChunk.seekToStartOfData();
rlm@10 1243 d = strhChunk.getOutputStream();
rlm@10 1244 d.writeType("vids"); // fccType - vids for video stream
rlm@10 1245 // Contains a FOURCC that specifies the type of the data contained in
rlm@10 1246 // the stream. The following standard AVI values for video and audio are
rlm@10 1247 // defined:
rlm@10 1248 //
rlm@10 1249 // FOURCC Description
rlm@10 1250 // 'auds' Audio stream
rlm@10 1251 // 'mids' MIDI stream
rlm@10 1252 // 'txts' Text stream
rlm@10 1253 // 'vids' Video stream
rlm@10 1254
rlm@10 1255 switch (videoFormat) {
rlm@10 1256 case RAW:
rlm@10 1257 d.writeType("DIB "); // fccHandler - DIB for Raw RGB
rlm@10 1258 break;
rlm@10 1259 case RLE:
rlm@10 1260 d.writeType("RLE "); // fccHandler - Microsoft RLE
rlm@10 1261 break;
rlm@10 1262 case JPG:
rlm@10 1263 d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG
rlm@10 1264 break;
rlm@10 1265 case PNG:
rlm@10 1266 default:
rlm@10 1267 d.writeType("png "); // fccHandler - png for PNG
rlm@10 1268 break;
rlm@10 1269 }
rlm@10 1270 // Optionally, contains a FOURCC that identifies a specific data
rlm@10 1271 // handler. The data handler is the preferred handler for the stream.
rlm@10 1272 // For audio and video streams, this specifies the codec for decoding
rlm@10 1273 // the stream.
rlm@10 1274
rlm@10 1275 if (imgDepth <= 8) {
rlm@10 1276 d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES
rlm@10 1277 } else {
rlm@10 1278 d.writeUInt(0); // dwFlags
rlm@10 1279 }
rlm@10 1280
rlm@10 1281 // Contains any flags for the data stream. The bits in the high-order
rlm@10 1282 // word of these flags are specific to the type of data contained in the
rlm@10 1283 // stream. The following standard flags are defined:
rlm@10 1284 //
rlm@10 1285 // Value Name Description
rlm@10 1286 // AVISF_DISABLED 0x00000001 Indicates this stream should not
rlm@10 1287 // be enabled by default.
rlm@10 1288 // AVISF_VIDEO_PALCHANGES 0x00010000
rlm@10 1289 // Indicates this video stream contains
rlm@10 1290 // palette changes. This flag warns the playback
rlm@10 1291 // software that it will need to animate the
rlm@10 1292 // palette.
rlm@10 1293
rlm@10 1294 d.writeUShort(0); // wPriority
rlm@10 1295 // Specifies priority of a stream type. For example, in a file with
rlm@10 1296 // multiple audio streams, the one with the highest priority might be
rlm@10 1297 // the default stream.
rlm@10 1298
rlm@10 1299 d.writeUShort(0); // wLanguage
rlm@10 1300 // Language tag.
rlm@10 1301
rlm@10 1302 d.writeUInt(0); // dwInitialFrames
rlm@10 1303 // Specifies how far audio data is skewed ahead of the video frames in
rlm@10 1304 // interleaved files. Typically, this is about 0.75 seconds. If you are
rlm@10 1305 // creating interleaved files, specify the number of frames in the file
rlm@10 1306 // prior to the initial frame of the AVI sequence in this member. For
rlm@10 1307 // more information, see the remarks for the dwInitialFrames member of
rlm@10 1308 // the AVIMAINHEADER structure.
rlm@10 1309
rlm@10 1310 d.writeUInt(timeScale); // dwScale
rlm@10 1311 // Used with dwRate to specify the time scale that this stream will use.
rlm@10 1312 // Dividing dwRate by dwScale gives the number of samples per second.
rlm@10 1313 // For video streams, this is the frame rate. For audio streams, this
rlm@10 1314 // rate corresponds to the time needed to play nBlockAlign bytes of
rlm@10 1315 // audio, which for PCM audio is just the sample rate.
rlm@10 1316
rlm@10 1317 d.writeUInt(frameRate); // dwRate
rlm@10 1318 // See dwScale.
rlm@10 1319
rlm@10 1320 d.writeUInt(0); // dwStart
rlm@10 1321 // Specifies the starting time for this stream. The units are defined by
rlm@10 1322 // the dwRate and dwScale members in the main file header. Usually, this
rlm@10 1323 // is zero, but it can specify a delay time for a stream that does not
rlm@10 1324 // start concurrently with the file.
rlm@10 1325
rlm@10 1326 d.writeUInt(videoFrames.size()); // dwLength
rlm@10 1327 // Specifies the length of this stream. The units are defined by the
rlm@10 1328 // dwRate and dwScale members of the stream's header.
rlm@10 1329
rlm@10 1330 d.writeUInt(bufferSize); // dwSuggestedBufferSize
rlm@10 1331 // Specifies how large a buffer should be used to read this stream.
rlm@10 1332 // Typically, this contains a value corresponding to the largest chunk
rlm@10 1333 // present in the stream. Using the correct buffer size makes playback
rlm@10 1334 // more efficient. Use zero if you do not know the correct buffer size.
rlm@10 1335
rlm@10 1336 d.writeInt(-1); // dwQuality
rlm@10 1337 // Specifies an indicator of the quality of the data in the stream.
rlm@10 1338 // Quality is represented as a number between 0 and 10,000.
rlm@10 1339 // For compressed data, this typically represents the value of the
rlm@10 1340 // quality parameter passed to the compression software. If set to -1,
rlm@10 1341 // drivers use the default quality value.
rlm@10 1342
rlm@10 1343 d.writeUInt(0); // dwSampleSize
rlm@10 1344 // Specifies the size of a single sample of data. This is set to zero
rlm@10 1345 // if the samples can vary in size. If this number is nonzero, then
rlm@10 1346 // multiple samples of data can be grouped into a single chunk within
rlm@10 1347 // the file. If it is zero, each sample of data (such as a video frame)
rlm@10 1348 // must be in a separate chunk. For video streams, this number is
rlm@10 1349 // typically zero, although it can be nonzero if all video frames are
rlm@10 1350 // the same size. For audio streams, this number should be the same as
rlm@10 1351 // the nBlockAlign member of the WAVEFORMATEX structure describing the
rlm@10 1352 // audio.
rlm@10 1353
rlm@10 1354 d.writeUShort(0); // rcFrame.left
rlm@10 1355 d.writeUShort(0); // rcFrame.top
rlm@10 1356 d.writeUShort(imgWidth); // rcFrame.right
rlm@10 1357 d.writeUShort(imgHeight); // rcFrame.bottom
rlm@10 1358 // Specifies the destination rectangle for a text or video stream within
rlm@10 1359 // the movie rectangle specified by the dwWidth and dwHeight members of
rlm@10 1360 // the AVI main header structure. The rcFrame member is typically used
rlm@10 1361 // in support of multiple video streams. Set this rectangle to the
rlm@10 1362 // coordinates corresponding to the movie rectangle to update the whole
rlm@10 1363 // movie rectangle. Units for this member are pixels. The upper-left
rlm@10 1364 // corner of the destination rectangle is relative to the upper-left
rlm@10 1365 // corner of the movie rectangle.
rlm@10 1366
rlm@10 1367 /* Write BITMAPINFOHEADER Data into AVI Stream Format Chunk
rlm@10 1368 * -------------
rlm@10 1369 * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
rlm@10 1370 typedef struct tagBITMAPINFOHEADER {
rlm@10 1371 DWORD biSize;
rlm@10 1372 LONG biWidth;
rlm@10 1373 LONG biHeight;
rlm@10 1374 WORD biPlanes;
rlm@10 1375 WORD biBitCount;
rlm@10 1376 DWORD biCompression;
rlm@10 1377 DWORD biSizeImage;
rlm@10 1378 LONG biXPelsPerMeter;
rlm@10 1379 LONG biYPelsPerMeter;
rlm@10 1380 DWORD biClrUsed;
rlm@10 1381 DWORD biClrImportant;
rlm@10 1382 } BITMAPINFOHEADER;
rlm@10 1383 */
rlm@10 1384 strfChunk.seekToStartOfData();
rlm@10 1385 d = strfChunk.getOutputStream();
rlm@10 1386 d.writeUInt(40); // biSize
rlm@10 1387 // Specifies the number of bytes required by the structure. This value
rlm@10 1388 // does not include the size of the color table or the size of the color
rlm@10 1389 // masks, if they are appended to the end of structure.
rlm@10 1390
rlm@10 1391 d.writeInt(imgWidth); // biWidth
rlm@10 1392 // Specifies the width of the bitmap, in pixels.
rlm@10 1393
rlm@10 1394 d.writeInt(imgHeight); // biHeight
rlm@10 1395 // Specifies the height of the bitmap, in pixels.
rlm@10 1396 //
rlm@10 1397 // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
rlm@10 1398 // a bottom-up DIB with the origin at the lower left corner. If biHeight
rlm@10 1399 // is negative, the bitmap is a top-down DIB with the origin at the
rlm@10 1400 // upper left corner.
rlm@10 1401 // For YUV bitmaps, the bitmap is always top-down, regardless of the
rlm@10 1402 // sign of biHeight. Decoders should offer YUV formats with positive
rlm@10 1403 // biHeight, but for backward compatibility they should accept YUV
rlm@10 1404 // formats with either positive or negative biHeight.
rlm@10 1405 // For compressed formats, biHeight must be positive, regardless of
rlm@10 1406 // image orientation.
rlm@10 1407
rlm@10 1408 d.writeShort(1); // biPlanes
rlm@10 1409 // Specifies the number of planes for the target device. This value must
rlm@10 1410 // be set to 1.
rlm@10 1411
rlm@10 1412 d.writeShort(imgDepth); // biBitCount
rlm@10 1413 // Specifies the number of bits per pixel (bpp). For uncompressed
rlm@10 1414 // formats, this value is the average number of bits per pixel. For
rlm@10 1415 // compressed formats, this value is the implied bit depth of the
rlm@10 1416 // uncompressed image, after the image has been decoded.
rlm@10 1417
rlm@10 1418 switch (videoFormat) {
rlm@10 1419 case RAW:
rlm@10 1420 default:
rlm@10 1421 d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
rlm@10 1422 break;
rlm@10 1423 case RLE:
rlm@10 1424 if (imgDepth == 8) {
rlm@10 1425 d.writeInt(1); // biCompression - BI_RLE8
rlm@10 1426 } else if (imgDepth == 4) {
rlm@10 1427 d.writeInt(2); // biCompression - BI_RLE4
rlm@10 1428 } else {
rlm@10 1429 throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
rlm@10 1430 }
rlm@10 1431 break;
rlm@10 1432 case JPG:
rlm@10 1433 d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
rlm@10 1434 break;
rlm@10 1435 case PNG:
rlm@10 1436 d.writeType("png "); // biCompression - png for PNG
rlm@10 1437 break;
rlm@10 1438 }
rlm@10 1439 // For compressed video and YUV formats, this member is a FOURCC code,
rlm@10 1440 // specified as a DWORD in little-endian order. For example, YUYV video
rlm@10 1441 // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
rlm@10 1442 // Codes.
rlm@10 1443 //
rlm@10 1444 // For uncompressed RGB formats, the following values are possible:
rlm@10 1445 //
rlm@10 1446 // Value Description
rlm@10 1447 // BI_RGB 0x00000000 Uncompressed RGB.
rlm@10 1448 // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks.
rlm@10 1449 // Valid for 16-bpp and 32-bpp bitmaps.
rlm@10 1450 //
rlm@10 1451 // Note that BI_JPG and BI_PNG are not valid video formats.
rlm@10 1452 //
rlm@10 1453 // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
rlm@10 1454 // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
rlm@10 1455 // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
rlm@10 1456 // structure to determine the specific RGB type.
rlm@10 1457
rlm@10 1458 switch (videoFormat) {
rlm@10 1459 case RAW:
rlm@10 1460 d.writeInt(0); // biSizeImage
rlm@10 1461 break;
rlm@10 1462 case RLE:
rlm@10 1463 case JPG:
rlm@10 1464 case PNG:
rlm@10 1465 default:
rlm@10 1466 if (imgDepth == 4) {
rlm@10 1467 d.writeInt(imgWidth * imgHeight / 2); // biSizeImage
rlm@10 1468 } else {
rlm@10 1469 int bytesPerPixel = Math.max(1, imgDepth / 8);
rlm@10 1470 d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage
rlm@10 1471 }
rlm@10 1472 break;
rlm@10 1473 }
rlm@10 1474 // Specifies the size, in bytes, of the image. This can be set to 0 for
rlm@10 1475 // uncompressed RGB bitmaps.
rlm@10 1476
rlm@10 1477 d.writeInt(0); // biXPelsPerMeter
rlm@10 1478 // Specifies the horizontal resolution, in pixels per meter, of the
rlm@10 1479 // target device for the bitmap.
rlm@10 1480
rlm@10 1481 d.writeInt(0); // biYPelsPerMeter
rlm@10 1482 // Specifies the vertical resolution, in pixels per meter, of the target
rlm@10 1483 // device for the bitmap.
rlm@10 1484
rlm@10 1485 d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed
rlm@10 1486 // Specifies the number of color indices in the color table that are
rlm@10 1487 // actually used by the bitmap.
rlm@10 1488
rlm@10 1489 d.writeInt(0); // biClrImportant
rlm@10 1490 // Specifies the number of color indices that are considered important
rlm@10 1491 // for displaying the bitmap. If this value is zero, all colors are
rlm@10 1492 // important.
rlm@10 1493
rlm@10 1494 if (palette != null) {
rlm@10 1495 for (int i = 0, n = palette.getMapSize(); i < n; ++i) {
rlm@10 1496 /*
rlm@10 1497 * typedef struct tagRGBQUAD {
rlm@10 1498 BYTE rgbBlue;
rlm@10 1499 BYTE rgbGreen;
rlm@10 1500 BYTE rgbRed;
rlm@10 1501 BYTE rgbReserved; // This member is reserved and must be zero.
rlm@10 1502 } RGBQUAD;
rlm@10 1503 */
rlm@10 1504 d.write(palette.getBlue(i));
rlm@10 1505 d.write(palette.getGreen(i));
rlm@10 1506 d.write(palette.getRed(i));
rlm@10 1507 d.write(0);
rlm@10 1508 }
rlm@10 1509 }
rlm@10 1510
rlm@10 1511
rlm@10 1512 // -----------------
rlm@10 1513 aviChunk.finish();
rlm@10 1514 }
rlm@10 1515 }