annotate src/com/aurellem/capture/video/AVIOutputStream.java @ 9:5dfc9e768816

moved files
author Robert McIntyre <rlm@mit.edu>
date Wed, 26 Oct 2011 08:54:12 -0700
parents
children
rev   line source
rlm@9 1 /**
rlm@9 2 * @(#)AVIOutputStream.java 1.5.1 2011-01-17
rlm@9 3 *
rlm@9 4 * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
rlm@9 5 * All rights reserved.
rlm@9 6 *
rlm@9 7 * You may not use, copy or modify this file, except in compliance with the
rlm@9 8 * license agreement you entered into with Werner Randelshofer.
rlm@9 9 * For details see accompanying license terms.
rlm@9 10 */
rlm@9 11 package com.aurellem.capture.video;
rlm@9 12
rlm@9 13 import java.awt.Dimension;
rlm@9 14 import java.awt.image.BufferedImage;
rlm@9 15 import java.awt.image.DataBufferByte;
rlm@9 16 import java.awt.image.IndexColorModel;
rlm@9 17 import java.awt.image.WritableRaster;
rlm@9 18 import java.io.File;
rlm@9 19 import java.io.FileInputStream;
rlm@9 20 import java.io.IOException;
rlm@9 21 import java.io.InputStream;
rlm@9 22 import java.io.OutputStream;
rlm@9 23 import java.util.Arrays;
rlm@9 24 import java.util.Date;
rlm@9 25 import java.util.LinkedList;
rlm@9 26
rlm@9 27 import javax.imageio.IIOImage;
rlm@9 28 import javax.imageio.ImageIO;
rlm@9 29 import javax.imageio.ImageWriteParam;
rlm@9 30 import javax.imageio.ImageWriter;
rlm@9 31 import javax.imageio.stream.FileImageOutputStream;
rlm@9 32 import javax.imageio.stream.ImageOutputStream;
rlm@9 33 import javax.imageio.stream.MemoryCacheImageOutputStream;
rlm@9 34
rlm@9 35 /**
rlm@9 36 * This class supports writing of images into an AVI 1.0 video file.
rlm@9 37 * <p>
rlm@9 38 * The images are written as video frames.
rlm@9 39 * <p>
rlm@9 40 * Video frames can be encoded with one of the following formats:
rlm@9 41 * <ul>
rlm@9 42 * <li>JPEG</li>
rlm@9 43 * <li>PNG</li>
rlm@9 44 * <li>RAW</li>
rlm@9 45 * <li>RLE</li>
rlm@9 46 * </ul>
rlm@9 47 * All frames must have the same format.
rlm@9 48 * When JPG is used each frame can have an individual encoding quality.
rlm@9 49 * <p>
rlm@9 50 * All frames in an AVI file must have the same duration. The duration can
rlm@9 51 * be set by setting an appropriate pair of values using methods
rlm@9 52 * {@link #setFrameRate} and {@link #setTimeScale}.
rlm@9 53 * <p>
rlm@9 54 * The length of an AVI 1.0 file is limited to 1 GB.
rlm@9 55 * This class supports lengths of up to 4 GB, but such files may not work on
rlm@9 56 * all players.
rlm@9 57 * <p>
rlm@9 58 * For detailed information about the AVI RIFF file format see:<br>
rlm@9 59 * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br>
rlm@9 60 * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br>
rlm@9 61 * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br>
rlm@9 62 *
rlm@9 63 * @author Werner Randelshofer
 * @version 1.5.1 2011-01-17 Fixes unintended closing of the output stream.
rlm@9 65 * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format.
rlm@9 66 * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets
rlm@9 67 * in "idx1" chunk.
rlm@9 68 * <br>1.3.2 2010-12-27 File size limit is 1 GB.
rlm@9 69 * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets.
rlm@9 70 * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream.
rlm@9 71 * Added method getVideoDimension().
rlm@9 72 * <br>1.2 2009-08-29 Adds support for RAW video format.
rlm@9 73 * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih
rlm@9 74 * chunk. Changed the API to reflect that AVI works with frame rates instead of
rlm@9 75 * with frame durations.
rlm@9 76 * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG
rlm@9 77 * encoded video.
rlm@9 78 * <br>1.0 2008-08-11 Created.
rlm@9 79 */
rlm@9 80 public class AVIOutputStream {
rlm@9 81
rlm@9 82 /**
rlm@9 83 * Underlying output stream.
rlm@9 84 */
rlm@9 85 private ImageOutputStream out;
rlm@9 86 /** The offset of the QuickTime stream in the underlying ImageOutputStream.
rlm@9 87 * Normally this is 0 unless the underlying stream already contained data
rlm@9 88 * when it was passed to the constructor.
rlm@9 89 */
rlm@9 90 private long streamOffset;
rlm@9 91 /** Previous frame for delta compression. */
rlm@9 92 private Object previousData;
rlm@9 93
/**
 * Supported video encodings.
 * <p>
 * All frames of a movie must use the same format; see the class comment.
 */
public enum VideoFormat {
    // Nested enums are implicitly static, so the former "static" modifier
    // was redundant and has been dropped.
    RAW, RLE, JPG, PNG
}
rlm@9 101 /**
rlm@9 102 * Current video formats.
rlm@9 103 */
rlm@9 104 private VideoFormat videoFormat;
rlm@9 105 /**
rlm@9 106 * Quality of JPEG encoded video frames.
rlm@9 107 */
rlm@9 108 private float quality = 0.9f;
rlm@9 109 /**
rlm@9 110 * Creation time of the movie output stream.
rlm@9 111 */
rlm@9 112 private Date creationTime;
rlm@9 113 /**
rlm@9 114 * Width of the video frames. All frames must have the same width.
rlm@9 115 * The value -1 is used to mark unspecified width.
rlm@9 116 */
rlm@9 117 private int imgWidth = -1;
rlm@9 118 /**
rlm@9 119 * Height of the video frames. All frames must have the same height.
rlm@9 120 * The value -1 is used to mark unspecified height.
rlm@9 121 */
rlm@9 122 private int imgHeight = -1;
rlm@9 123 /** Number of bits per pixel. */
rlm@9 124 private int imgDepth = 24;
rlm@9 125 /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */
rlm@9 126 private IndexColorModel palette;
rlm@9 127 private IndexColorModel previousPalette;
rlm@9 128 /** Video encoder. */
rlm@9 129
rlm@9 130 /**
rlm@9 131 * The timeScale of the movie.
rlm@9 132 * <p>
rlm@9 133 * Used with frameRate to specify the time scale that this stream will use.
rlm@9 134 * Dividing frameRate by timeScale gives the number of samples per second.
rlm@9 135 * For video streams, this is the frame rate. For audio streams, this rate
rlm@9 136 * corresponds to the time needed to play nBlockAlign bytes of audio, which
rlm@9 137 * for PCM audio is the just the sample rate.
rlm@9 138 */
rlm@9 139 private int timeScale = 1;
rlm@9 140 /**
rlm@9 141 * The frameRate of the movie in timeScale units.
rlm@9 142 * <p>
rlm@9 143 * @see timeScale
rlm@9 144 */
rlm@9 145 private int frameRate = 30;
rlm@9 146 /** Interval between keyframes. */
rlm@9 147 private int syncInterval = 30;
rlm@9 148
rlm@9 149 /**
rlm@9 150 * The states of the movie output stream.
rlm@9 151 */
rlm@9 152 private static enum States {
rlm@9 153
rlm@9 154 STARTED, FINISHED, CLOSED;
rlm@9 155 }
rlm@9 156 /**
rlm@9 157 * The current state of the movie output stream.
rlm@9 158 */
rlm@9 159 private States state = States.FINISHED;
rlm@9 160
rlm@9 161 /**
rlm@9 162 * AVI stores media data in samples.
rlm@9 163 * A sample is a single element in a sequence of time-ordered data.
rlm@9 164 */
rlm@9 165 private static class Sample {
rlm@9 166
rlm@9 167 String chunkType;
rlm@9 168 /** Offset of the sample relative to the start of the AVI file.
rlm@9 169 */
rlm@9 170 long offset;
rlm@9 171 /** Data length of the sample. */
rlm@9 172 long length;
rlm@9 173 /**
rlm@9 174 * The duration of the sample in time scale units.
rlm@9 175 */
rlm@9 176 int duration;
rlm@9 177 /** Whether the sample is a sync-sample. */
rlm@9 178 boolean isSync;
rlm@9 179
rlm@9 180 /**
rlm@9 181 * Creates a new sample.
rlm@9 182 * @param duration
rlm@9 183 * @param offset
rlm@9 184 * @param length
rlm@9 185 */
rlm@9 186 public Sample(String chunkId, int duration, long offset, long length, boolean isSync) {
rlm@9 187 this.chunkType = chunkId;
rlm@9 188 this.duration = duration;
rlm@9 189 this.offset = offset;
rlm@9 190 this.length = length;
rlm@9 191 this.isSync = isSync;
rlm@9 192 }
rlm@9 193 }
rlm@9 194 /**
rlm@9 195 * List of video frames.
rlm@9 196 */
rlm@9 197 private LinkedList<Sample> videoFrames;
rlm@9 198 /**
rlm@9 199 * This chunk holds the whole AVI content.
rlm@9 200 */
rlm@9 201 private CompositeChunk aviChunk;
rlm@9 202 /**
rlm@9 203 * This chunk holds the movie frames.
rlm@9 204 */
rlm@9 205 private CompositeChunk moviChunk;
rlm@9 206 /**
rlm@9 207 * This chunk holds the AVI Main Header.
rlm@9 208 */
rlm@9 209 FixedSizeDataChunk avihChunk;
rlm@9 210 /**
rlm@9 211 * This chunk holds the AVI Stream Header.
rlm@9 212 */
rlm@9 213 FixedSizeDataChunk strhChunk;
rlm@9 214 /**
rlm@9 215 * This chunk holds the AVI Stream Format Header.
rlm@9 216 */
rlm@9 217 FixedSizeDataChunk strfChunk;
rlm@9 218
rlm@9 219 /**
rlm@9 220 * Chunk base class.
rlm@9 221 */
rlm@9 222 private abstract class Chunk {
rlm@9 223
rlm@9 224 /**
rlm@9 225 * The chunkType of the chunk. A String with the length of 4 characters.
rlm@9 226 */
rlm@9 227 protected String chunkType;
rlm@9 228 /**
rlm@9 229 * The offset of the chunk relative to the start of the
rlm@9 230 * ImageOutputStream.
rlm@9 231 */
rlm@9 232 protected long offset;
rlm@9 233
rlm@9 234 /**
rlm@9 235 * Creates a new Chunk at the current position of the ImageOutputStream.
rlm@9 236 * @param chunkType The chunkType of the chunk. A string with a length of 4 characters.
rlm@9 237 */
rlm@9 238 public Chunk(String chunkType) throws IOException {
rlm@9 239 this.chunkType = chunkType;
rlm@9 240 offset = getRelativeStreamPosition();
rlm@9 241 }
rlm@9 242
rlm@9 243 /**
rlm@9 244 * Writes the chunk to the ImageOutputStream and disposes it.
rlm@9 245 */
rlm@9 246 public abstract void finish() throws IOException;
rlm@9 247
rlm@9 248 /**
rlm@9 249 * Returns the size of the chunk including the size of the chunk header.
rlm@9 250 * @return The size of the chunk.
rlm@9 251 */
rlm@9 252 public abstract long size();
rlm@9 253 }
rlm@9 254
rlm@9 255 /**
rlm@9 256 * A CompositeChunk contains an ordered list of Chunks.
rlm@9 257 */
rlm@9 258 private class CompositeChunk extends Chunk {
rlm@9 259
rlm@9 260 /**
rlm@9 261 * The type of the composite. A String with the length of 4 characters.
rlm@9 262 */
rlm@9 263 protected String compositeType;
rlm@9 264 private LinkedList<Chunk> children;
rlm@9 265 private boolean finished;
rlm@9 266
rlm@9 267 /**
rlm@9 268 * Creates a new CompositeChunk at the current position of the
rlm@9 269 * ImageOutputStream.
rlm@9 270 * @param compositeType The type of the composite.
rlm@9 271 * @param chunkType The type of the chunk.
rlm@9 272 */
rlm@9 273 public CompositeChunk(String compositeType, String chunkType) throws IOException {
rlm@9 274 super(chunkType);
rlm@9 275 this.compositeType = compositeType;
rlm@9 276 //out.write
rlm@9 277 out.writeLong(0); // make room for the chunk header
rlm@9 278 out.writeInt(0); // make room for the chunk header
rlm@9 279 children = new LinkedList<Chunk>();
rlm@9 280 }
rlm@9 281
rlm@9 282 public void add(Chunk child) throws IOException {
rlm@9 283 if (children.size() > 0) {
rlm@9 284 children.getLast().finish();
rlm@9 285 }
rlm@9 286 children.add(child);
rlm@9 287 }
rlm@9 288
rlm@9 289 /**
rlm@9 290 * Writes the chunk and all its children to the ImageOutputStream
rlm@9 291 * and disposes of all resources held by the chunk.
rlm@9 292 * @throws java.io.IOException
rlm@9 293 */
rlm@9 294 @Override
rlm@9 295 public void finish() throws IOException {
rlm@9 296 if (!finished) {
rlm@9 297 if (size() > 0xffffffffL) {
rlm@9 298 throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size());
rlm@9 299 }
rlm@9 300
rlm@9 301 long pointer = getRelativeStreamPosition();
rlm@9 302 seekRelative(offset);
rlm@9 303
rlm@9 304 DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
rlm@9 305 headerData.writeType(compositeType);
rlm@9 306 headerData.writeUInt(size() - 8);
rlm@9 307 headerData.writeType(chunkType);
rlm@9 308 for (Chunk child : children) {
rlm@9 309 child.finish();
rlm@9 310 }
rlm@9 311 seekRelative(pointer);
rlm@9 312 if (size() % 2 == 1) {
rlm@9 313 out.writeByte(0); // write pad byte
rlm@9 314 }
rlm@9 315 finished = true;
rlm@9 316 }
rlm@9 317 }
rlm@9 318
rlm@9 319 @Override
rlm@9 320 public long size() {
rlm@9 321 long length = 12;
rlm@9 322 for (Chunk child : children) {
rlm@9 323 length += child.size() + child.size() % 2;
rlm@9 324 }
rlm@9 325 return length;
rlm@9 326 }
rlm@9 327 }
rlm@9 328
rlm@9 329 /**
rlm@9 330 * Data Chunk.
rlm@9 331 */
rlm@9 332 private class DataChunk extends Chunk {
rlm@9 333
rlm@9 334 private DataChunkOutputStream data;
rlm@9 335 private boolean finished;
rlm@9 336
rlm@9 337 /**
rlm@9 338 * Creates a new DataChunk at the current position of the
rlm@9 339 * ImageOutputStream.
rlm@9 340 * @param chunkType The chunkType of the chunk.
rlm@9 341 */
rlm@9 342 public DataChunk(String name) throws IOException {
rlm@9 343 super(name);
rlm@9 344 out.writeLong(0); // make room for the chunk header
rlm@9 345 data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
rlm@9 346 }
rlm@9 347
rlm@9 348 public DataChunkOutputStream getOutputStream() {
rlm@9 349 if (finished) {
rlm@9 350 throw new IllegalStateException("DataChunk is finished");
rlm@9 351 }
rlm@9 352 return data;
rlm@9 353 }
rlm@9 354
rlm@9 355 /**
rlm@9 356 * Returns the offset of this chunk to the beginning of the random access file
rlm@9 357 * @return
rlm@9 358 */
rlm@9 359 public long getOffset() {
rlm@9 360 return offset;
rlm@9 361 }
rlm@9 362
rlm@9 363 @Override
rlm@9 364 public void finish() throws IOException {
rlm@9 365 if (!finished) {
rlm@9 366 long sizeBefore = size();
rlm@9 367
rlm@9 368 if (size() > 0xffffffffL) {
rlm@9 369 throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size());
rlm@9 370 }
rlm@9 371
rlm@9 372 long pointer = getRelativeStreamPosition();
rlm@9 373 seekRelative(offset);
rlm@9 374
rlm@9 375 DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
rlm@9 376 headerData.writeType(chunkType);
rlm@9 377 headerData.writeUInt(size() - 8);
rlm@9 378 seekRelative(pointer);
rlm@9 379 if (size() % 2 == 1) {
rlm@9 380 out.writeByte(0); // write pad byte
rlm@9 381 }
rlm@9 382 finished = true;
rlm@9 383 long sizeAfter = size();
rlm@9 384 if (sizeBefore != sizeAfter) {
rlm@9 385 System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter);
rlm@9 386 }
rlm@9 387 }
rlm@9 388 }
rlm@9 389
rlm@9 390 @Override
rlm@9 391 public long size() {
rlm@9 392 return 8 + data.size();
rlm@9 393 }
rlm@9 394 }
rlm@9 395
rlm@9 396 /**
rlm@9 397 * A DataChunk with a fixed size.
rlm@9 398 */
rlm@9 399 private class FixedSizeDataChunk extends Chunk {
rlm@9 400
rlm@9 401 private DataChunkOutputStream data;
rlm@9 402 private boolean finished;
rlm@9 403 private long fixedSize;
rlm@9 404
rlm@9 405 /**
rlm@9 406 * Creates a new DataChunk at the current position of the
rlm@9 407 * ImageOutputStream.
rlm@9 408 * @param chunkType The chunkType of the chunk.
rlm@9 409 */
rlm@9 410 public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException {
rlm@9 411 super(chunkType);
rlm@9 412 this.fixedSize = fixedSize;
rlm@9 413 data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
rlm@9 414 data.writeType(chunkType);
rlm@9 415 data.writeUInt(fixedSize);
rlm@9 416 data.clearCount();
rlm@9 417
rlm@9 418 // Fill fixed size with nulls
rlm@9 419 byte[] buf = new byte[(int) Math.min(512, fixedSize)];
rlm@9 420 long written = 0;
rlm@9 421 while (written < fixedSize) {
rlm@9 422 data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written));
rlm@9 423 written += Math.min(buf.length, fixedSize - written);
rlm@9 424 }
rlm@9 425 if (fixedSize % 2 == 1) {
rlm@9 426 out.writeByte(0); // write pad byte
rlm@9 427 }
rlm@9 428 seekToStartOfData();
rlm@9 429 }
rlm@9 430
rlm@9 431 public DataChunkOutputStream getOutputStream() {
rlm@9 432 /*if (finished) {
rlm@9 433 throw new IllegalStateException("DataChunk is finished");
rlm@9 434 }*/
rlm@9 435 return data;
rlm@9 436 }
rlm@9 437
rlm@9 438 /**
rlm@9 439 * Returns the offset of this chunk to the beginning of the random access file
rlm@9 440 * @return
rlm@9 441 */
rlm@9 442 public long getOffset() {
rlm@9 443 return offset;
rlm@9 444 }
rlm@9 445
rlm@9 446 public void seekToStartOfData() throws IOException {
rlm@9 447 seekRelative(offset + 8);
rlm@9 448 data.clearCount();
rlm@9 449 }
rlm@9 450
rlm@9 451 public void seekToEndOfChunk() throws IOException {
rlm@9 452 seekRelative(offset + 8 + fixedSize + fixedSize % 2);
rlm@9 453 }
rlm@9 454
rlm@9 455 @Override
rlm@9 456 public void finish() throws IOException {
rlm@9 457 if (!finished) {
rlm@9 458 finished = true;
rlm@9 459 }
rlm@9 460 }
rlm@9 461
rlm@9 462 @Override
rlm@9 463 public long size() {
rlm@9 464 return 8 + fixedSize;
rlm@9 465 }
rlm@9 466 }
rlm@9 467
rlm@9 468 /**
rlm@9 469 * Creates a new AVI file with the specified video format and
rlm@9 470 * frame rate. The video has 24 bits per pixel.
rlm@9 471 *
rlm@9 472 * @param file the output file
rlm@9 473 * @param format Selects an encoder for the video format.
rlm@9 474 * @param bitsPerPixel the number of bits per pixel.
rlm@9 475 * @exception IllegalArgumentException if videoFormat is null or if
rlm@9 476 * frame rate is <= 0
rlm@9 477 */
rlm@9 478 public AVIOutputStream(File file, VideoFormat format) throws IOException {
rlm@9 479 this(file,format,24);
rlm@9 480 }
rlm@9 481 /**
rlm@9 482 * Creates a new AVI file with the specified video format and
rlm@9 483 * frame rate.
rlm@9 484 *
rlm@9 485 * @param file the output file
rlm@9 486 * @param format Selects an encoder for the video format.
rlm@9 487 * @param bitsPerPixel the number of bits per pixel.
rlm@9 488 * @exception IllegalArgumentException if videoFormat is null or if
rlm@9 489 * frame rate is <= 0
rlm@9 490 */
rlm@9 491 public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException {
rlm@9 492 if (format == null) {
rlm@9 493 throw new IllegalArgumentException("format must not be null");
rlm@9 494 }
rlm@9 495
rlm@9 496 if (file.exists()) {
rlm@9 497 file.delete();
rlm@9 498 }
rlm@9 499 this.out = new FileImageOutputStream(file);
rlm@9 500 this.streamOffset = 0;
rlm@9 501 this.videoFormat = format;
rlm@9 502 this.videoFrames = new LinkedList<Sample>();
rlm@9 503 this.imgDepth = bitsPerPixel;
rlm@9 504 if (imgDepth == 4) {
rlm@9 505 byte[] gray = new byte[16];
rlm@9 506 for (int i = 0; i < gray.length; i++) {
rlm@9 507 gray[i] = (byte) ((i << 4) | i);
rlm@9 508 }
rlm@9 509 palette = new IndexColorModel(4, 16, gray, gray, gray);
rlm@9 510 } else if (imgDepth == 8) {
rlm@9 511 byte[] gray = new byte[256];
rlm@9 512 for (int i = 0; i < gray.length; i++) {
rlm@9 513 gray[i] = (byte) i;
rlm@9 514 }
rlm@9 515 palette = new IndexColorModel(8, 256, gray, gray, gray);
rlm@9 516 }
rlm@9 517
rlm@9 518 }
rlm@9 519
rlm@9 520 /**
rlm@9 521 * Creates a new AVI output stream with the specified video format and
rlm@9 522 * framerate.
rlm@9 523 *
rlm@9 524 * @param out the underlying output stream
rlm@9 525 * @param format Selects an encoder for the video format.
rlm@9 526 * @exception IllegalArgumentException if videoFormat is null or if
rlm@9 527 * framerate is <= 0
rlm@9 528 */
rlm@9 529 public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {
rlm@9 530 if (format == null) {
rlm@9 531 throw new IllegalArgumentException("format must not be null");
rlm@9 532 }
rlm@9 533 this.out = out;
rlm@9 534 this.streamOffset = out.getStreamPosition();
rlm@9 535 this.videoFormat = format;
rlm@9 536 this.videoFrames = new LinkedList<Sample>();
rlm@9 537 }
rlm@9 538
rlm@9 539 /**
rlm@9 540 * Used with frameRate to specify the time scale that this stream will use.
rlm@9 541 * Dividing frameRate by timeScale gives the number of samples per second.
rlm@9 542 * For video streams, this is the frame rate. For audio streams, this rate
rlm@9 543 * corresponds to the time needed to play nBlockAlign bytes of audio, which
rlm@9 544 * for PCM audio is the just the sample rate.
rlm@9 545 * <p>
rlm@9 546 * The default value is 1.
rlm@9 547 *
rlm@9 548 * @param newValue
rlm@9 549 */
rlm@9 550 public void setTimeScale(int newValue) {
rlm@9 551 if (newValue <= 0) {
rlm@9 552 throw new IllegalArgumentException("timeScale must be greater 0");
rlm@9 553 }
rlm@9 554 this.timeScale = newValue;
rlm@9 555 }
rlm@9 556
rlm@9 557 /**
rlm@9 558 * Returns the time scale of this media.
rlm@9 559 *
rlm@9 560 * @return time scale
rlm@9 561 */
rlm@9 562 public int getTimeScale() {
rlm@9 563 return timeScale;
rlm@9 564 }
rlm@9 565
rlm@9 566 /**
rlm@9 567 * Sets the rate of video frames in time scale units.
rlm@9 568 * <p>
rlm@9 569 * The default value is 30. Together with the default value 1 of timeScale
rlm@9 570 * this results in 30 frames pers second.
rlm@9 571 *
rlm@9 572 * @param newValue
rlm@9 573 */
rlm@9 574 public void setFrameRate(int newValue) {
rlm@9 575 if (newValue <= 0) {
rlm@9 576 throw new IllegalArgumentException("frameDuration must be greater 0");
rlm@9 577 }
rlm@9 578 if (state == States.STARTED) {
rlm@9 579 throw new IllegalStateException("frameDuration must be set before the first frame is written");
rlm@9 580 }
rlm@9 581 this.frameRate = newValue;
rlm@9 582 }
rlm@9 583
rlm@9 584 /**
rlm@9 585 * Returns the frame rate of this media.
rlm@9 586 *
rlm@9 587 * @return frame rate
rlm@9 588 */
rlm@9 589 public int getFrameRate() {
rlm@9 590 return frameRate;
rlm@9 591 }
rlm@9 592
rlm@9 593 /** Sets the global color palette. */
rlm@9 594 public void setPalette(IndexColorModel palette) {
rlm@9 595 this.palette = palette;
rlm@9 596 }
rlm@9 597
rlm@9 598 /**
rlm@9 599 * Sets the compression quality of the video track.
rlm@9 600 * A value of 0 stands for "high compression is important" a value of
rlm@9 601 * 1 for "high image quality is important".
rlm@9 602 * <p>
rlm@9 603 * Changing this value affects frames which are subsequently written
rlm@9 604 * to the AVIOutputStream. Frames which have already been written
rlm@9 605 * are not changed.
rlm@9 606 * <p>
rlm@9 607 * This value has only effect on videos encoded with JPG format.
rlm@9 608 * <p>
rlm@9 609 * The default value is 0.9.
rlm@9 610 *
rlm@9 611 * @param newValue
rlm@9 612 */
rlm@9 613 public void setVideoCompressionQuality(float newValue) {
rlm@9 614 this.quality = newValue;
rlm@9 615 }
rlm@9 616
rlm@9 617 /**
rlm@9 618 * Returns the video compression quality.
rlm@9 619 *
rlm@9 620 * @return video compression quality
rlm@9 621 */
rlm@9 622 public float getVideoCompressionQuality() {
rlm@9 623 return quality;
rlm@9 624 }
rlm@9 625
rlm@9 626 /**
rlm@9 627 * Sets the dimension of the video track.
rlm@9 628 * <p>
rlm@9 629 * You need to explicitly set the dimension, if you add all frames from
rlm@9 630 * files or input streams.
rlm@9 631 * <p>
rlm@9 632 * If you add frames from buffered images, then AVIOutputStream
rlm@9 633 * can determine the video dimension from the image width and height.
rlm@9 634 *
rlm@9 635 * @param width Must be greater than 0.
rlm@9 636 * @param height Must be greater than 0.
rlm@9 637 */
rlm@9 638 public void setVideoDimension(int width, int height) {
rlm@9 639 if (width < 1 || height < 1) {
rlm@9 640 throw new IllegalArgumentException("width and height must be greater zero.");
rlm@9 641 }
rlm@9 642 this.imgWidth = width;
rlm@9 643 this.imgHeight = height;
rlm@9 644 }
rlm@9 645
rlm@9 646 /**
rlm@9 647 * Gets the dimension of the video track.
rlm@9 648 * <p>
rlm@9 649 * Returns null if the dimension is not known.
rlm@9 650 */
rlm@9 651 public Dimension getVideoDimension() {
rlm@9 652 if (imgWidth < 1 || imgHeight < 1) {
rlm@9 653 return null;
rlm@9 654 }
rlm@9 655 return new Dimension(imgWidth, imgHeight);
rlm@9 656 }
rlm@9 657
rlm@9 658 /**
rlm@9 659 * Sets the state of the QuickTimeOutpuStream to started.
rlm@9 660 * <p>
rlm@9 661 * If the state is changed by this method, the prolog is
rlm@9 662 * written.
rlm@9 663 */
rlm@9 664 private void ensureStarted() throws IOException {
rlm@9 665 if (state != States.STARTED) {
rlm@9 666 creationTime = new Date();
rlm@9 667 writeProlog();
rlm@9 668 state = States.STARTED;
rlm@9 669 }
rlm@9 670 }
rlm@9 671
rlm@9 672 /**
rlm@9 673 * Writes a frame to the video track.
rlm@9 674 * <p>
rlm@9 675 * If the dimension of the video track has not been specified yet, it
rlm@9 676 * is derived from the first buffered image added to the AVIOutputStream.
rlm@9 677 *
rlm@9 678 * @param image The frame image.
rlm@9 679 *
rlm@9 680 * @throws IllegalArgumentException if the duration is less than 1, or
rlm@9 681 * if the dimension of the frame does not match the dimension of the video
rlm@9 682 * track.
rlm@9 683 * @throws IOException if writing the image failed.
rlm@9 684 */
rlm@9 685 public void writeFrame(BufferedImage image) throws IOException {
rlm@9 686 ensureOpen();
rlm@9 687 ensureStarted();
rlm@9 688
rlm@9 689 // Get the dimensions of the first image
rlm@9 690 if (imgWidth == -1) {
rlm@9 691 imgWidth = image.getWidth();
rlm@9 692 imgHeight = image.getHeight();
rlm@9 693 } else {
rlm@9 694 // The dimension of the image must match the dimension of the video track
rlm@9 695 if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) {
rlm@9 696 throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size()
rlm@9 697 + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
rlm@9 698 + ") differs from image[0] (width="
rlm@9 699 + imgWidth + ", height=" + imgHeight);
rlm@9 700 }
rlm@9 701 }
rlm@9 702
rlm@9 703 DataChunk videoFrameChunk;
rlm@9 704 long offset = getRelativeStreamPosition();
rlm@9 705 boolean isSync = true;
rlm@9 706 switch (videoFormat) {
rlm@9 707 case RAW: {
rlm@9 708 switch (imgDepth) {
rlm@9 709 case 4: {
rlm@9 710 IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
rlm@9 711 int[] imgRGBs = new int[16];
rlm@9 712 imgPalette.getRGBs(imgRGBs);
rlm@9 713 int[] previousRGBs = new int[16];
rlm@9 714 if (previousPalette == null) {
rlm@9 715 previousPalette = palette;
rlm@9 716 }
rlm@9 717 previousPalette.getRGBs(previousRGBs);
rlm@9 718 if (!Arrays.equals(imgRGBs, previousRGBs)) {
rlm@9 719 previousPalette = imgPalette;
rlm@9 720 DataChunk paletteChangeChunk = new DataChunk("00pc");
rlm@9 721 /*
rlm@9 722 int first = imgPalette.getMapSize();
rlm@9 723 int last = -1;
rlm@9 724 for (int i = 0; i < 16; i++) {
rlm@9 725 if (previousRGBs[i] != imgRGBs[i] && i < first) {
rlm@9 726 first = i;
rlm@9 727 }
rlm@9 728 if (previousRGBs[i] != imgRGBs[i] && i > last) {
rlm@9 729 last = i;
rlm@9 730 }
rlm@9 731 }*/
rlm@9 732 int first = 0;
rlm@9 733 int last = imgPalette.getMapSize() - 1;
rlm@9 734 /*
rlm@9 735 * typedef struct {
rlm@9 736 BYTE bFirstEntry;
rlm@9 737 BYTE bNumEntries;
rlm@9 738 WORD wFlags;
rlm@9 739 PALETTEENTRY peNew[];
rlm@9 740 } AVIPALCHANGE;
rlm@9 741 *
rlm@9 742 * typedef struct tagPALETTEENTRY {
rlm@9 743 BYTE peRed;
rlm@9 744 BYTE peGreen;
rlm@9 745 BYTE peBlue;
rlm@9 746 BYTE peFlags;
rlm@9 747 } PALETTEENTRY;
rlm@9 748 */
rlm@9 749 DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
rlm@9 750 pOut.writeByte(first);//bFirstEntry
rlm@9 751 pOut.writeByte(last - first + 1);//bNumEntries
rlm@9 752 pOut.writeShort(0);//wFlags
rlm@9 753
rlm@9 754 for (int i = first; i <= last; i++) {
rlm@9 755 pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
rlm@9 756 pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
rlm@9 757 pOut.writeByte(imgRGBs[i] & 0xff); // blue
rlm@9 758 pOut.writeByte(0); // reserved*/
rlm@9 759 }
rlm@9 760
rlm@9 761 moviChunk.add(paletteChangeChunk);
rlm@9 762 paletteChangeChunk.finish();
rlm@9 763 long length = getRelativeStreamPosition() - offset;
rlm@9 764 videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
rlm@9 765 offset = getRelativeStreamPosition();
rlm@9 766 }
rlm@9 767
rlm@9 768 videoFrameChunk = new DataChunk("00db");
rlm@9 769 byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
rlm@9 770 byte[] rgb4 = new byte[imgWidth / 2];
rlm@9 771 for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
rlm@9 772 for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) {
rlm@9 773 rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf));
rlm@9 774 }
rlm@9 775 videoFrameChunk.getOutputStream().write(rgb4);
rlm@9 776 }
rlm@9 777 break;
rlm@9 778 }
rlm@9 779 case 8: {
rlm@9 780 IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
rlm@9 781 int[] imgRGBs = new int[256];
rlm@9 782 imgPalette.getRGBs(imgRGBs);
rlm@9 783 int[] previousRGBs = new int[256];
rlm@9 784 if (previousPalette == null) {
rlm@9 785 previousPalette = palette;
rlm@9 786 }
rlm@9 787 previousPalette.getRGBs(previousRGBs);
rlm@9 788 if (!Arrays.equals(imgRGBs, previousRGBs)) {
rlm@9 789 previousPalette = imgPalette;
rlm@9 790 DataChunk paletteChangeChunk = new DataChunk("00pc");
rlm@9 791 /*
rlm@9 792 int first = imgPalette.getMapSize();
rlm@9 793 int last = -1;
rlm@9 794 for (int i = 0; i < 16; i++) {
rlm@9 795 if (previousRGBs[i] != imgRGBs[i] && i < first) {
rlm@9 796 first = i;
rlm@9 797 }
rlm@9 798 if (previousRGBs[i] != imgRGBs[i] && i > last) {
rlm@9 799 last = i;
rlm@9 800 }
rlm@9 801 }*/
rlm@9 802 int first = 0;
rlm@9 803 int last = imgPalette.getMapSize() - 1;
rlm@9 804 /*
rlm@9 805 * typedef struct {
rlm@9 806 BYTE bFirstEntry;
rlm@9 807 BYTE bNumEntries;
rlm@9 808 WORD wFlags;
rlm@9 809 PALETTEENTRY peNew[];
rlm@9 810 } AVIPALCHANGE;
rlm@9 811 *
rlm@9 812 * typedef struct tagPALETTEENTRY {
rlm@9 813 BYTE peRed;
rlm@9 814 BYTE peGreen;
rlm@9 815 BYTE peBlue;
rlm@9 816 BYTE peFlags;
rlm@9 817 } PALETTEENTRY;
rlm@9 818 */
rlm@9 819 DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
rlm@9 820 pOut.writeByte(first);//bFirstEntry
rlm@9 821 pOut.writeByte(last - first + 1);//bNumEntries
rlm@9 822 pOut.writeShort(0);//wFlags
rlm@9 823
rlm@9 824 for (int i = first; i <= last; i++) {
rlm@9 825 pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
rlm@9 826 pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
rlm@9 827 pOut.writeByte(imgRGBs[i] & 0xff); // blue
rlm@9 828 pOut.writeByte(0); // reserved*/
rlm@9 829 }
rlm@9 830
rlm@9 831 moviChunk.add(paletteChangeChunk);
rlm@9 832 paletteChangeChunk.finish();
rlm@9 833 long length = getRelativeStreamPosition() - offset;
rlm@9 834 videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
rlm@9 835 offset = getRelativeStreamPosition();
rlm@9 836 }
rlm@9 837
rlm@9 838 videoFrameChunk = new DataChunk("00db");
rlm@9 839 byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
rlm@9 840 for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
rlm@9 841 videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth);
rlm@9 842 }
rlm@9 843 break;
rlm@9 844 }
rlm@9 845 default: {
rlm@9 846 videoFrameChunk = new DataChunk("00db");
rlm@9 847 WritableRaster raster = image.getRaster();
rlm@9 848 int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data
rlm@9 849 byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data
rlm@9 850 for (int y = imgHeight - 1; y >= 0; --y) { // Upside down
rlm@9 851 raster.getPixels(0, y, imgWidth, 1, raw);
rlm@9 852 for (int x = 0, n = imgWidth * 3; x < n; x += 3) {
rlm@9 853 bytes[x + 2] = (byte) raw[x]; // Blue
rlm@9 854 bytes[x + 1] = (byte) raw[x + 1]; // Green
rlm@9 855 bytes[x] = (byte) raw[x + 2]; // Red
rlm@9 856 }
rlm@9 857 videoFrameChunk.getOutputStream().write(bytes);
rlm@9 858 }
rlm@9 859 break;
rlm@9 860 }
rlm@9 861 }
rlm@9 862 break;
rlm@9 863 }
rlm@9 864
rlm@9 865 case JPG: {
rlm@9 866 videoFrameChunk = new DataChunk("00dc");
rlm@9 867 ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next();
rlm@9 868 ImageWriteParam iwParam = iw.getDefaultWriteParam();
rlm@9 869 iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
rlm@9 870 iwParam.setCompressionQuality(quality);
rlm@9 871 MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
rlm@9 872 iw.setOutput(imgOut);
rlm@9 873 IIOImage img = new IIOImage(image, null, null);
rlm@9 874 iw.write(null, img, iwParam);
rlm@9 875 iw.dispose();
rlm@9 876 break;
rlm@9 877 }
rlm@9 878 case PNG:
rlm@9 879 default: {
rlm@9 880 videoFrameChunk = new DataChunk("00dc");
rlm@9 881 ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next();
rlm@9 882 ImageWriteParam iwParam = iw.getDefaultWriteParam();
rlm@9 883 MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
rlm@9 884 iw.setOutput(imgOut);
rlm@9 885 IIOImage img = new IIOImage(image, null, null);
rlm@9 886 iw.write(null, img, iwParam);
rlm@9 887 iw.dispose();
rlm@9 888 break;
rlm@9 889 }
rlm@9 890 }
rlm@9 891 long length = getRelativeStreamPosition() - offset;
rlm@9 892 moviChunk.add(videoFrameChunk);
rlm@9 893 videoFrameChunk.finish();
rlm@9 894
rlm@9 895 videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync));
rlm@9 896 if (getRelativeStreamPosition() > 1L << 32) {
rlm@9 897 throw new IOException("AVI file is larger than 4 GB");
rlm@9 898 }
rlm@9 899 }
rlm@9 900
rlm@9 901 /**
rlm@9 902 * Writes a frame from a file to the video track.
rlm@9 903 * <p>
rlm@9 904 * This method does not inspect the contents of the file.
rlm@9 905 * For example, Its your responsibility to only add JPG files if you have
rlm@9 906 * chosen the JPEG video format.
rlm@9 907 * <p>
rlm@9 908 * If you add all frames from files or from input streams, then you
rlm@9 909 * have to explicitly set the dimension of the video track before you
rlm@9 910 * call finish() or close().
rlm@9 911 *
rlm@9 912 * @param file The file which holds the image data.
rlm@9 913 *
rlm@9 914 * @throws IllegalStateException if the duration is less than 1.
rlm@9 915 * @throws IOException if writing the image failed.
rlm@9 916 */
rlm@9 917 public void writeFrame(File file) throws IOException {
rlm@9 918 FileInputStream in = null;
rlm@9 919 try {
rlm@9 920 in = new FileInputStream(file);
rlm@9 921 writeFrame(in);
rlm@9 922 } finally {
rlm@9 923 if (in != null) {
rlm@9 924 in.close();
rlm@9 925 }
rlm@9 926 }
rlm@9 927 }
rlm@9 928
rlm@9 929 /**
rlm@9 930 * Writes a frame to the video track.
rlm@9 931 * <p>
rlm@9 932 * This method does not inspect the contents of the file.
rlm@9 933 * For example, its your responsibility to only add JPG files if you have
rlm@9 934 * chosen the JPEG video format.
rlm@9 935 * <p>
rlm@9 936 * If you add all frames from files or from input streams, then you
rlm@9 937 * have to explicitly set the dimension of the video track before you
rlm@9 938 * call finish() or close().
rlm@9 939 *
rlm@9 940 * @param in The input stream which holds the image data.
rlm@9 941 *
rlm@9 942 * @throws IllegalArgumentException if the duration is less than 1.
rlm@9 943 * @throws IOException if writing the image failed.
rlm@9 944 */
rlm@9 945 public void writeFrame(InputStream in) throws IOException {
rlm@9 946 ensureOpen();
rlm@9 947 ensureStarted();
rlm@9 948
rlm@9 949 DataChunk videoFrameChunk = new DataChunk(
rlm@9 950 videoFormat == VideoFormat.RAW ? "00db" : "00dc");
rlm@9 951 moviChunk.add(videoFrameChunk);
rlm@9 952 OutputStream mdatOut = videoFrameChunk.getOutputStream();
rlm@9 953 long offset = getRelativeStreamPosition();
rlm@9 954 byte[] buf = new byte[512];
rlm@9 955 int len;
rlm@9 956 while ((len = in.read(buf)) != -1) {
rlm@9 957 mdatOut.write(buf, 0, len);
rlm@9 958 }
rlm@9 959 long length = getRelativeStreamPosition() - offset;
rlm@9 960 videoFrameChunk.finish();
rlm@9 961 videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true));
rlm@9 962 if (getRelativeStreamPosition() > 1L << 32) {
rlm@9 963 throw new IOException("AVI file is larger than 4 GB");
rlm@9 964 }
rlm@9 965 }
rlm@9 966
rlm@9 967 /**
rlm@9 968 * Closes the movie file as well as the stream being filtered.
rlm@9 969 *
rlm@9 970 * @exception IOException if an I/O error has occurred
rlm@9 971 */
rlm@9 972 public void close() throws IOException {
rlm@9 973 if (state == States.STARTED) {
rlm@9 974 finish();
rlm@9 975 }
rlm@9 976 if (state != States.CLOSED) {
rlm@9 977 out.close();
rlm@9 978 state = States.CLOSED;
rlm@9 979 }
rlm@9 980 }
rlm@9 981
rlm@9 982 /**
rlm@9 983 * Finishes writing the contents of the AVI output stream without closing
rlm@9 984 * the underlying stream. Use this method when applying multiple filters
rlm@9 985 * in succession to the same output stream.
rlm@9 986 *
rlm@9 987 * @exception IllegalStateException if the dimension of the video track
rlm@9 988 * has not been specified or determined yet.
rlm@9 989 * @exception IOException if an I/O exception has occurred
rlm@9 990 */
rlm@9 991 public void finish() throws IOException {
rlm@9 992 ensureOpen();
rlm@9 993 if (state != States.FINISHED) {
rlm@9 994 if (imgWidth == -1 || imgHeight == -1) {
rlm@9 995 throw new IllegalStateException("image width and height must be specified");
rlm@9 996 }
rlm@9 997
rlm@9 998 moviChunk.finish();
rlm@9 999 writeEpilog();
rlm@9 1000 state = States.FINISHED;
rlm@9 1001 imgWidth = imgHeight = -1;
rlm@9 1002 }
rlm@9 1003 }
rlm@9 1004
rlm@9 1005 /**
rlm@9 1006 * Check to make sure that this stream has not been closed
rlm@9 1007 */
rlm@9 1008 private void ensureOpen() throws IOException {
rlm@9 1009 if (state == States.CLOSED) {
rlm@9 1010 throw new IOException("Stream closed");
rlm@9 1011 }
rlm@9 1012 }
rlm@9 1013
rlm@9 1014 /** Gets the position relative to the beginning of the QuickTime stream.
rlm@9 1015 * <p>
rlm@9 1016 * Usually this value is equal to the stream position of the underlying
rlm@9 1017 * ImageOutputStream, but can be larger if the underlying stream already
rlm@9 1018 * contained data.
rlm@9 1019 *
rlm@9 1020 * @return The relative stream position.
rlm@9 1021 * @throws IOException
rlm@9 1022 */
rlm@9 1023 private long getRelativeStreamPosition() throws IOException {
rlm@9 1024 return out.getStreamPosition() - streamOffset;
rlm@9 1025 }
rlm@9 1026
rlm@9 1027 /** Seeks relative to the beginning of the QuickTime stream.
rlm@9 1028 * <p>
rlm@9 1029 * Usually this equal to seeking in the underlying ImageOutputStream, but
rlm@9 1030 * can be different if the underlying stream already contained data.
rlm@9 1031 *
rlm@9 1032 */
rlm@9 1033 private void seekRelative(long newPosition) throws IOException {
rlm@9 1034 out.seek(newPosition + streamOffset);
rlm@9 1035 }
rlm@9 1036
    /**
     * Writes the leading RIFF/AVI chunk skeleton to the output stream.
     * <p>
     * The header chunks ("avih", "strh", "strf") are written with fixed-size
     * placeholder bodies here; writeEpilog() later seeks back and fills in
     * their real values once all frames are known. Statement order matters:
     * each chunk writes its header to the stream when it is constructed.
     */
    private void writeProlog() throws IOException {
        // The file has the following structure:
        //
        // .RIFF AVI
        // ..avih (AVI Header Chunk)
        // ..LIST strl
        // ...strh (Stream Header Chunk)
        // ...strf (Stream Format Chunk)
        // ..LIST movi
        // ...00dc (Compressed video data chunk in Track 00, repeated for each frame)
        // ..idx1 (List of video data chunks and their location in the file)

        // The RIFF AVI Chunk holds the complete movie
        aviChunk = new CompositeChunk("RIFF", "AVI ");
        CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl");

        // Write empty AVI Main Header Chunk - we fill the data in later
        aviChunk.add(hdrlChunk);
        avihChunk = new FixedSizeDataChunk("avih", 56);
        avihChunk.seekToEndOfChunk();
        hdrlChunk.add(avihChunk);

        CompositeChunk strlChunk = new CompositeChunk("LIST", "strl");
        hdrlChunk.add(strlChunk);

        // Write empty AVI Stream Header Chunk - we fill the data in later
        strhChunk = new FixedSizeDataChunk("strh", 56);
        strhChunk.seekToEndOfChunk();
        strlChunk.add(strhChunk);
        // "strf" carries a BITMAPINFOHEADER (40 bytes) plus, for palettized
        // video, one 4-byte RGBQUAD per palette entry.
        strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4);
        strfChunk.seekToEndOfChunk();
        strlChunk.add(strfChunk);

        moviChunk = new CompositeChunk("LIST", "movi");
        aviChunk.add(moviChunk);


    }
rlm@9 1075
rlm@9 1076 private void writeEpilog() throws IOException {
rlm@9 1077 // Compute values
rlm@9 1078 int duration = 0;
rlm@9 1079 for (Sample s : videoFrames) {
rlm@9 1080 duration += s.duration;
rlm@9 1081 }
rlm@9 1082 long bufferSize = 0;
rlm@9 1083 for (Sample s : videoFrames) {
rlm@9 1084 if (s.length > bufferSize) {
rlm@9 1085 bufferSize = s.length;
rlm@9 1086 }
rlm@9 1087 }
rlm@9 1088
rlm@9 1089
rlm@9 1090 DataChunkOutputStream d;
rlm@9 1091
rlm@9 1092 /* Create Idx1 Chunk and write data
rlm@9 1093 * -------------
rlm@9 1094 typedef struct _avioldindex {
rlm@9 1095 FOURCC fcc;
rlm@9 1096 DWORD cb;
rlm@9 1097 struct _avioldindex_entry {
rlm@9 1098 DWORD dwChunkId;
rlm@9 1099 DWORD dwFlags;
rlm@9 1100 DWORD dwOffset;
rlm@9 1101 DWORD dwSize;
rlm@9 1102 } aIndex[];
rlm@9 1103 } AVIOLDINDEX;
rlm@9 1104 */
rlm@9 1105 DataChunk idx1Chunk = new DataChunk("idx1");
rlm@9 1106 aviChunk.add(idx1Chunk);
rlm@9 1107 d = idx1Chunk.getOutputStream();
rlm@9 1108 long moviListOffset = moviChunk.offset + 8;
rlm@9 1109 //moviListOffset = 0;
rlm@9 1110 for (Sample f : videoFrames) {
rlm@9 1111
rlm@9 1112 d.writeType(f.chunkType); // dwChunkId
rlm@9 1113 // Specifies a FOURCC that identifies a stream in the AVI file. The
rlm@9 1114 // FOURCC must have the form 'xxyy' where xx is the stream number and yy
rlm@9 1115 // is a two-character code that identifies the contents of the stream:
rlm@9 1116 //
rlm@9 1117 // Two-character code Description
rlm@9 1118 // db Uncompressed video frame
rlm@9 1119 // dc Compressed video frame
rlm@9 1120 // pc Palette change
rlm@9 1121 // wb Audio data
rlm@9 1122
rlm@9 1123 d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)//
rlm@9 1124 | (f.isSync ? 0x10 : 0x0)); // dwFlags
rlm@9 1125 // Specifies a bitwise combination of zero or more of the following
rlm@9 1126 // flags:
rlm@9 1127 //
rlm@9 1128 // Value Name Description
rlm@9 1129 // 0x10 AVIIF_KEYFRAME The data chunk is a key frame.
rlm@9 1130 // 0x1 AVIIF_LIST The data chunk is a 'rec ' list.
rlm@9 1131 // 0x100 AVIIF_NO_TIME The data chunk does not affect the timing of the
rlm@9 1132 // stream. For example, this flag should be set for
rlm@9 1133 // palette changes.
rlm@9 1134
rlm@9 1135 d.writeUInt(f.offset - moviListOffset); // dwOffset
rlm@9 1136 // Specifies the location of the data chunk in the file. The value
rlm@9 1137 // should be specified as an offset, in bytes, from the start of the
rlm@9 1138 // 'movi' list; however, in some AVI files it is given as an offset from
rlm@9 1139 // the start of the file.
rlm@9 1140
rlm@9 1141 d.writeUInt(f.length); // dwSize
rlm@9 1142 // Specifies the size of the data chunk, in bytes.
rlm@9 1143 }
rlm@9 1144 idx1Chunk.finish();
rlm@9 1145
rlm@9 1146 /* Write Data into AVI Main Header Chunk
rlm@9 1147 * -------------
rlm@9 1148 * The AVIMAINHEADER structure defines global information in an AVI file.
rlm@9 1149 * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx
rlm@9 1150 typedef struct _avimainheader {
rlm@9 1151 FOURCC fcc;
rlm@9 1152 DWORD cb;
rlm@9 1153 DWORD dwMicroSecPerFrame;
rlm@9 1154 DWORD dwMaxBytesPerSec;
rlm@9 1155 DWORD dwPaddingGranularity;
rlm@9 1156 DWORD dwFlags;
rlm@9 1157 DWORD dwTotalFrames;
rlm@9 1158 DWORD dwInitialFrames;
rlm@9 1159 DWORD dwStreams;
rlm@9 1160 DWORD dwSuggestedBufferSize;
rlm@9 1161 DWORD dwWidth;
rlm@9 1162 DWORD dwHeight;
rlm@9 1163 DWORD dwReserved[4];
rlm@9 1164 } AVIMAINHEADER; */
rlm@9 1165 avihChunk.seekToStartOfData();
rlm@9 1166 d = avihChunk.getOutputStream();
rlm@9 1167
rlm@9 1168 d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame
rlm@9 1169 // Specifies the number of microseconds between frames.
rlm@9 1170 // This value indicates the overall timing for the file.
rlm@9 1171
rlm@9 1172 d.writeUInt(0); // dwMaxBytesPerSec
rlm@9 1173 // Specifies the approximate maximum data rate of the file.
rlm@9 1174 // This value indicates the number of bytes per second the system
rlm@9 1175 // must handle to present an AVI sequence as specified by the other
rlm@9 1176 // parameters contained in the main header and stream header chunks.
rlm@9 1177
rlm@9 1178 d.writeUInt(0); // dwPaddingGranularity
rlm@9 1179 // Specifies the alignment for data, in bytes. Pad the data to multiples
rlm@9 1180 // of this value.
rlm@9 1181
rlm@9 1182 d.writeUInt(0x10); // dwFlags (0x10 == hasIndex)
rlm@9 1183 // Contains a bitwise combination of zero or more of the following
rlm@9 1184 // flags:
rlm@9 1185 //
rlm@9 1186 // Value Name Description
rlm@9 1187 // 0x10 AVIF_HASINDEX Indicates the AVI file has an index.
rlm@9 1188 // 0x20 AVIF_MUSTUSEINDEX Indicates that application should use the
rlm@9 1189 // index, rather than the physical ordering of the
rlm@9 1190 // chunks in the file, to determine the order of
rlm@9 1191 // presentation of the data. For example, this flag
rlm@9 1192 // could be used to create a list of frames for
rlm@9 1193 // editing.
rlm@9 1194 // 0x100 AVIF_ISINTERLEAVED Indicates the AVI file is interleaved.
rlm@9 1195 // 0x1000 AVIF_WASCAPTUREFILE Indicates the AVI file is a specially
rlm@9 1196 // allocated file used for capturing real-time
rlm@9 1197 // video. Applications should warn the user before
rlm@9 1198 // writing over a file with this flag set because
rlm@9 1199 // the user probably defragmented this file.
rlm@9 1200 // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted
rlm@9 1201 // data and software. When this flag is used,
rlm@9 1202 // software should not permit the data to be
rlm@9 1203 // duplicated.
rlm@9 1204
rlm@9 1205 d.writeUInt(videoFrames.size()); // dwTotalFrames
rlm@9 1206 // Specifies the total number of frames of data in the file.
rlm@9 1207
rlm@9 1208 d.writeUInt(0); // dwInitialFrames
rlm@9 1209 // Specifies the initial frame for interleaved files. Noninterleaved
rlm@9 1210 // files should specify zero. If you are creating interleaved files,
rlm@9 1211 // specify the number of frames in the file prior to the initial frame
rlm@9 1212 // of the AVI sequence in this member.
rlm@9 1213 // To give the audio driver enough audio to work with, the audio data in
rlm@9 1214 // an interleaved file must be skewed from the video data. Typically,
rlm@9 1215 // the audio data should be moved forward enough frames to allow
rlm@9 1216 // approximately 0.75 seconds of audio data to be preloaded. The
rlm@9 1217 // dwInitialRecords member should be set to the number of frames the
rlm@9 1218 // audio is skewed. Also set the same value for the dwInitialFrames
rlm@9 1219 // member of the AVISTREAMHEADER structure in the audio stream header
rlm@9 1220
rlm@9 1221 d.writeUInt(1); // dwStreams
rlm@9 1222 // Specifies the number of streams in the file. For example, a file with
rlm@9 1223 // audio and video has two streams.
rlm@9 1224
rlm@9 1225 d.writeUInt(bufferSize); // dwSuggestedBufferSize
rlm@9 1226 // Specifies the suggested buffer size for reading the file. Generally,
rlm@9 1227 // this size should be large enough to contain the largest chunk in the
rlm@9 1228 // file. If set to zero, or if it is too small, the playback software
rlm@9 1229 // will have to reallocate memory during playback, which will reduce
rlm@9 1230 // performance. For an interleaved file, the buffer size should be large
rlm@9 1231 // enough to read an entire record, and not just a chunk.
rlm@9 1232
rlm@9 1233
rlm@9 1234 d.writeUInt(imgWidth); // dwWidth
rlm@9 1235 // Specifies the width of the AVI file in pixels.
rlm@9 1236
rlm@9 1237 d.writeUInt(imgHeight); // dwHeight
rlm@9 1238 // Specifies the height of the AVI file in pixels.
rlm@9 1239
rlm@9 1240 d.writeUInt(0); // dwReserved[0]
rlm@9 1241 d.writeUInt(0); // dwReserved[1]
rlm@9 1242 d.writeUInt(0); // dwReserved[2]
rlm@9 1243 d.writeUInt(0); // dwReserved[3]
rlm@9 1244 // Reserved. Set this array to zero.
rlm@9 1245
rlm@9 1246 /* Write Data into AVI Stream Header Chunk
rlm@9 1247 * -------------
rlm@9 1248 * The AVISTREAMHEADER structure contains information about one stream
rlm@9 1249 * in an AVI file.
rlm@9 1250 * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx
rlm@9 1251 typedef struct _avistreamheader {
rlm@9 1252 FOURCC fcc;
rlm@9 1253 DWORD cb;
rlm@9 1254 FOURCC fccType;
rlm@9 1255 FOURCC fccHandler;
rlm@9 1256 DWORD dwFlags;
rlm@9 1257 WORD wPriority;
rlm@9 1258 WORD wLanguage;
rlm@9 1259 DWORD dwInitialFrames;
rlm@9 1260 DWORD dwScale;
rlm@9 1261 DWORD dwRate;
rlm@9 1262 DWORD dwStart;
rlm@9 1263 DWORD dwLength;
rlm@9 1264 DWORD dwSuggestedBufferSize;
rlm@9 1265 DWORD dwQuality;
rlm@9 1266 DWORD dwSampleSize;
rlm@9 1267 struct {
rlm@9 1268 short int left;
rlm@9 1269 short int top;
rlm@9 1270 short int right;
rlm@9 1271 short int bottom;
rlm@9 1272 } rcFrame;
rlm@9 1273 } AVISTREAMHEADER;
rlm@9 1274 */
rlm@9 1275 strhChunk.seekToStartOfData();
rlm@9 1276 d = strhChunk.getOutputStream();
rlm@9 1277 d.writeType("vids"); // fccType - vids for video stream
rlm@9 1278 // Contains a FOURCC that specifies the type of the data contained in
rlm@9 1279 // the stream. The following standard AVI values for video and audio are
rlm@9 1280 // defined:
rlm@9 1281 //
rlm@9 1282 // FOURCC Description
rlm@9 1283 // 'auds' Audio stream
rlm@9 1284 // 'mids' MIDI stream
rlm@9 1285 // 'txts' Text stream
rlm@9 1286 // 'vids' Video stream
rlm@9 1287
rlm@9 1288 switch (videoFormat) {
rlm@9 1289 case RAW:
rlm@9 1290 d.writeType("DIB "); // fccHandler - DIB for Raw RGB
rlm@9 1291 break;
rlm@9 1292 case RLE:
rlm@9 1293 d.writeType("RLE "); // fccHandler - Microsoft RLE
rlm@9 1294 break;
rlm@9 1295 case JPG:
rlm@9 1296 d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG
rlm@9 1297 break;
rlm@9 1298 case PNG:
rlm@9 1299 default:
rlm@9 1300 d.writeType("png "); // fccHandler - png for PNG
rlm@9 1301 break;
rlm@9 1302 }
rlm@9 1303 // Optionally, contains a FOURCC that identifies a specific data
rlm@9 1304 // handler. The data handler is the preferred handler for the stream.
rlm@9 1305 // For audio and video streams, this specifies the codec for decoding
rlm@9 1306 // the stream.
rlm@9 1307
rlm@9 1308 if (imgDepth <= 8) {
rlm@9 1309 d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES
rlm@9 1310 } else {
rlm@9 1311 d.writeUInt(0); // dwFlags
rlm@9 1312 }
rlm@9 1313
rlm@9 1314 // Contains any flags for the data stream. The bits in the high-order
rlm@9 1315 // word of these flags are specific to the type of data contained in the
rlm@9 1316 // stream. The following standard flags are defined:
rlm@9 1317 //
rlm@9 1318 // Value Name Description
rlm@9 1319 // AVISF_DISABLED 0x00000001 Indicates this stream should not
rlm@9 1320 // be enabled by default.
rlm@9 1321 // AVISF_VIDEO_PALCHANGES 0x00010000
rlm@9 1322 // Indicates this video stream contains
rlm@9 1323 // palette changes. This flag warns the playback
rlm@9 1324 // software that it will need to animate the
rlm@9 1325 // palette.
rlm@9 1326
rlm@9 1327 d.writeUShort(0); // wPriority
rlm@9 1328 // Specifies priority of a stream type. For example, in a file with
rlm@9 1329 // multiple audio streams, the one with the highest priority might be
rlm@9 1330 // the default stream.
rlm@9 1331
rlm@9 1332 d.writeUShort(0); // wLanguage
rlm@9 1333 // Language tag.
rlm@9 1334
rlm@9 1335 d.writeUInt(0); // dwInitialFrames
rlm@9 1336 // Specifies how far audio data is skewed ahead of the video frames in
rlm@9 1337 // interleaved files. Typically, this is about 0.75 seconds. If you are
rlm@9 1338 // creating interleaved files, specify the number of frames in the file
rlm@9 1339 // prior to the initial frame of the AVI sequence in this member. For
rlm@9 1340 // more information, see the remarks for the dwInitialFrames member of
rlm@9 1341 // the AVIMAINHEADER structure.
rlm@9 1342
rlm@9 1343 d.writeUInt(timeScale); // dwScale
rlm@9 1344 // Used with dwRate to specify the time scale that this stream will use.
rlm@9 1345 // Dividing dwRate by dwScale gives the number of samples per second.
rlm@9 1346 // For video streams, this is the frame rate. For audio streams, this
rlm@9 1347 // rate corresponds to the time needed to play nBlockAlign bytes of
rlm@9 1348 // audio, which for PCM audio is the just the sample rate.
rlm@9 1349
rlm@9 1350 d.writeUInt(frameRate); // dwRate
rlm@9 1351 // See dwScale.
rlm@9 1352
rlm@9 1353 d.writeUInt(0); // dwStart
rlm@9 1354 // Specifies the starting time for this stream. The units are defined by
rlm@9 1355 // the dwRate and dwScale members in the main file header. Usually, this
rlm@9 1356 // is zero, but it can specify a delay time for a stream that does not
rlm@9 1357 // start concurrently with the file.
rlm@9 1358
rlm@9 1359 d.writeUInt(videoFrames.size()); // dwLength
rlm@9 1360 // Specifies the length of this stream. The units are defined by the
rlm@9 1361 // dwRate and dwScale members of the stream's header.
rlm@9 1362
rlm@9 1363 d.writeUInt(bufferSize); // dwSuggestedBufferSize
rlm@9 1364 // Specifies how large a buffer should be used to read this stream.
rlm@9 1365 // Typically, this contains a value corresponding to the largest chunk
rlm@9 1366 // present in the stream. Using the correct buffer size makes playback
rlm@9 1367 // more efficient. Use zero if you do not know the correct buffer size.
rlm@9 1368
rlm@9 1369 d.writeInt(-1); // dwQuality
rlm@9 1370 // Specifies an indicator of the quality of the data in the stream.
rlm@9 1371 // Quality is represented as a number between 0 and 10,000.
rlm@9 1372 // For compressed data, this typically represents the value of the
rlm@9 1373 // quality parameter passed to the compression software. If set to –1,
rlm@9 1374 // drivers use the default quality value.
rlm@9 1375
rlm@9 1376 d.writeUInt(0); // dwSampleSize
rlm@9 1377 // Specifies the size of a single sample of data. This is set to zero
rlm@9 1378 // if the samples can vary in size. If this number is nonzero, then
rlm@9 1379 // multiple samples of data can be grouped into a single chunk within
rlm@9 1380 // the file. If it is zero, each sample of data (such as a video frame)
rlm@9 1381 // must be in a separate chunk. For video streams, this number is
rlm@9 1382 // typically zero, although it can be nonzero if all video frames are
rlm@9 1383 // the same size. For audio streams, this number should be the same as
rlm@9 1384 // the nBlockAlign member of the WAVEFORMATEX structure describing the
rlm@9 1385 // audio.
rlm@9 1386
rlm@9 1387 d.writeUShort(0); // rcFrame.left
rlm@9 1388 d.writeUShort(0); // rcFrame.top
rlm@9 1389 d.writeUShort(imgWidth); // rcFrame.right
rlm@9 1390 d.writeUShort(imgHeight); // rcFrame.bottom
rlm@9 1391 // Specifies the destination rectangle for a text or video stream within
rlm@9 1392 // the movie rectangle specified by the dwWidth and dwHeight members of
rlm@9 1393 // the AVI main header structure. The rcFrame member is typically used
rlm@9 1394 // in support of multiple video streams. Set this rectangle to the
rlm@9 1395 // coordinates corresponding to the movie rectangle to update the whole
rlm@9 1396 // movie rectangle. Units for this member are pixels. The upper-left
rlm@9 1397 // corner of the destination rectangle is relative to the upper-left
rlm@9 1398 // corner of the movie rectangle.
rlm@9 1399
rlm@9 1400 /* Write BITMAPINFOHEADR Data into AVI Stream Format Chunk
rlm@9 1401 /* -------------
rlm@9 1402 * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
rlm@9 1403 typedef struct tagBITMAPINFOHEADER {
rlm@9 1404 DWORD biSize;
rlm@9 1405 LONG biWidth;
rlm@9 1406 LONG biHeight;
rlm@9 1407 WORD biPlanes;
rlm@9 1408 WORD biBitCount;
rlm@9 1409 DWORD biCompression;
rlm@9 1410 DWORD biSizeImage;
rlm@9 1411 LONG biXPelsPerMeter;
rlm@9 1412 LONG biYPelsPerMeter;
rlm@9 1413 DWORD biClrUsed;
rlm@9 1414 DWORD biClrImportant;
rlm@9 1415 } BITMAPINFOHEADER;
rlm@9 1416 */
rlm@9 1417 strfChunk.seekToStartOfData();
rlm@9 1418 d = strfChunk.getOutputStream();
rlm@9 1419 d.writeUInt(40); // biSize
rlm@9 1420 // Specifies the number of bytes required by the structure. This value
rlm@9 1421 // does not include the size of the color table or the size of the color
rlm@9 1422 // masks, if they are appended to the end of structure.
rlm@9 1423
rlm@9 1424 d.writeInt(imgWidth); // biWidth
rlm@9 1425 // Specifies the width of the bitmap, in pixels.
rlm@9 1426
rlm@9 1427 d.writeInt(imgHeight); // biHeight
rlm@9 1428 // Specifies the height of the bitmap, in pixels.
rlm@9 1429 //
rlm@9 1430 // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
rlm@9 1431 // a bottom-up DIB with the origin at the lower left corner. If biHeight
rlm@9 1432 // is negative, the bitmap is a top-down DIB with the origin at the
rlm@9 1433 // upper left corner.
rlm@9 1434 // For YUV bitmaps, the bitmap is always top-down, regardless of the
rlm@9 1435 // sign of biHeight. Decoders should offer YUV formats with postive
rlm@9 1436 // biHeight, but for backward compatibility they should accept YUV
rlm@9 1437 // formats with either positive or negative biHeight.
rlm@9 1438 // For compressed formats, biHeight must be positive, regardless of
rlm@9 1439 // image orientation.
rlm@9 1440
rlm@9 1441 d.writeShort(1); // biPlanes
rlm@9 1442 // Specifies the number of planes for the target device. This value must
rlm@9 1443 // be set to 1.
rlm@9 1444
rlm@9 1445 d.writeShort(imgDepth); // biBitCount
rlm@9 1446 // Specifies the number of bits per pixel (bpp). For uncompressed
rlm@9 1447 // formats, this value is the average number of bits per pixel. For
rlm@9 1448 // compressed formats, this value is the implied bit depth of the
rlm@9 1449 // uncompressed image, after the image has been decoded.
rlm@9 1450
rlm@9 1451 switch (videoFormat) {
rlm@9 1452 case RAW:
rlm@9 1453 default:
rlm@9 1454 d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
rlm@9 1455 break;
rlm@9 1456 case RLE:
rlm@9 1457 if (imgDepth == 8) {
rlm@9 1458 d.writeInt(1); // biCompression - BI_RLE8
rlm@9 1459 } else if (imgDepth == 4) {
rlm@9 1460 d.writeInt(2); // biCompression - BI_RLE4
rlm@9 1461 } else {
rlm@9 1462 throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
rlm@9 1463 }
rlm@9 1464 break;
rlm@9 1465 case JPG:
rlm@9 1466 d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
rlm@9 1467 break;
rlm@9 1468 case PNG:
rlm@9 1469 d.writeType("png "); // biCompression - png for PNG
rlm@9 1470 break;
rlm@9 1471 }
rlm@9 1472 // For compressed video and YUV formats, this member is a FOURCC code,
rlm@9 1473 // specified as a DWORD in little-endian order. For example, YUYV video
rlm@9 1474 // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
rlm@9 1475 // Codes.
rlm@9 1476 //
rlm@9 1477 // For uncompressed RGB formats, the following values are possible:
rlm@9 1478 //
rlm@9 1479 // Value Description
rlm@9 1480 // BI_RGB 0x00000000 Uncompressed RGB.
rlm@9 1481 // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks.
rlm@9 1482 // Valid for 16-bpp and 32-bpp bitmaps.
rlm@9 1483 //
rlm@9 1484 // Note that BI_JPG and BI_PNG are not valid video formats.
rlm@9 1485 //
rlm@9 1486 // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
rlm@9 1487 // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
rlm@9 1488 // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
rlm@9 1489 // structure to determine the specific RGB type.
rlm@9 1490
rlm@9 1491 switch (videoFormat) {
rlm@9 1492 case RAW:
rlm@9 1493 d.writeInt(0); // biSizeImage
rlm@9 1494 break;
rlm@9 1495 case RLE:
rlm@9 1496 case JPG:
rlm@9 1497 case PNG:
rlm@9 1498 default:
rlm@9 1499 if (imgDepth == 4) {
rlm@9 1500 d.writeInt(imgWidth * imgHeight / 2); // biSizeImage
rlm@9 1501 } else {
rlm@9 1502 int bytesPerPixel = Math.max(1, imgDepth / 8);
rlm@9 1503 d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage
rlm@9 1504 }
rlm@9 1505 break;
rlm@9 1506 }
rlm@9 1507 // Specifies the size, in bytes, of the image. This can be set to 0 for
rlm@9 1508 // uncompressed RGB bitmaps.
rlm@9 1509
rlm@9 1510 d.writeInt(0); // biXPelsPerMeter
rlm@9 1511 // Specifies the horizontal resolution, in pixels per meter, of the
rlm@9 1512 // target device for the bitmap.
rlm@9 1513
rlm@9 1514 d.writeInt(0); // biYPelsPerMeter
rlm@9 1515 // Specifies the vertical resolution, in pixels per meter, of the target
rlm@9 1516 // device for the bitmap.
rlm@9 1517
rlm@9 1518 d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed
rlm@9 1519 // Specifies the number of color indices in the color table that are
rlm@9 1520 // actually used by the bitmap.
rlm@9 1521
rlm@9 1522 d.writeInt(0); // biClrImportant
rlm@9 1523 // Specifies the number of color indices that are considered important
rlm@9 1524 // for displaying the bitmap. If this value is zero, all colors are
rlm@9 1525 // important.
rlm@9 1526
rlm@9 1527 if (palette != null) {
rlm@9 1528 for (int i = 0, n = palette.getMapSize(); i < n; ++i) {
rlm@9 1529 /*
rlm@9 1530 * typedef struct tagRGBQUAD {
rlm@9 1531 BYTE rgbBlue;
rlm@9 1532 BYTE rgbGreen;
rlm@9 1533 BYTE rgbRed;
rlm@9 1534 BYTE rgbReserved; // This member is reserved and must be zero.
rlm@9 1535 } RGBQUAD;
rlm@9 1536 */
rlm@9 1537 d.write(palette.getBlue(i));
rlm@9 1538 d.write(palette.getGreen(i));
rlm@9 1539 d.write(palette.getRed(i));
rlm@9 1540 d.write(0);
rlm@9 1541 }
rlm@9 1542 }
rlm@9 1543
rlm@9 1544
rlm@9 1545 // -----------------
rlm@9 1546 aviChunk.finish();
rlm@9 1547 }
rlm@9 1548 }