annotate src/com/aurellem/capture/AVIOutputStream.java @ 4:edaa7e7806e4

migrated IsoTimer
author Robert McIntyre <rlm@mit.edu>
date Tue, 25 Oct 2011 12:03:01 -0700
parents a92de00f0414
children
rev   line source
rlm@3 1 /**
rlm@3 2 * @(#)AVIOutputStream.java 1.5.1 2011-01-17
rlm@3 3 *
rlm@3 4 * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
rlm@3 5 * All rights reserved.
rlm@3 6 *
rlm@3 7 * You may not use, copy or modify this file, except in compliance with the
rlm@3 8 * license agreement you entered into with Werner Randelshofer.
rlm@3 9 * For details see accompanying license terms.
rlm@3 10 */
rlm@3 11 package com.aurellem.capture;
rlm@3 12
rlm@3 13 import java.awt.Dimension;
rlm@3 14 import java.awt.image.BufferedImage;
rlm@3 15 import java.awt.image.DataBufferByte;
rlm@3 16 import java.awt.image.IndexColorModel;
rlm@3 17 import java.awt.image.WritableRaster;
rlm@3 18 import java.io.File;
rlm@3 19 import java.io.FileInputStream;
rlm@3 20 import java.io.IOException;
rlm@3 21 import java.io.InputStream;
rlm@3 22 import java.io.OutputStream;
rlm@3 23 import java.util.Arrays;
rlm@3 24 import java.util.Date;
rlm@3 25 import java.util.LinkedList;
rlm@3 26
rlm@3 27 import javax.imageio.IIOImage;
rlm@3 28 import javax.imageio.ImageIO;
rlm@3 29 import javax.imageio.ImageWriteParam;
rlm@3 30 import javax.imageio.ImageWriter;
rlm@3 31 import javax.imageio.stream.FileImageOutputStream;
rlm@3 32 import javax.imageio.stream.ImageOutputStream;
rlm@3 33 import javax.imageio.stream.MemoryCacheImageOutputStream;
rlm@3 34
rlm@3 35 /**
rlm@3 36 * This class supports writing of images into an AVI 1.0 video file.
rlm@3 37 * <p>
rlm@3 38 * The images are written as video frames.
rlm@3 39 * <p>
rlm@3 40 * Video frames can be encoded with one of the following formats:
rlm@3 41 * <ul>
rlm@3 42 * <li>JPEG</li>
rlm@3 43 * <li>PNG</li>
rlm@3 44 * <li>RAW</li>
rlm@3 45 * <li>RLE</li>
rlm@3 46 * </ul>
rlm@3 47 * All frames must have the same format.
rlm@3 48 * When JPG is used each frame can have an individual encoding quality.
rlm@3 49 * <p>
rlm@3 50 * All frames in an AVI file must have the same duration. The duration can
rlm@3 51 * be set by setting an appropriate pair of values using methods
rlm@3 52 * {@link #setFrameRate} and {@link #setTimeScale}.
rlm@3 53 * <p>
rlm@3 54 * The length of an AVI 1.0 file is limited to 1 GB.
rlm@3 55 * This class supports lengths of up to 4 GB, but such files may not work on
rlm@3 56 * all players.
rlm@3 57 * <p>
rlm@3 58 * For detailed information about the AVI RIFF file format see:<br>
rlm@3 59 * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br>
rlm@3 60 * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br>
rlm@3 61 * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br>
rlm@3 62 *
rlm@3 63 * @author Werner Randelshofer
rlm@3 64 * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream..
rlm@3 65 * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format.
rlm@3 66 * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets
rlm@3 67 * in "idx1" chunk.
rlm@3 68 * <br>1.3.2 2010-12-27 File size limit is 1 GB.
rlm@3 69 * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets.
rlm@3 70 * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream.
rlm@3 71 * Added method getVideoDimension().
rlm@3 72 * <br>1.2 2009-08-29 Adds support for RAW video format.
rlm@3 73 * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih
rlm@3 74 * chunk. Changed the API to reflect that AVI works with frame rates instead of
rlm@3 75 * with frame durations.
rlm@3 76 * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG
rlm@3 77 * encoded video.
rlm@3 78 * <br>1.0 2008-08-11 Created.
rlm@3 79 */
rlm@3 80 public class AVIOutputStream {
rlm@3 81
/**
 * Underlying output stream that receives the AVI RIFF data.
 */
private ImageOutputStream out;
/** The offset of the QuickTime stream in the underlying ImageOutputStream.
 * Normally this is 0 unless the underlying stream already contained data
 * when it was passed to the constructor.
 * NOTE(review): "QuickTime" looks like a copy-paste from a sibling
 * QuickTimeOutputStream class; here it is the offset of the AVI stream —
 * confirm against the original project.
 */
private long streamOffset;
/** Previous frame for delta compression.
 * NOTE(review): never read or written in this part of the file — possibly
 * a leftover for the RLE format; confirm before removing. */
private Object previousData;
rlm@3 93
/**
 * Supported video encodings.
 * <p>
 * All frames written to one stream must use the same format.
 */
public enum VideoFormat {
    // Nested enums are implicitly static; the redundant "static" modifier
    // has been dropped.
    RAW, RLE, JPG, PNG;
}
/**
 * Current video format; selected once in the constructor.
 */
private VideoFormat videoFormat;
/**
 * Quality of JPEG encoded video frames, in the range [0,1].
 * Only used when videoFormat is JPG.
 */
private float quality = 0.9f;
/**
 * Creation time of the movie output stream; set when the first frame
 * is written (see ensureStarted).
 */
private Date creationTime;
/**
 * Width of the video frames. All frames must have the same width.
 * The value -1 is used to mark unspecified width.
 */
private int imgWidth = -1;
/**
 * Height of the video frames. All frames must have the same height.
 * The value -1 is used to mark unspecified height.
 */
private int imgHeight = -1;
/** Number of bits per pixel. Supported values: 4, 8 (indexed) and 24 (RGB). */
private int imgDepth = 24;
/** Index color model for RAW_RGB4 and RAW_RGB8 formats. */
private IndexColorModel palette;
/** Palette of the previously written frame; used to detect palette changes. */
private IndexColorModel previousPalette;
/** Video encoder. */
// NOTE(review): the comment above is dangling — no encoder field follows it;
// looks like a leftover from a removed field. Confirm before deleting.

/**
 * The timeScale of the movie.
 * <p>
 * Used with frameRate to specify the time scale that this stream will use.
 * Dividing frameRate by timeScale gives the number of samples per second.
 * For video streams, this is the frame rate. For audio streams, this rate
 * corresponds to the time needed to play nBlockAlign bytes of audio, which
 * for PCM audio is the just the sample rate.
 */
private int timeScale = 1;
/**
 * The frameRate of the movie in timeScale units.
 * <p>
 * @see timeScale
 */
private int frameRate = 30;
/** Interval between keyframes, in frames. */
private int syncInterval = 30;
/**
 * The states of the movie output stream.
 * STARTED: the prolog has been written and frames may be added.
 * FINISHED: the stream is idle (initial state) or has been finished.
 * CLOSED: the underlying output stream has been closed.
 */
private static enum States {

    STARTED, FINISHED, CLOSED;
}
/**
 * The current state of the movie output stream.
 * Starts out as FINISHED; flips to STARTED when the first frame is written.
 */
private States state = States.FINISHED;
rlm@3 160
/**
 * AVI stores media data in samples.
 * A sample is a single element in a sequence of time-ordered data.
 * Samples are recorded here so the "idx1" index chunk can be written
 * when the stream is finished.
 */
private static class Sample {

    /** FourCC id of the chunk holding this sample, e.g. "00db", "00dc" or "00pc". */
    String chunkType;
    /** Offset of the sample relative to the start of the AVI file.
     */
    long offset;
    /** Data length of the sample, excluding the 8 byte chunk header. */
    long length;
    /**
     * The duration of the sample in time scale units.
     */
    int duration;
    /** Whether the sample is a sync-sample (key frame). */
    boolean isSync;

    /**
     * Creates a new sample.
     * @param chunkId the FourCC id of the chunk holding this sample
     * @param duration the duration in time scale units
     * @param offset the offset relative to the start of the AVI file
     * @param length the data length, excluding the chunk header
     * @param isSync whether the sample is a sync-sample
     */
    public Sample(String chunkId, int duration, long offset, long length, boolean isSync) {
        this.chunkType = chunkId;
        this.duration = duration;
        this.offset = offset;
        this.length = length;
        this.isSync = isSync;
    }
}
/**
 * List of video frames written so far; used to build the index when the
 * stream is finished.
 */
private LinkedList<Sample> videoFrames;
/**
 * This chunk holds the whole AVI content ("RIFF....AVI ").
 */
private CompositeChunk aviChunk;
/**
 * This chunk holds the movie frames ("LIST....movi").
 */
private CompositeChunk moviChunk;
/**
 * This chunk holds the AVI Main Header ("avih").
 */
FixedSizeDataChunk avihChunk;
/**
 * This chunk holds the AVI Stream Header ("strh").
 */
FixedSizeDataChunk strhChunk;
/**
 * This chunk holds the AVI Stream Format Header ("strf").
 */
FixedSizeDataChunk strfChunk;
/**
 * Chunk base class.
 * <p>
 * A chunk records the stream position at which it was created; subclasses
 * seek back to that position in {@code finish()} to patch their headers.
 * This is a non-static inner class: it uses the enclosing stream's
 * {@code getRelativeStreamPosition()}.
 */
private abstract class Chunk {

    /**
     * The chunkType of the chunk. A String with the length of 4 characters.
     */
    protected String chunkType;
    /**
     * The offset of the chunk relative to the start of the
     * ImageOutputStream.
     */
    protected long offset;

    /**
     * Creates a new Chunk at the current position of the ImageOutputStream.
     * @param chunkType The chunkType of the chunk. A string with a length of 4 characters.
     */
    public Chunk(String chunkType) throws IOException {
        this.chunkType = chunkType;
        offset = getRelativeStreamPosition();
    }

    /**
     * Writes the chunk to the ImageOutputStream and disposes it.
     */
    public abstract void finish() throws IOException;

    /**
     * Returns the size of the chunk including the size of the chunk header.
     * @return The size of the chunk.
     */
    public abstract long size();
}
rlm@3 254
/**
 * A CompositeChunk contains an ordered list of Chunks.
 * <p>
 * On disk it is a RIFF "LIST"/"RIFF" structure: a 4-byte composite type,
 * a 4-byte size, a 4-byte chunk type, followed by the children.
 */
private class CompositeChunk extends Chunk {

    /**
     * The type of the composite. A String with the length of 4 characters.
     */
    protected String compositeType;
    /** Child chunks, in the order they were added. */
    private LinkedList<Chunk> children;
    /** Guards against finishing (and re-patching the header) twice. */
    private boolean finished;

    /**
     * Creates a new CompositeChunk at the current position of the
     * ImageOutputStream.
     * @param compositeType The type of the composite.
     * @param chunkType The type of the chunk.
     */
    public CompositeChunk(String compositeType, String chunkType) throws IOException {
        super(chunkType);
        this.compositeType = compositeType;
        // Reserve 12 bytes: compositeType (4) + size (4) + chunkType (4).
        // The real header is patched in finish().
        out.writeLong(0); // make room for the chunk header
        out.writeInt(0); // make room for the chunk header
        children = new LinkedList<Chunk>();
    }

    /**
     * Adds a child chunk. The previously added child (if any) is finished
     * first, because only the last child may still be open for writing.
     */
    public void add(Chunk child) throws IOException {
        if (children.size() > 0) {
            children.getLast().finish();
        }
        children.add(child);
    }

    /**
     * Writes the chunk and all its children to the ImageOutputStream
     * and disposes of all resources held by the chunk.
     * @throws java.io.IOException
     */
    @Override
    public void finish() throws IOException {
        if (!finished) {
            if (size() > 0xffffffffL) {
                throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size());
            }

            // Remember the end position, seek back and patch the header.
            long pointer = getRelativeStreamPosition();
            seekRelative(offset);

            DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
            headerData.writeType(compositeType);
            headerData.writeUInt(size() - 8); // RIFF size excludes type+size fields
            headerData.writeType(chunkType);
            for (Chunk child : children) {
                child.finish();
            }
            seekRelative(pointer);
            // RIFF chunks are word-aligned; pad odd-sized chunks with one byte.
            if (size() % 2 == 1) {
                out.writeByte(0); // write pad byte
            }
            finished = true;
        }
    }

    @Override
    public long size() {
        // 12 bytes of header plus each child rounded up to an even size.
        long length = 12;
        for (Chunk child : children) {
            length += child.size() + child.size() % 2;
        }
        return length;
    }
}
rlm@3 328
/**
 * Data Chunk.
 * <p>
 * A leaf chunk whose payload is written through a counting output stream;
 * the 8 byte header (type + size) is patched in when finish() is called.
 */
private class DataChunk extends Chunk {

    /** Counting stream through which the chunk payload is written. */
    private DataChunkOutputStream data;
    /** Guards against finishing (and re-patching the header) twice. */
    private boolean finished;

    /**
     * Creates a new DataChunk at the current position of the
     * ImageOutputStream.
     * @param chunkType The chunkType of the chunk.
     */
    public DataChunk(String name) throws IOException {
        super(name);
        out.writeLong(0); // make room for the chunk header
        data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
    }

    public DataChunkOutputStream getOutputStream() {
        if (finished) {
            throw new IllegalStateException("DataChunk is finished");
        }
        return data;
    }

    /**
     * Returns the offset of this chunk to the beginning of the random access file
     * @return
     */
    public long getOffset() {
        return offset;
    }

    @Override
    public void finish() throws IOException {
        if (!finished) {
            long sizeBefore = size();

            if (size() > 0xffffffffL) {
                throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size());
            }

            // Seek back to the reserved header and patch in type + payload size.
            long pointer = getRelativeStreamPosition();
            seekRelative(offset);

            DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
            headerData.writeType(chunkType);
            headerData.writeUInt(size() - 8);
            seekRelative(pointer);
            // RIFF chunks are word-aligned; pad odd-sized chunks with one byte.
            if (size() % 2 == 1) {
                out.writeByte(0); // write pad byte
            }
            finished = true;
            // Sanity check: writing the header must not change the payload count.
            long sizeAfter = size();
            if (sizeBefore != sizeAfter) {
                System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter);
            }
        }
    }

    @Override
    public long size() {
        // 8 byte header plus number of payload bytes written so far.
        return 8 + data.size();
    }
}
rlm@3 395
/**
 * A DataChunk with a fixed size.
 * <p>
 * The full chunk (header + zero-filled payload + pad byte) is written
 * immediately by the constructor; the caller then seeks back and overwrites
 * the payload in place. Used for the avih/strh/strf header chunks whose
 * contents are patched after all frames are known.
 */
private class FixedSizeDataChunk extends Chunk {

    /** Counting stream positioned inside the chunk payload. */
    private DataChunkOutputStream data;
    private boolean finished;
    /** Payload size in bytes, fixed at construction time. */
    private long fixedSize;

    /**
     * Creates a new DataChunk at the current position of the
     * ImageOutputStream.
     * @param chunkType The chunkType of the chunk.
     */
    public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException {
        super(chunkType);
        this.fixedSize = fixedSize;
        data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
        data.writeType(chunkType);
        data.writeUInt(fixedSize);
        data.clearCount();

        // Fill fixed size with nulls, 512 bytes at a time
        byte[] buf = new byte[(int) Math.min(512, fixedSize)];
        long written = 0;
        while (written < fixedSize) {
            data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written));
            written += Math.min(buf.length, fixedSize - written);
        }
        // RIFF chunks are word-aligned; pad odd-sized chunks with one byte.
        if (fixedSize % 2 == 1) {
            out.writeByte(0); // write pad byte
        }
        seekToStartOfData();
    }

    public DataChunkOutputStream getOutputStream() {
        /*if (finished) {
        throw new IllegalStateException("DataChunk is finished");
        }*/
        return data;
    }

    /**
     * Returns the offset of this chunk to the beginning of the random access file
     * @return
     */
    public long getOffset() {
        return offset;
    }

    /** Positions the stream at the first payload byte (past the 8 byte header). */
    public void seekToStartOfData() throws IOException {
        seekRelative(offset + 8);
        data.clearCount();
    }

    /** Positions the stream just past this chunk, including the pad byte. */
    public void seekToEndOfChunk() throws IOException {
        seekRelative(offset + 8 + fixedSize + fixedSize % 2);
    }

    @Override
    public void finish() throws IOException {
        // Everything was already written in the constructor; nothing to patch.
        if (!finished) {
            finished = true;
        }
    }

    @Override
    public long size() {
        return 8 + fixedSize;
    }
}
rlm@3 467
/**
 * Creates a new AVI file with the specified video format and
 * frame rate. The video has 24 bits per pixel.
 *
 * @param file the output file
 * @param format Selects an encoder for the video format.
 * @exception IllegalArgumentException if videoFormat is null or if
 * frame rate is <= 0
 */
public AVIOutputStream(File file, VideoFormat format) throws IOException {
    this(file,format,24);
}
/**
 * Creates a new AVI file with the specified video format, frame rate
 * and color depth. An existing file is deleted first.
 *
 * @param file the output file
 * @param format Selects an encoder for the video format.
 * @param bitsPerPixel the number of bits per pixel.
 * @exception IllegalArgumentException if videoFormat is null or if
 * frame rate is <= 0
 */
public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException {
    if (format == null) {
        throw new IllegalArgumentException("format must not be null");
    }
    if (file.exists()) {
        file.delete();
    }
    out = new FileImageOutputStream(file);
    streamOffset = 0;
    videoFormat = format;
    videoFrames = new LinkedList<Sample>();
    imgDepth = bitsPerPixel;
    // For the indexed depths, start out with a default grayscale palette.
    if (bitsPerPixel == 4) {
        byte[] gray = new byte[16];
        for (int i = 0; i < 16; i++) {
            gray[i] = (byte) (i * 17); // 0x00, 0x11, ..., 0xFF
        }
        palette = new IndexColorModel(4, 16, gray, gray, gray);
    } else if (bitsPerPixel == 8) {
        byte[] gray = new byte[256];
        for (int i = 0; i < 256; i++) {
            gray[i] = (byte) i;
        }
        palette = new IndexColorModel(8, 256, gray, gray, gray);
    }
}
rlm@3 519
/**
 * Creates a new AVI output stream with the specified video format and
 * framerate, writing to an already-open image output stream.
 *
 * @param out the underlying output stream
 * @param format Selects an encoder for the video format.
 * @exception IllegalArgumentException if videoFormat is null or if
 * framerate is <= 0
 */
public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {
    if (format == null) {
        throw new IllegalArgumentException("format must not be null");
    }
    this.out = out;
    // Remember where the AVI data begins so all later seeks can be
    // expressed relative to it (the stream may already contain data).
    streamOffset = out.getStreamPosition();
    videoFormat = format;
    videoFrames = new LinkedList<Sample>();
}
rlm@3 538
rlm@3 539 /**
rlm@3 540 * Used with frameRate to specify the time scale that this stream will use.
rlm@3 541 * Dividing frameRate by timeScale gives the number of samples per second.
rlm@3 542 * For video streams, this is the frame rate. For audio streams, this rate
rlm@3 543 * corresponds to the time needed to play nBlockAlign bytes of audio, which
rlm@3 544 * for PCM audio is the just the sample rate.
rlm@3 545 * <p>
rlm@3 546 * The default value is 1.
rlm@3 547 *
rlm@3 548 * @param newValue
rlm@3 549 */
rlm@3 550 public void setTimeScale(int newValue) {
rlm@3 551 if (newValue <= 0) {
rlm@3 552 throw new IllegalArgumentException("timeScale must be greater 0");
rlm@3 553 }
rlm@3 554 this.timeScale = newValue;
rlm@3 555 }
rlm@3 556
rlm@3 557 /**
rlm@3 558 * Returns the time scale of this media.
rlm@3 559 *
rlm@3 560 * @return time scale
rlm@3 561 */
rlm@3 562 public int getTimeScale() {
rlm@3 563 return timeScale;
rlm@3 564 }
rlm@3 565
rlm@3 566 /**
rlm@3 567 * Sets the rate of video frames in time scale units.
rlm@3 568 * <p>
rlm@3 569 * The default value is 30. Together with the default value 1 of timeScale
rlm@3 570 * this results in 30 frames pers second.
rlm@3 571 *
rlm@3 572 * @param newValue
rlm@3 573 */
rlm@3 574 public void setFrameRate(int newValue) {
rlm@3 575 if (newValue <= 0) {
rlm@3 576 throw new IllegalArgumentException("frameDuration must be greater 0");
rlm@3 577 }
rlm@3 578 if (state == States.STARTED) {
rlm@3 579 throw new IllegalStateException("frameDuration must be set before the first frame is written");
rlm@3 580 }
rlm@3 581 this.frameRate = newValue;
rlm@3 582 }
rlm@3 583
rlm@3 584 /**
rlm@3 585 * Returns the frame rate of this media.
rlm@3 586 *
rlm@3 587 * @return frame rate
rlm@3 588 */
rlm@3 589 public int getFrameRate() {
rlm@3 590 return frameRate;
rlm@3 591 }
rlm@3 592
rlm@3 593 /** Sets the global color palette. */
rlm@3 594 public void setPalette(IndexColorModel palette) {
rlm@3 595 this.palette = palette;
rlm@3 596 }
rlm@3 597
rlm@3 598 /**
rlm@3 599 * Sets the compression quality of the video track.
rlm@3 600 * A value of 0 stands for "high compression is important" a value of
rlm@3 601 * 1 for "high image quality is important".
rlm@3 602 * <p>
rlm@3 603 * Changing this value affects frames which are subsequently written
rlm@3 604 * to the AVIOutputStream. Frames which have already been written
rlm@3 605 * are not changed.
rlm@3 606 * <p>
rlm@3 607 * This value has only effect on videos encoded with JPG format.
rlm@3 608 * <p>
rlm@3 609 * The default value is 0.9.
rlm@3 610 *
rlm@3 611 * @param newValue
rlm@3 612 */
rlm@3 613 public void setVideoCompressionQuality(float newValue) {
rlm@3 614 this.quality = newValue;
rlm@3 615 }
rlm@3 616
rlm@3 617 /**
rlm@3 618 * Returns the video compression quality.
rlm@3 619 *
rlm@3 620 * @return video compression quality
rlm@3 621 */
rlm@3 622 public float getVideoCompressionQuality() {
rlm@3 623 return quality;
rlm@3 624 }
rlm@3 625
rlm@3 626 /**
rlm@3 627 * Sets the dimension of the video track.
rlm@3 628 * <p>
rlm@3 629 * You need to explicitly set the dimension, if you add all frames from
rlm@3 630 * files or input streams.
rlm@3 631 * <p>
rlm@3 632 * If you add frames from buffered images, then AVIOutputStream
rlm@3 633 * can determine the video dimension from the image width and height.
rlm@3 634 *
rlm@3 635 * @param width Must be greater than 0.
rlm@3 636 * @param height Must be greater than 0.
rlm@3 637 */
rlm@3 638 public void setVideoDimension(int width, int height) {
rlm@3 639 if (width < 1 || height < 1) {
rlm@3 640 throw new IllegalArgumentException("width and height must be greater zero.");
rlm@3 641 }
rlm@3 642 this.imgWidth = width;
rlm@3 643 this.imgHeight = height;
rlm@3 644 }
rlm@3 645
rlm@3 646 /**
rlm@3 647 * Gets the dimension of the video track.
rlm@3 648 * <p>
rlm@3 649 * Returns null if the dimension is not known.
rlm@3 650 */
rlm@3 651 public Dimension getVideoDimension() {
rlm@3 652 if (imgWidth < 1 || imgHeight < 1) {
rlm@3 653 return null;
rlm@3 654 }
rlm@3 655 return new Dimension(imgWidth, imgHeight);
rlm@3 656 }
rlm@3 657
/**
 * Sets the state of the QuickTimeOutpuStream to started.
 * <p>
 * If the state is changed by this method, the prolog is
 * written.
 * NOTE(review): "QuickTimeOutpuStream" (sic) in the javadoc looks like a
 * copy-paste; this starts the AVI output stream.
 */
private void ensureStarted() throws IOException {
    if (state != States.STARTED) {
        // Record the creation time and write the file headers exactly once,
        // just before the first frame.
        creationTime = new Date();
        writeProlog();
        state = States.STARTED;
    }
}
rlm@3 671
rlm@3 672 /**
rlm@3 673 * Writes a frame to the video track.
rlm@3 674 * <p>
rlm@3 675 * If the dimension of the video track has not been specified yet, it
rlm@3 676 * is derived from the first buffered image added to the AVIOutputStream.
rlm@3 677 *
rlm@3 678 * @param image The frame image.
rlm@3 679 *
rlm@3 680 * @throws IllegalArgumentException if the duration is less than 1, or
rlm@3 681 * if the dimension of the frame does not match the dimension of the video
rlm@3 682 * track.
rlm@3 683 * @throws IOException if writing the image failed.
rlm@3 684 */
rlm@3 685 public void writeFrame(BufferedImage image) throws IOException {
rlm@3 686 ensureOpen();
rlm@3 687 ensureStarted();
rlm@3 688
rlm@3 689 // Get the dimensions of the first image
rlm@3 690 if (imgWidth == -1) {
rlm@3 691 imgWidth = image.getWidth();
rlm@3 692 imgHeight = image.getHeight();
rlm@3 693 } else {
rlm@3 694 // The dimension of the image must match the dimension of the video track
rlm@3 695 if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) {
rlm@3 696 throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size()
rlm@3 697 + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
rlm@3 698 + ") differs from image[0] (width="
rlm@3 699 + imgWidth + ", height=" + imgHeight);
rlm@3 700 }
rlm@3 701 }
rlm@3 702
rlm@3 703 DataChunk videoFrameChunk;
rlm@3 704 long offset = getRelativeStreamPosition();
rlm@3 705 boolean isSync = true;
rlm@3 706 switch (videoFormat) {
rlm@3 707 case RAW: {
rlm@3 708 switch (imgDepth) {
rlm@3 709 case 4: {
rlm@3 710 IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
rlm@3 711 int[] imgRGBs = new int[16];
rlm@3 712 imgPalette.getRGBs(imgRGBs);
rlm@3 713 int[] previousRGBs = new int[16];
rlm@3 714 if (previousPalette == null) {
rlm@3 715 previousPalette = palette;
rlm@3 716 }
rlm@3 717 previousPalette.getRGBs(previousRGBs);
rlm@3 718 if (!Arrays.equals(imgRGBs, previousRGBs)) {
rlm@3 719 previousPalette = imgPalette;
rlm@3 720 DataChunk paletteChangeChunk = new DataChunk("00pc");
rlm@3 721 /*
rlm@3 722 int first = imgPalette.getMapSize();
rlm@3 723 int last = -1;
rlm@3 724 for (int i = 0; i < 16; i++) {
rlm@3 725 if (previousRGBs[i] != imgRGBs[i] && i < first) {
rlm@3 726 first = i;
rlm@3 727 }
rlm@3 728 if (previousRGBs[i] != imgRGBs[i] && i > last) {
rlm@3 729 last = i;
rlm@3 730 }
rlm@3 731 }*/
rlm@3 732 int first = 0;
rlm@3 733 int last = imgPalette.getMapSize() - 1;
rlm@3 734 /*
rlm@3 735 * typedef struct {
rlm@3 736 BYTE bFirstEntry;
rlm@3 737 BYTE bNumEntries;
rlm@3 738 WORD wFlags;
rlm@3 739 PALETTEENTRY peNew[];
rlm@3 740 } AVIPALCHANGE;
rlm@3 741 *
rlm@3 742 * typedef struct tagPALETTEENTRY {
rlm@3 743 BYTE peRed;
rlm@3 744 BYTE peGreen;
rlm@3 745 BYTE peBlue;
rlm@3 746 BYTE peFlags;
rlm@3 747 } PALETTEENTRY;
rlm@3 748 */
rlm@3 749 DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
rlm@3 750 pOut.writeByte(first);//bFirstEntry
rlm@3 751 pOut.writeByte(last - first + 1);//bNumEntries
rlm@3 752 pOut.writeShort(0);//wFlags
rlm@3 753
rlm@3 754 for (int i = first; i <= last; i++) {
rlm@3 755 pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
rlm@3 756 pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
rlm@3 757 pOut.writeByte(imgRGBs[i] & 0xff); // blue
rlm@3 758 pOut.writeByte(0); // reserved*/
rlm@3 759 }
rlm@3 760
rlm@3 761 moviChunk.add(paletteChangeChunk);
rlm@3 762 paletteChangeChunk.finish();
rlm@3 763 long length = getRelativeStreamPosition() - offset;
rlm@3 764 videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
rlm@3 765 offset = getRelativeStreamPosition();
rlm@3 766 }
rlm@3 767
rlm@3 768 videoFrameChunk = new DataChunk("00db");
rlm@3 769 byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
rlm@3 770 byte[] rgb4 = new byte[imgWidth / 2];
rlm@3 771 for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
rlm@3 772 for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) {
rlm@3 773 rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf));
rlm@3 774 }
rlm@3 775 videoFrameChunk.getOutputStream().write(rgb4);
rlm@3 776 }
rlm@3 777 break;
rlm@3 778 }
rlm@3 779 case 8: {
rlm@3 780 IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
rlm@3 781 int[] imgRGBs = new int[256];
rlm@3 782 imgPalette.getRGBs(imgRGBs);
rlm@3 783 int[] previousRGBs = new int[256];
rlm@3 784 if (previousPalette == null) {
rlm@3 785 previousPalette = palette;
rlm@3 786 }
rlm@3 787 previousPalette.getRGBs(previousRGBs);
rlm@3 788 if (!Arrays.equals(imgRGBs, previousRGBs)) {
rlm@3 789 previousPalette = imgPalette;
rlm@3 790 DataChunk paletteChangeChunk = new DataChunk("00pc");
rlm@3 791 /*
rlm@3 792 int first = imgPalette.getMapSize();
rlm@3 793 int last = -1;
rlm@3 794 for (int i = 0; i < 16; i++) {
rlm@3 795 if (previousRGBs[i] != imgRGBs[i] && i < first) {
rlm@3 796 first = i;
rlm@3 797 }
rlm@3 798 if (previousRGBs[i] != imgRGBs[i] && i > last) {
rlm@3 799 last = i;
rlm@3 800 }
rlm@3 801 }*/
rlm@3 802 int first = 0;
rlm@3 803 int last = imgPalette.getMapSize() - 1;
rlm@3 804 /*
rlm@3 805 * typedef struct {
rlm@3 806 BYTE bFirstEntry;
rlm@3 807 BYTE bNumEntries;
rlm@3 808 WORD wFlags;
rlm@3 809 PALETTEENTRY peNew[];
rlm@3 810 } AVIPALCHANGE;
rlm@3 811 *
rlm@3 812 * typedef struct tagPALETTEENTRY {
rlm@3 813 BYTE peRed;
rlm@3 814 BYTE peGreen;
rlm@3 815 BYTE peBlue;
rlm@3 816 BYTE peFlags;
rlm@3 817 } PALETTEENTRY;
rlm@3 818 */
rlm@3 819 DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
rlm@3 820 pOut.writeByte(first);//bFirstEntry
rlm@3 821 pOut.writeByte(last - first + 1);//bNumEntries
rlm@3 822 pOut.writeShort(0);//wFlags
rlm@3 823
rlm@3 824 for (int i = first; i <= last; i++) {
rlm@3 825 pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
rlm@3 826 pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
rlm@3 827 pOut.writeByte(imgRGBs[i] & 0xff); // blue
rlm@3 828 pOut.writeByte(0); // reserved*/
rlm@3 829 }
rlm@3 830
rlm@3 831 moviChunk.add(paletteChangeChunk);
rlm@3 832 paletteChangeChunk.finish();
rlm@3 833 long length = getRelativeStreamPosition() - offset;
rlm@3 834 videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
rlm@3 835 offset = getRelativeStreamPosition();
rlm@3 836 }
rlm@3 837
rlm@3 838 videoFrameChunk = new DataChunk("00db");
rlm@3 839 byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
rlm@3 840 for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
rlm@3 841 videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth);
rlm@3 842 }
rlm@3 843 break;
rlm@3 844 }
rlm@3 845 default: {
rlm@3 846 videoFrameChunk = new DataChunk("00db");
rlm@3 847 WritableRaster raster = image.getRaster();
rlm@3 848 int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data
rlm@3 849 byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data
rlm@3 850 for (int y = imgHeight - 1; y >= 0; --y) { // Upside down
rlm@3 851 raster.getPixels(0, y, imgWidth, 1, raw);
rlm@3 852 for (int x = 0, n = imgWidth * 3; x < n; x += 3) {
rlm@3 853 bytes[x + 2] = (byte) raw[x]; // Blue
rlm@3 854 bytes[x + 1] = (byte) raw[x + 1]; // Green
rlm@3 855 bytes[x] = (byte) raw[x + 2]; // Red
rlm@3 856 }
rlm@3 857 videoFrameChunk.getOutputStream().write(bytes);
rlm@3 858 }
rlm@3 859 break;
rlm@3 860 }
rlm@3 861 }
rlm@3 862 break;
rlm@3 863 }
rlm@3 864
rlm@3 865 case JPG: {
rlm@3 866 videoFrameChunk = new DataChunk("00dc");
rlm@3 867 ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next();
rlm@3 868 ImageWriteParam iwParam = iw.getDefaultWriteParam();
rlm@3 869 iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
rlm@3 870 iwParam.setCompressionQuality(quality);
rlm@3 871 MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
rlm@3 872 iw.setOutput(imgOut);
rlm@3 873 IIOImage img = new IIOImage(image, null, null);
rlm@3 874 iw.write(null, img, iwParam);
rlm@3 875 iw.dispose();
rlm@3 876 break;
rlm@3 877 }
rlm@3 878 case PNG:
rlm@3 879 default: {
rlm@3 880 videoFrameChunk = new DataChunk("00dc");
rlm@3 881 ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next();
rlm@3 882 ImageWriteParam iwParam = iw.getDefaultWriteParam();
rlm@3 883 MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
rlm@3 884 iw.setOutput(imgOut);
rlm@3 885 IIOImage img = new IIOImage(image, null, null);
rlm@3 886 iw.write(null, img, iwParam);
rlm@3 887 iw.dispose();
rlm@3 888 break;
rlm@3 889 }
rlm@3 890 }
rlm@3 891 long length = getRelativeStreamPosition() - offset;
rlm@3 892 moviChunk.add(videoFrameChunk);
rlm@3 893 videoFrameChunk.finish();
rlm@3 894
rlm@3 895 videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync));
rlm@3 896 if (getRelativeStreamPosition() > 1L << 32) {
rlm@3 897 throw new IOException("AVI file is larger than 4 GB");
rlm@3 898 }
rlm@3 899 }
rlm@3 900
rlm@3 901 /**
rlm@3 902 * Writes a frame from a file to the video track.
rlm@3 903 * <p>
rlm@3 904 * This method does not inspect the contents of the file.
rlm@3 905 * For example, Its your responsibility to only add JPG files if you have
rlm@3 906 * chosen the JPEG video format.
rlm@3 907 * <p>
rlm@3 908 * If you add all frames from files or from input streams, then you
rlm@3 909 * have to explicitly set the dimension of the video track before you
rlm@3 910 * call finish() or close().
rlm@3 911 *
rlm@3 912 * @param file The file which holds the image data.
rlm@3 913 *
rlm@3 914 * @throws IllegalStateException if the duration is less than 1.
rlm@3 915 * @throws IOException if writing the image failed.
rlm@3 916 */
rlm@3 917 public void writeFrame(File file) throws IOException {
rlm@3 918 FileInputStream in = null;
rlm@3 919 try {
rlm@3 920 in = new FileInputStream(file);
rlm@3 921 writeFrame(in);
rlm@3 922 } finally {
rlm@3 923 if (in != null) {
rlm@3 924 in.close();
rlm@3 925 }
rlm@3 926 }
rlm@3 927 }
rlm@3 928
rlm@3 929 /**
rlm@3 930 * Writes a frame to the video track.
rlm@3 931 * <p>
rlm@3 932 * This method does not inspect the contents of the file.
rlm@3 933 * For example, its your responsibility to only add JPG files if you have
rlm@3 934 * chosen the JPEG video format.
rlm@3 935 * <p>
rlm@3 936 * If you add all frames from files or from input streams, then you
rlm@3 937 * have to explicitly set the dimension of the video track before you
rlm@3 938 * call finish() or close().
rlm@3 939 *
rlm@3 940 * @param in The input stream which holds the image data.
rlm@3 941 *
rlm@3 942 * @throws IllegalArgumentException if the duration is less than 1.
rlm@3 943 * @throws IOException if writing the image failed.
rlm@3 944 */
rlm@3 945 public void writeFrame(InputStream in) throws IOException {
rlm@3 946 ensureOpen();
rlm@3 947 ensureStarted();
rlm@3 948
rlm@3 949 DataChunk videoFrameChunk = new DataChunk(
rlm@3 950 videoFormat == VideoFormat.RAW ? "00db" : "00dc");
rlm@3 951 moviChunk.add(videoFrameChunk);
rlm@3 952 OutputStream mdatOut = videoFrameChunk.getOutputStream();
rlm@3 953 long offset = getRelativeStreamPosition();
rlm@3 954 byte[] buf = new byte[512];
rlm@3 955 int len;
rlm@3 956 while ((len = in.read(buf)) != -1) {
rlm@3 957 mdatOut.write(buf, 0, len);
rlm@3 958 }
rlm@3 959 long length = getRelativeStreamPosition() - offset;
rlm@3 960 videoFrameChunk.finish();
rlm@3 961 videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true));
rlm@3 962 if (getRelativeStreamPosition() > 1L << 32) {
rlm@3 963 throw new IOException("AVI file is larger than 4 GB");
rlm@3 964 }
rlm@3 965 }
rlm@3 966
rlm@3 967 /**
rlm@3 968 * Closes the movie file as well as the stream being filtered.
rlm@3 969 *
rlm@3 970 * @exception IOException if an I/O error has occurred
rlm@3 971 */
rlm@3 972 public void close() throws IOException {
rlm@3 973 if (state == States.STARTED) {
rlm@3 974 finish();
rlm@3 975 }
rlm@3 976 if (state != States.CLOSED) {
rlm@3 977 out.close();
rlm@3 978 state = States.CLOSED;
rlm@3 979 }
rlm@3 980 }
rlm@3 981
rlm@3 982 /**
rlm@3 983 * Finishes writing the contents of the AVI output stream without closing
rlm@3 984 * the underlying stream. Use this method when applying multiple filters
rlm@3 985 * in succession to the same output stream.
rlm@3 986 *
rlm@3 987 * @exception IllegalStateException if the dimension of the video track
rlm@3 988 * has not been specified or determined yet.
rlm@3 989 * @exception IOException if an I/O exception has occurred
rlm@3 990 */
rlm@3 991 public void finish() throws IOException {
rlm@3 992 ensureOpen();
rlm@3 993 if (state != States.FINISHED) {
rlm@3 994 if (imgWidth == -1 || imgHeight == -1) {
rlm@3 995 throw new IllegalStateException("image width and height must be specified");
rlm@3 996 }
rlm@3 997
rlm@3 998 moviChunk.finish();
rlm@3 999 writeEpilog();
rlm@3 1000 state = States.FINISHED;
rlm@3 1001 imgWidth = imgHeight = -1;
rlm@3 1002 }
rlm@3 1003 }
rlm@3 1004
rlm@3 1005 /**
rlm@3 1006 * Check to make sure that this stream has not been closed
rlm@3 1007 */
rlm@3 1008 private void ensureOpen() throws IOException {
rlm@3 1009 if (state == States.CLOSED) {
rlm@3 1010 throw new IOException("Stream closed");
rlm@3 1011 }
rlm@3 1012 }
rlm@3 1013
/** Gets the position relative to the beginning of the AVI stream.
 * <p>
 * Usually this value is equal to the stream position of the underlying
 * ImageOutputStream, but can be larger if the underlying stream already
 * contained data.
 *
 * @return The relative stream position.
 * @throws IOException if querying the underlying stream position fails.
 */
private long getRelativeStreamPosition() throws IOException {
    return out.getStreamPosition() - streamOffset;
}
rlm@3 1026
/** Seeks relative to the beginning of the AVI stream.
 * <p>
 * Usually this is equal to seeking in the underlying ImageOutputStream,
 * but can be different if the underlying stream already contained data.
 *
 * @param newPosition the desired position, relative to the start of the
 *                    AVI data (streamOffset is added internally).
 * @throws IOException if seeking in the underlying stream fails.
 */
private void seekRelative(long newPosition) throws IOException {
    out.seek(newPosition + streamOffset);
}
rlm@3 1036
rlm@3 1037 private void writeProlog() throws IOException {
rlm@3 1038 // The file has the following structure:
rlm@3 1039 //
rlm@3 1040 // .RIFF AVI
rlm@3 1041 // ..avih (AVI Header Chunk)
rlm@3 1042 // ..LIST strl
rlm@3 1043 // ...strh (Stream Header Chunk)
rlm@3 1044 // ...strf (Stream Format Chunk)
rlm@3 1045 // ..LIST movi
rlm@3 1046 // ...00dc (Compressed video data chunk in Track 00, repeated for each frame)
rlm@3 1047 // ..idx1 (List of video data chunks and their location in the file)
rlm@3 1048
rlm@3 1049 // The RIFF AVI Chunk holds the complete movie
rlm@3 1050 aviChunk = new CompositeChunk("RIFF", "AVI ");
rlm@3 1051 CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl");
rlm@3 1052
rlm@3 1053 // Write empty AVI Main Header Chunk - we fill the data in later
rlm@3 1054 aviChunk.add(hdrlChunk);
rlm@3 1055 avihChunk = new FixedSizeDataChunk("avih", 56);
rlm@3 1056 avihChunk.seekToEndOfChunk();
rlm@3 1057 hdrlChunk.add(avihChunk);
rlm@3 1058
rlm@3 1059 CompositeChunk strlChunk = new CompositeChunk("LIST", "strl");
rlm@3 1060 hdrlChunk.add(strlChunk);
rlm@3 1061
rlm@3 1062 // Write empty AVI Stream Header Chunk - we fill the data in later
rlm@3 1063 strhChunk = new FixedSizeDataChunk("strh", 56);
rlm@3 1064 strhChunk.seekToEndOfChunk();
rlm@3 1065 strlChunk.add(strhChunk);
rlm@3 1066 strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4);
rlm@3 1067 strfChunk.seekToEndOfChunk();
rlm@3 1068 strlChunk.add(strfChunk);
rlm@3 1069
rlm@3 1070 moviChunk = new CompositeChunk("LIST", "movi");
rlm@3 1071 aviChunk.add(moviChunk);
rlm@3 1072
rlm@3 1073
rlm@3 1074 }
rlm@3 1075
/**
 * Writes the trailing structures of the AVI file: the idx1 index chunk,
 * then fills in the previously reserved avih (main header), strh
 * (stream header) and strf (BITMAPINFOHEADER stream format) chunks, and
 * finally finishes the top-level RIFF chunk.
 *
 * @throws IOException if writing to the underlying stream fails.
 */
private void writeEpilog() throws IOException {
    // Compute values
    // NOTE(review): duration is computed but never written anywhere
    // below; dwTotalFrames and dwLength use videoFrames.size() instead.
    int duration = 0;
    for (Sample s : videoFrames) {
        duration += s.duration;
    }
    // bufferSize = size of the largest chunk; used below as the
    // suggested read buffer size (dwSuggestedBufferSize).
    long bufferSize = 0;
    for (Sample s : videoFrames) {
        if (s.length > bufferSize) {
            bufferSize = s.length;
        }
    }


    DataChunkOutputStream d;

    /* Create Idx1 Chunk and write data
     * -------------
     typedef struct _avioldindex {
     FOURCC fcc;
     DWORD cb;
     struct _avioldindex_entry {
     DWORD dwChunkId;
     DWORD dwFlags;
     DWORD dwOffset;
     DWORD dwSize;
     } aIndex[];
     } AVIOLDINDEX;
     */
    DataChunk idx1Chunk = new DataChunk("idx1");
    aviChunk.add(idx1Chunk);
    d = idx1Chunk.getOutputStream();
    // Index offsets are relative to the start of the 'movi' list data
    // (skip the 8-byte chunk header).
    long moviListOffset = moviChunk.offset + 8;
    //moviListOffset = 0;
    for (Sample f : videoFrames) {

        d.writeType(f.chunkType); // dwChunkId
        // Specifies a FOURCC that identifies a stream in the AVI file. The
        // FOURCC must have the form 'xxyy' where xx is the stream number and yy
        // is a two-character code that identifies the contents of the stream:
        //
        // Two-character code    Description
        //  db                   Uncompressed video frame
        //  dc                   Compressed video frame
        //  pc                   Palette change
        //  wb                   Audio data

        d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)//
                | (f.isSync ? 0x10 : 0x0)); // dwFlags
        // Specifies a bitwise combination of zero or more of the following
        // flags:
        //
        // Value    Name            Description
        // 0x10     AVIIF_KEYFRAME  The data chunk is a key frame.
        // 0x1      AVIIF_LIST      The data chunk is a 'rec ' list.
        // 0x100    AVIIF_NO_TIME   The data chunk does not affect the timing of the
        //                          stream. For example, this flag should be set for
        //                          palette changes.

        d.writeUInt(f.offset - moviListOffset); // dwOffset
        // Specifies the location of the data chunk in the file. The value
        // should be specified as an offset, in bytes, from the start of the
        // 'movi' list; however, in some AVI files it is given as an offset from
        // the start of the file.

        d.writeUInt(f.length); // dwSize
        // Specifies the size of the data chunk, in bytes.
    }
    idx1Chunk.finish();

    /* Write Data into AVI Main Header Chunk
     * -------------
     * The AVIMAINHEADER structure defines global information in an AVI file.
     * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx
     typedef struct _avimainheader {
     FOURCC fcc;
     DWORD  cb;
     DWORD  dwMicroSecPerFrame;
     DWORD  dwMaxBytesPerSec;
     DWORD  dwPaddingGranularity;
     DWORD  dwFlags;
     DWORD  dwTotalFrames;
     DWORD  dwInitialFrames;
     DWORD  dwStreams;
     DWORD  dwSuggestedBufferSize;
     DWORD  dwWidth;
     DWORD  dwHeight;
     DWORD  dwReserved[4];
     } AVIMAINHEADER; */
    avihChunk.seekToStartOfData();
    d = avihChunk.getOutputStream();

    d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame
    // Specifies the number of microseconds between frames.
    // This value indicates the overall timing for the file.

    d.writeUInt(0); // dwMaxBytesPerSec
    // Specifies the approximate maximum data rate of the file.
    // This value indicates the number of bytes per second the system
    // must handle to present an AVI sequence as specified by the other
    // parameters contained in the main header and stream header chunks.

    d.writeUInt(0); // dwPaddingGranularity
    // Specifies the alignment for data, in bytes. Pad the data to multiples
    // of this value.

    d.writeUInt(0x10); // dwFlags (0x10 == hasIndex)
    // Contains a bitwise combination of zero or more of the following
    // flags:
    //
    // Value   Name         Description
    // 0x10    AVIF_HASINDEX Indicates the AVI file has an index.
    // 0x20    AVIF_MUSTUSEINDEX Indicates that application should use the
    //                      index, rather than the physical ordering of the
    //                      chunks in the file, to determine the order of
    //                      presentation of the data. For example, this flag
    //                      could be used to create a list of frames for
    //                      editing.
    // 0x100   AVIF_ISINTERLEAVED Indicates the AVI file is interleaved.
    // 0x1000  AVIF_WASCAPTUREFILE Indicates the AVI file is a specially
    //                      allocated file used for capturing real-time
    //                      video. Applications should warn the user before
    //                      writing over a file with this flag set because
    //                      the user probably defragmented this file.
    // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted
    //                      data and software. When this flag is used,
    //                      software should not permit the data to be
    //                      duplicated.

    d.writeUInt(videoFrames.size()); // dwTotalFrames
    // Specifies the total number of frames of data in the file.

    d.writeUInt(0); // dwInitialFrames
    // Specifies the initial frame for interleaved files. Noninterleaved
    // files should specify zero. If you are creating interleaved files,
    // specify the number of frames in the file prior to the initial frame
    // of the AVI sequence in this member.
    // To give the audio driver enough audio to work with, the audio data in
    // an interleaved file must be skewed from the video data. Typically,
    // the audio data should be moved forward enough frames to allow
    // approximately 0.75 seconds of audio data to be preloaded. The
    // dwInitialRecords member should be set to the number of frames the
    // audio is skewed. Also set the same value for the dwInitialFrames
    // member of the AVISTREAMHEADER structure in the audio stream header

    d.writeUInt(1); // dwStreams
    // Specifies the number of streams in the file. For example, a file with
    // audio and video has two streams.

    d.writeUInt(bufferSize); // dwSuggestedBufferSize
    // Specifies the suggested buffer size for reading the file. Generally,
    // this size should be large enough to contain the largest chunk in the
    // file. If set to zero, or if it is too small, the playback software
    // will have to reallocate memory during playback, which will reduce
    // performance. For an interleaved file, the buffer size should be large
    // enough to read an entire record, and not just a chunk.


    d.writeUInt(imgWidth); // dwWidth
    // Specifies the width of the AVI file in pixels.

    d.writeUInt(imgHeight); // dwHeight
    // Specifies the height of the AVI file in pixels.

    d.writeUInt(0); // dwReserved[0]
    d.writeUInt(0); // dwReserved[1]
    d.writeUInt(0); // dwReserved[2]
    d.writeUInt(0); // dwReserved[3]
    // Reserved. Set this array to zero.

    /* Write Data into AVI Stream Header Chunk
     * -------------
     * The AVISTREAMHEADER structure contains information about one stream
     * in an AVI file.
     * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx
    typedef struct _avistreamheader {
    FOURCC fcc;
    DWORD  cb;
    FOURCC fccType;
    FOURCC fccHandler;
    DWORD  dwFlags;
    WORD   wPriority;
    WORD   wLanguage;
    DWORD  dwInitialFrames;
    DWORD  dwScale;
    DWORD  dwRate;
    DWORD  dwStart;
    DWORD  dwLength;
    DWORD  dwSuggestedBufferSize;
    DWORD  dwQuality;
    DWORD  dwSampleSize;
    struct {
    short int left;
    short int top;
    short int right;
    short int bottom;
    }  rcFrame;
    } AVISTREAMHEADER;
     */
    strhChunk.seekToStartOfData();
    d = strhChunk.getOutputStream();
    d.writeType("vids"); // fccType - vids for video stream
    // Contains a FOURCC that specifies the type of the data contained in
    // the stream. The following standard AVI values for video and audio are
    // defined:
    //
    // FOURCC   Description
    // 'auds'   Audio stream
    // 'mids'   MIDI stream
    // 'txts'   Text stream
    // 'vids'   Video stream

    switch (videoFormat) {
        case RAW:
            d.writeType("DIB "); // fccHandler - DIB for Raw RGB
            break;
        case RLE:
            d.writeType("RLE "); // fccHandler - Microsoft RLE
            break;
        case JPG:
            d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG
            break;
        case PNG:
        default:
            d.writeType("png "); // fccHandler - png for PNG
            break;
    }
    // Optionally, contains a FOURCC that identifies a specific data
    // handler. The data handler is the preferred handler for the stream.
    // For audio and video streams, this specifies the codec for decoding
    // the stream.

    if (imgDepth <= 8) {
        d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES
    } else {
        d.writeUInt(0); // dwFlags
    }

    // Contains any flags for the data stream. The bits in the high-order
    // word of these flags are specific to the type of data contained in the
    // stream. The following standard flags are defined:
    //
    // Value    Name        Description
    //          AVISF_DISABLED 0x00000001 Indicates this stream should not
    //                      be enabled by default.
    //          AVISF_VIDEO_PALCHANGES 0x00010000
    //                      Indicates this video stream contains
    //                      palette changes. This flag warns the playback
    //                      software that it will need to animate the
    //                      palette.

    d.writeUShort(0); // wPriority
    // Specifies priority of a stream type. For example, in a file with
    // multiple audio streams, the one with the highest priority might be
    // the default stream.

    d.writeUShort(0); // wLanguage
    // Language tag.

    d.writeUInt(0); // dwInitialFrames
    // Specifies how far audio data is skewed ahead of the video frames in
    // interleaved files. Typically, this is about 0.75 seconds. If you are
    // creating interleaved files, specify the number of frames in the file
    // prior to the initial frame of the AVI sequence in this member. For
    // more information, see the remarks for the dwInitialFrames member of
    // the AVIMAINHEADER structure.

    d.writeUInt(timeScale); // dwScale
    // Used with dwRate to specify the time scale that this stream will use.
    // Dividing dwRate by dwScale gives the number of samples per second.
    // For video streams, this is the frame rate. For audio streams, this
    // rate corresponds to the time needed to play nBlockAlign bytes of
    // audio, which for PCM audio is the just the sample rate.

    d.writeUInt(frameRate); // dwRate
    // See dwScale.

    d.writeUInt(0); // dwStart
    // Specifies the starting time for this stream. The units are defined by
    // the dwRate and dwScale members in the main file header. Usually, this
    // is zero, but it can specify a delay time for a stream that does not
    // start concurrently with the file.

    d.writeUInt(videoFrames.size()); // dwLength
    // Specifies the length of this stream. The units are defined by the
    // dwRate and dwScale members of the stream's header.

    d.writeUInt(bufferSize); // dwSuggestedBufferSize
    // Specifies how large a buffer should be used to read this stream.
    // Typically, this contains a value corresponding to the largest chunk
    // present in the stream. Using the correct buffer size makes playback
    // more efficient. Use zero if you do not know the correct buffer size.

    d.writeInt(-1); // dwQuality
    // Specifies an indicator of the quality of the data in the stream.
    // Quality is represented as a number between 0 and 10,000.
    // For compressed data, this typically represents the value of the
    // quality parameter passed to the compression software. If set to -1,
    // drivers use the default quality value.

    d.writeUInt(0); // dwSampleSize
    // Specifies the size of a single sample of data. This is set to zero
    // if the samples can vary in size. If this number is nonzero, then
    // multiple samples of data can be grouped into a single chunk within
    // the file. If it is zero, each sample of data (such as a video frame)
    // must be in a separate chunk. For video streams, this number is
    // typically zero, although it can be nonzero if all video frames are
    // the same size. For audio streams, this number should be the same as
    // the nBlockAlign member of the WAVEFORMATEX structure describing the
    // audio.

    d.writeUShort(0); // rcFrame.left
    d.writeUShort(0); // rcFrame.top
    d.writeUShort(imgWidth); // rcFrame.right
    d.writeUShort(imgHeight); // rcFrame.bottom
    // Specifies the destination rectangle for a text or video stream within
    // the movie rectangle specified by the dwWidth and dwHeight members of
    // the AVI main header structure. The rcFrame member is typically used
    // in support of multiple video streams. Set this rectangle to the
    // coordinates corresponding to the movie rectangle to update the whole
    // movie rectangle. Units for this member are pixels. The upper-left
    // corner of the destination rectangle is relative to the upper-left
    // corner of the movie rectangle.

    /* Write BITMAPINFOHEADER Data into AVI Stream Format Chunk
     * -------------
     * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
    typedef struct tagBITMAPINFOHEADER {
    DWORD  biSize;
    LONG   biWidth;
    LONG   biHeight;
    WORD   biPlanes;
    WORD   biBitCount;
    DWORD  biCompression;
    DWORD  biSizeImage;
    LONG   biXPelsPerMeter;
    LONG   biYPelsPerMeter;
    DWORD  biClrUsed;
    DWORD  biClrImportant;
    } BITMAPINFOHEADER;
     */
    strfChunk.seekToStartOfData();
    d = strfChunk.getOutputStream();
    d.writeUInt(40); // biSize
    // Specifies the number of bytes required by the structure. This value
    // does not include the size of the color table or the size of the color
    // masks, if they are appended to the end of structure.

    d.writeInt(imgWidth); // biWidth
    // Specifies the width of the bitmap, in pixels.

    d.writeInt(imgHeight); // biHeight
    // Specifies the height of the bitmap, in pixels.
    //
    // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
    // a bottom-up DIB with the origin at the lower left corner. If biHeight
    // is negative, the bitmap is a top-down DIB with the origin at the
    // upper left corner.
    // For YUV bitmaps, the bitmap is always top-down, regardless of the
    // sign of biHeight. Decoders should offer YUV formats with positive
    // biHeight, but for backward compatibility they should accept YUV
    // formats with either positive or negative biHeight.
    // For compressed formats, biHeight must be positive, regardless of
    // image orientation.

    d.writeShort(1); // biPlanes
    // Specifies the number of planes for the target device. This value must
    // be set to 1.

    d.writeShort(imgDepth); // biBitCount
    // Specifies the number of bits per pixel (bpp). For uncompressed
    // formats, this value is the average number of bits per pixel. For
    // compressed formats, this value is the implied bit depth of the
    // uncompressed image, after the image has been decoded.

    switch (videoFormat) {
        case RAW:
        default:
            d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
            break;
        case RLE:
            if (imgDepth == 8) {
                d.writeInt(1); // biCompression - BI_RLE8
            } else if (imgDepth == 4) {
                d.writeInt(2); // biCompression - BI_RLE4
            } else {
                throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
            }
            break;
        case JPG:
            d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
            break;
        case PNG:
            d.writeType("png "); // biCompression - png for PNG
            break;
    }
    // For compressed video and YUV formats, this member is a FOURCC code,
    // specified as a DWORD in little-endian order. For example, YUYV video
    // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
    // Codes.
    //
    // For uncompressed RGB formats, the following values are possible:
    //
    // Value        Description
    // BI_RGB       0x00000000 Uncompressed RGB.
    // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks.
    //                         Valid for 16-bpp and 32-bpp bitmaps.
    //
    // Note that BI_JPG and BI_PNG are not valid video formats.
    //
    // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
    // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
    // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
    // structure to determine the specific RGB type.

    switch (videoFormat) {
        case RAW:
            d.writeInt(0); // biSizeImage
            break;
        case RLE:
        case JPG:
        case PNG:
        default:
            if (imgDepth == 4) {
                d.writeInt(imgWidth * imgHeight / 2); // biSizeImage
            } else {
                int bytesPerPixel = Math.max(1, imgDepth / 8);
                d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage
            }
            break;
    }
    // Specifies the size, in bytes, of the image. This can be set to 0 for
    // uncompressed RGB bitmaps.

    d.writeInt(0); // biXPelsPerMeter
    // Specifies the horizontal resolution, in pixels per meter, of the
    // target device for the bitmap.

    d.writeInt(0); // biYPelsPerMeter
    // Specifies the vertical resolution, in pixels per meter, of the target
    // device for the bitmap.

    d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed
    // Specifies the number of color indices in the color table that are
    // actually used by the bitmap.

    d.writeInt(0); // biClrImportant
    // Specifies the number of color indices that are considered important
    // for displaying the bitmap. If this value is zero, all colors are
    // important.

    if (palette != null) {
        // Append the color table as RGBQUAD entries (blue, green, red,
        // reserved) directly after the BITMAPINFOHEADER.
        for (int i = 0, n = palette.getMapSize(); i < n; ++i) {
            /*
             * typedef struct tagRGBQUAD {
             BYTE rgbBlue;
             BYTE rgbGreen;
             BYTE rgbRed;
             BYTE rgbReserved; // This member is reserved and must be zero.
             } RGBQUAD;
             */
            d.write(palette.getBlue(i));
            d.write(palette.getGreen(i));
            d.write(palette.getRed(i));
            d.write(0);
        }
    }


    // -----------------
    aviChunk.finish();
}
rlm@3 1548 }