Mercurial > jmeCapture
view src/com/aurellem/capture/AVIOutputStream.java @ 3:a92de00f0414
migrating files
author | Robert McIntyre <rlm@mit.edu> |
---|---|
date | Tue, 25 Oct 2011 11:55:55 -0700 |
parents | |
children |
line wrap: on
line source
1 /**2 * @(#)AVIOutputStream.java 1.5.1 2011-01-173 *4 * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.5 * All rights reserved.6 *7 * You may not use, copy or modify this file, except in compliance with the8 * license agreement you entered into with Werner Randelshofer.9 * For details see accompanying license terms.10 */11 package com.aurellem.capture;13 import java.awt.Dimension;14 import java.awt.image.BufferedImage;15 import java.awt.image.DataBufferByte;16 import java.awt.image.IndexColorModel;17 import java.awt.image.WritableRaster;18 import java.io.File;19 import java.io.FileInputStream;20 import java.io.IOException;21 import java.io.InputStream;22 import java.io.OutputStream;23 import java.util.Arrays;24 import java.util.Date;25 import java.util.LinkedList;27 import javax.imageio.IIOImage;28 import javax.imageio.ImageIO;29 import javax.imageio.ImageWriteParam;30 import javax.imageio.ImageWriter;31 import javax.imageio.stream.FileImageOutputStream;32 import javax.imageio.stream.ImageOutputStream;33 import javax.imageio.stream.MemoryCacheImageOutputStream;35 /**36 * This class supports writing of images into an AVI 1.0 video file.37 * <p>38 * The images are written as video frames.39 * <p>40 * Video frames can be encoded with one of the following formats:41 * <ul>42 * <li>JPEG</li>43 * <li>PNG</li>44 * <li>RAW</li>45 * <li>RLE</li>46 * </ul>47 * All frames must have the same format.48 * When JPG is used each frame can have an individual encoding quality.49 * <p>50 * All frames in an AVI file must have the same duration. 
The duration can51 * be set by setting an appropriate pair of values using methods52 * {@link #setFrameRate} and {@link #setTimeScale}.53 * <p>54 * The length of an AVI 1.0 file is limited to 1 GB.55 * This class supports lengths of up to 4 GB, but such files may not work on56 * all players.57 * <p>58 * For detailed information about the AVI RIFF file format see:<br>59 * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br>60 * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br>61 * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br>62 *63 * @author Werner Randelshofer64 * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream..65 * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format.66 * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets67 * in "idx1" chunk.68 * <br>1.3.2 2010-12-27 File size limit is 1 GB.69 * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets.70 * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream.71 * Added method getVideoDimension().72 * <br>1.2 2009-08-29 Adds support for RAW video format.73 * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih74 * chunk. Changed the API to reflect that AVI works with frame rates instead of75 * with frame durations.76 * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG77 * encoded video.78 * <br>1.0 2008-08-11 Created.79 */80 public class AVIOutputStream {82 /**83 * Underlying output stream.84 */85 private ImageOutputStream out;86 /** The offset of the QuickTime stream in the underlying ImageOutputStream.87 * Normally this is 0 unless the underlying stream already contained data88 * when it was passed to the constructor.89 */90 private long streamOffset;91 /** Previous frame for delta compression. 
*/92 private Object previousData;94 /**95 * Supported video encodings.96 */97 public static enum VideoFormat {99 RAW, RLE, JPG, PNG;100 }101 /**102 * Current video formats.103 */104 private VideoFormat videoFormat;105 /**106 * Quality of JPEG encoded video frames.107 */108 private float quality = 0.9f;109 /**110 * Creation time of the movie output stream.111 */112 private Date creationTime;113 /**114 * Width of the video frames. All frames must have the same width.115 * The value -1 is used to mark unspecified width.116 */117 private int imgWidth = -1;118 /**119 * Height of the video frames. All frames must have the same height.120 * The value -1 is used to mark unspecified height.121 */122 private int imgHeight = -1;123 /** Number of bits per pixel. */124 private int imgDepth = 24;125 /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */126 private IndexColorModel palette;127 private IndexColorModel previousPalette;128 /** Video encoder. */130 /**131 * The timeScale of the movie.132 * <p>133 * Used with frameRate to specify the time scale that this stream will use.134 * Dividing frameRate by timeScale gives the number of samples per second.135 * For video streams, this is the frame rate. For audio streams, this rate136 * corresponds to the time needed to play nBlockAlign bytes of audio, which137 * for PCM audio is the just the sample rate.138 */139 private int timeScale = 1;140 /**141 * The frameRate of the movie in timeScale units.142 * <p>143 * @see timeScale144 */145 private int frameRate = 30;146 /** Interval between keyframes. 
*/147 private int syncInterval = 30;149 /**150 * The states of the movie output stream.151 */152 private static enum States {154 STARTED, FINISHED, CLOSED;155 }156 /**157 * The current state of the movie output stream.158 */159 private States state = States.FINISHED;161 /**162 * AVI stores media data in samples.163 * A sample is a single element in a sequence of time-ordered data.164 */165 private static class Sample {167 String chunkType;168 /** Offset of the sample relative to the start of the AVI file.169 */170 long offset;171 /** Data length of the sample. */172 long length;173 /**174 * The duration of the sample in time scale units.175 */176 int duration;177 /** Whether the sample is a sync-sample. */178 boolean isSync;180 /**181 * Creates a new sample.182 * @param duration183 * @param offset184 * @param length185 */186 public Sample(String chunkId, int duration, long offset, long length, boolean isSync) {187 this.chunkType = chunkId;188 this.duration = duration;189 this.offset = offset;190 this.length = length;191 this.isSync = isSync;192 }193 }194 /**195 * List of video frames.196 */197 private LinkedList<Sample> videoFrames;198 /**199 * This chunk holds the whole AVI content.200 */201 private CompositeChunk aviChunk;202 /**203 * This chunk holds the movie frames.204 */205 private CompositeChunk moviChunk;206 /**207 * This chunk holds the AVI Main Header.208 */209 FixedSizeDataChunk avihChunk;210 /**211 * This chunk holds the AVI Stream Header.212 */213 FixedSizeDataChunk strhChunk;214 /**215 * This chunk holds the AVI Stream Format Header.216 */217 FixedSizeDataChunk strfChunk;219 /**220 * Chunk base class.221 */222 private abstract class Chunk {224 /**225 * The chunkType of the chunk. 
A String with the length of 4 characters.226 */227 protected String chunkType;228 /**229 * The offset of the chunk relative to the start of the230 * ImageOutputStream.231 */232 protected long offset;234 /**235 * Creates a new Chunk at the current position of the ImageOutputStream.236 * @param chunkType The chunkType of the chunk. A string with a length of 4 characters.237 */238 public Chunk(String chunkType) throws IOException {239 this.chunkType = chunkType;240 offset = getRelativeStreamPosition();241 }243 /**244 * Writes the chunk to the ImageOutputStream and disposes it.245 */246 public abstract void finish() throws IOException;248 /**249 * Returns the size of the chunk including the size of the chunk header.250 * @return The size of the chunk.251 */252 public abstract long size();253 }255 /**256 * A CompositeChunk contains an ordered list of Chunks.257 */258 private class CompositeChunk extends Chunk {260 /**261 * The type of the composite. A String with the length of 4 characters.262 */263 protected String compositeType;264 private LinkedList<Chunk> children;265 private boolean finished;267 /**268 * Creates a new CompositeChunk at the current position of the269 * ImageOutputStream.270 * @param compositeType The type of the composite.271 * @param chunkType The type of the chunk.272 */273 public CompositeChunk(String compositeType, String chunkType) throws IOException {274 super(chunkType);275 this.compositeType = compositeType;276 //out.write277 out.writeLong(0); // make room for the chunk header278 out.writeInt(0); // make room for the chunk header279 children = new LinkedList<Chunk>();280 }282 public void add(Chunk child) throws IOException {283 if (children.size() > 0) {284 children.getLast().finish();285 }286 children.add(child);287 }289 /**290 * Writes the chunk and all its children to the ImageOutputStream291 * and disposes of all resources held by the chunk.292 * @throws java.io.IOException293 */294 @Override295 public void finish() throws IOException 
{296 if (!finished) {297 if (size() > 0xffffffffL) {298 throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size());299 }301 long pointer = getRelativeStreamPosition();302 seekRelative(offset);304 DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);305 headerData.writeType(compositeType);306 headerData.writeUInt(size() - 8);307 headerData.writeType(chunkType);308 for (Chunk child : children) {309 child.finish();310 }311 seekRelative(pointer);312 if (size() % 2 == 1) {313 out.writeByte(0); // write pad byte314 }315 finished = true;316 }317 }319 @Override320 public long size() {321 long length = 12;322 for (Chunk child : children) {323 length += child.size() + child.size() % 2;324 }325 return length;326 }327 }329 /**330 * Data Chunk.331 */332 private class DataChunk extends Chunk {334 private DataChunkOutputStream data;335 private boolean finished;337 /**338 * Creates a new DataChunk at the current position of the339 * ImageOutputStream.340 * @param chunkType The chunkType of the chunk.341 */342 public DataChunk(String name) throws IOException {343 super(name);344 out.writeLong(0); // make room for the chunk header345 data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);346 }348 public DataChunkOutputStream getOutputStream() {349 if (finished) {350 throw new IllegalStateException("DataChunk is finished");351 }352 return data;353 }355 /**356 * Returns the offset of this chunk to the beginning of the random access file357 * @return358 */359 public long getOffset() {360 return offset;361 }363 @Override364 public void finish() throws IOException {365 if (!finished) {366 long sizeBefore = size();368 if (size() > 0xffffffffL) {369 throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size());370 }372 long pointer = getRelativeStreamPosition();373 seekRelative(offset);375 DataChunkOutputStream headerData = new DataChunkOutputStream(new 
ImageOutputStreamAdapter(out),false);376 headerData.writeType(chunkType);377 headerData.writeUInt(size() - 8);378 seekRelative(pointer);379 if (size() % 2 == 1) {380 out.writeByte(0); // write pad byte381 }382 finished = true;383 long sizeAfter = size();384 if (sizeBefore != sizeAfter) {385 System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter);386 }387 }388 }390 @Override391 public long size() {392 return 8 + data.size();393 }394 }396 /**397 * A DataChunk with a fixed size.398 */399 private class FixedSizeDataChunk extends Chunk {401 private DataChunkOutputStream data;402 private boolean finished;403 private long fixedSize;405 /**406 * Creates a new DataChunk at the current position of the407 * ImageOutputStream.408 * @param chunkType The chunkType of the chunk.409 */410 public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException {411 super(chunkType);412 this.fixedSize = fixedSize;413 data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);414 data.writeType(chunkType);415 data.writeUInt(fixedSize);416 data.clearCount();418 // Fill fixed size with nulls419 byte[] buf = new byte[(int) Math.min(512, fixedSize)];420 long written = 0;421 while (written < fixedSize) {422 data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written));423 written += Math.min(buf.length, fixedSize - written);424 }425 if (fixedSize % 2 == 1) {426 out.writeByte(0); // write pad byte427 }428 seekToStartOfData();429 }431 public DataChunkOutputStream getOutputStream() {432 /*if (finished) {433 throw new IllegalStateException("DataChunk is finished");434 }*/435 return data;436 }438 /**439 * Returns the offset of this chunk to the beginning of the random access file440 * @return441 */442 public long getOffset() {443 return offset;444 }446 public void seekToStartOfData() throws IOException {447 seekRelative(offset + 8);448 data.clearCount();449 }451 public void seekToEndOfChunk() throws IOException {452 seekRelative(offset + 8 + fixedSize + 
fixedSize % 2);453 }455 @Override456 public void finish() throws IOException {457 if (!finished) {458 finished = true;459 }460 }462 @Override463 public long size() {464 return 8 + fixedSize;465 }466 }468 /**469 * Creates a new AVI file with the specified video format and470 * frame rate. The video has 24 bits per pixel.471 *472 * @param file the output file473 * @param format Selects an encoder for the video format.474 * @param bitsPerPixel the number of bits per pixel.475 * @exception IllegalArgumentException if videoFormat is null or if476 * frame rate is <= 0477 */478 public AVIOutputStream(File file, VideoFormat format) throws IOException {479 this(file,format,24);480 }481 /**482 * Creates a new AVI file with the specified video format and483 * frame rate.484 *485 * @param file the output file486 * @param format Selects an encoder for the video format.487 * @param bitsPerPixel the number of bits per pixel.488 * @exception IllegalArgumentException if videoFormat is null or if489 * frame rate is <= 0490 */491 public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException {492 if (format == null) {493 throw new IllegalArgumentException("format must not be null");494 }496 if (file.exists()) {497 file.delete();498 }499 this.out = new FileImageOutputStream(file);500 this.streamOffset = 0;501 this.videoFormat = format;502 this.videoFrames = new LinkedList<Sample>();503 this.imgDepth = bitsPerPixel;504 if (imgDepth == 4) {505 byte[] gray = new byte[16];506 for (int i = 0; i < gray.length; i++) {507 gray[i] = (byte) ((i << 4) | i);508 }509 palette = new IndexColorModel(4, 16, gray, gray, gray);510 } else if (imgDepth == 8) {511 byte[] gray = new byte[256];512 for (int i = 0; i < gray.length; i++) {513 gray[i] = (byte) i;514 }515 palette = new IndexColorModel(8, 256, gray, gray, gray);516 }518 }520 /**521 * Creates a new AVI output stream with the specified video format and522 * framerate.523 *524 * @param out the underlying output stream525 * 
@param format Selects an encoder for the video format.526 * @exception IllegalArgumentException if videoFormat is null or if527 * framerate is <= 0528 */529 public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {530 if (format == null) {531 throw new IllegalArgumentException("format must not be null");532 }533 this.out = out;534 this.streamOffset = out.getStreamPosition();535 this.videoFormat = format;536 this.videoFrames = new LinkedList<Sample>();537 }539 /**540 * Used with frameRate to specify the time scale that this stream will use.541 * Dividing frameRate by timeScale gives the number of samples per second.542 * For video streams, this is the frame rate. For audio streams, this rate543 * corresponds to the time needed to play nBlockAlign bytes of audio, which544 * for PCM audio is the just the sample rate.545 * <p>546 * The default value is 1.547 *548 * @param newValue549 */550 public void setTimeScale(int newValue) {551 if (newValue <= 0) {552 throw new IllegalArgumentException("timeScale must be greater 0");553 }554 this.timeScale = newValue;555 }557 /**558 * Returns the time scale of this media.559 *560 * @return time scale561 */562 public int getTimeScale() {563 return timeScale;564 }566 /**567 * Sets the rate of video frames in time scale units.568 * <p>569 * The default value is 30. Together with the default value 1 of timeScale570 * this results in 30 frames pers second.571 *572 * @param newValue573 */574 public void setFrameRate(int newValue) {575 if (newValue <= 0) {576 throw new IllegalArgumentException("frameDuration must be greater 0");577 }578 if (state == States.STARTED) {579 throw new IllegalStateException("frameDuration must be set before the first frame is written");580 }581 this.frameRate = newValue;582 }584 /**585 * Returns the frame rate of this media.586 *587 * @return frame rate588 */589 public int getFrameRate() {590 return frameRate;591 }593 /** Sets the global color palette. 
*/594 public void setPalette(IndexColorModel palette) {595 this.palette = palette;596 }598 /**599 * Sets the compression quality of the video track.600 * A value of 0 stands for "high compression is important" a value of601 * 1 for "high image quality is important".602 * <p>603 * Changing this value affects frames which are subsequently written604 * to the AVIOutputStream. Frames which have already been written605 * are not changed.606 * <p>607 * This value has only effect on videos encoded with JPG format.608 * <p>609 * The default value is 0.9.610 *611 * @param newValue612 */613 public void setVideoCompressionQuality(float newValue) {614 this.quality = newValue;615 }617 /**618 * Returns the video compression quality.619 *620 * @return video compression quality621 */622 public float getVideoCompressionQuality() {623 return quality;624 }626 /**627 * Sets the dimension of the video track.628 * <p>629 * You need to explicitly set the dimension, if you add all frames from630 * files or input streams.631 * <p>632 * If you add frames from buffered images, then AVIOutputStream633 * can determine the video dimension from the image width and height.634 *635 * @param width Must be greater than 0.636 * @param height Must be greater than 0.637 */638 public void setVideoDimension(int width, int height) {639 if (width < 1 || height < 1) {640 throw new IllegalArgumentException("width and height must be greater zero.");641 }642 this.imgWidth = width;643 this.imgHeight = height;644 }646 /**647 * Gets the dimension of the video track.648 * <p>649 * Returns null if the dimension is not known.650 */651 public Dimension getVideoDimension() {652 if (imgWidth < 1 || imgHeight < 1) {653 return null;654 }655 return new Dimension(imgWidth, imgHeight);656 }658 /**659 * Sets the state of the QuickTimeOutpuStream to started.660 * <p>661 * If the state is changed by this method, the prolog is662 * written.663 */664 private void ensureStarted() throws IOException {665 if (state != 
States.STARTED) {666 creationTime = new Date();667 writeProlog();668 state = States.STARTED;669 }670 }672 /**673 * Writes a frame to the video track.674 * <p>675 * If the dimension of the video track has not been specified yet, it676 * is derived from the first buffered image added to the AVIOutputStream.677 *678 * @param image The frame image.679 *680 * @throws IllegalArgumentException if the duration is less than 1, or681 * if the dimension of the frame does not match the dimension of the video682 * track.683 * @throws IOException if writing the image failed.684 */685 public void writeFrame(BufferedImage image) throws IOException {686 ensureOpen();687 ensureStarted();689 // Get the dimensions of the first image690 if (imgWidth == -1) {691 imgWidth = image.getWidth();692 imgHeight = image.getHeight();693 } else {694 // The dimension of the image must match the dimension of the video track695 if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) {696 throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size()697 + "] (width=" + image.getWidth() + ", height=" + image.getHeight()698 + ") differs from image[0] (width="699 + imgWidth + ", height=" + imgHeight);700 }701 }703 DataChunk videoFrameChunk;704 long offset = getRelativeStreamPosition();705 boolean isSync = true;706 switch (videoFormat) {707 case RAW: {708 switch (imgDepth) {709 case 4: {710 IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();711 int[] imgRGBs = new int[16];712 imgPalette.getRGBs(imgRGBs);713 int[] previousRGBs = new int[16];714 if (previousPalette == null) {715 previousPalette = palette;716 }717 previousPalette.getRGBs(previousRGBs);718 if (!Arrays.equals(imgRGBs, previousRGBs)) {719 previousPalette = imgPalette;720 DataChunk paletteChangeChunk = new DataChunk("00pc");721 /*722 int first = imgPalette.getMapSize();723 int last = -1;724 for (int i = 0; i < 16; i++) {725 if (previousRGBs[i] != imgRGBs[i] && i < first) {726 first = i;727 }728 if 
(previousRGBs[i] != imgRGBs[i] && i > last) {729 last = i;730 }731 }*/732 int first = 0;733 int last = imgPalette.getMapSize() - 1;734 /*735 * typedef struct {736 BYTE bFirstEntry;737 BYTE bNumEntries;738 WORD wFlags;739 PALETTEENTRY peNew[];740 } AVIPALCHANGE;741 *742 * typedef struct tagPALETTEENTRY {743 BYTE peRed;744 BYTE peGreen;745 BYTE peBlue;746 BYTE peFlags;747 } PALETTEENTRY;748 */749 DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();750 pOut.writeByte(first);//bFirstEntry751 pOut.writeByte(last - first + 1);//bNumEntries752 pOut.writeShort(0);//wFlags754 for (int i = first; i <= last; i++) {755 pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red756 pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green757 pOut.writeByte(imgRGBs[i] & 0xff); // blue758 pOut.writeByte(0); // reserved*/759 }761 moviChunk.add(paletteChangeChunk);762 paletteChangeChunk.finish();763 long length = getRelativeStreamPosition() - offset;764 videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));765 offset = getRelativeStreamPosition();766 }768 videoFrameChunk = new DataChunk("00db");769 byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();770 byte[] rgb4 = new byte[imgWidth / 2];771 for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down772 for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) {773 rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf));774 }775 videoFrameChunk.getOutputStream().write(rgb4);776 }777 break;778 }779 case 8: {780 IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();781 int[] imgRGBs = new int[256];782 imgPalette.getRGBs(imgRGBs);783 int[] previousRGBs = new int[256];784 if (previousPalette == null) {785 previousPalette = palette;786 }787 previousPalette.getRGBs(previousRGBs);788 if (!Arrays.equals(imgRGBs, previousRGBs)) {789 previousPalette = imgPalette;790 DataChunk paletteChangeChunk = new DataChunk("00pc");791 /*792 
int first = imgPalette.getMapSize();793 int last = -1;794 for (int i = 0; i < 16; i++) {795 if (previousRGBs[i] != imgRGBs[i] && i < first) {796 first = i;797 }798 if (previousRGBs[i] != imgRGBs[i] && i > last) {799 last = i;800 }801 }*/802 int first = 0;803 int last = imgPalette.getMapSize() - 1;804 /*805 * typedef struct {806 BYTE bFirstEntry;807 BYTE bNumEntries;808 WORD wFlags;809 PALETTEENTRY peNew[];810 } AVIPALCHANGE;811 *812 * typedef struct tagPALETTEENTRY {813 BYTE peRed;814 BYTE peGreen;815 BYTE peBlue;816 BYTE peFlags;817 } PALETTEENTRY;818 */819 DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();820 pOut.writeByte(first);//bFirstEntry821 pOut.writeByte(last - first + 1);//bNumEntries822 pOut.writeShort(0);//wFlags824 for (int i = first; i <= last; i++) {825 pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red826 pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green827 pOut.writeByte(imgRGBs[i] & 0xff); // blue828 pOut.writeByte(0); // reserved*/829 }831 moviChunk.add(paletteChangeChunk);832 paletteChangeChunk.finish();833 long length = getRelativeStreamPosition() - offset;834 videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));835 offset = getRelativeStreamPosition();836 }838 videoFrameChunk = new DataChunk("00db");839 byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();840 for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down841 videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth);842 }843 break;844 }845 default: {846 videoFrameChunk = new DataChunk("00db");847 WritableRaster raster = image.getRaster();848 int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data849 byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data850 for (int y = imgHeight - 1; y >= 0; --y) { // Upside down851 raster.getPixels(0, y, imgWidth, 1, raw);852 for (int x = 0, n = 
imgWidth * 3; x < n; x += 3) {853 bytes[x + 2] = (byte) raw[x]; // Blue854 bytes[x + 1] = (byte) raw[x + 1]; // Green855 bytes[x] = (byte) raw[x + 2]; // Red856 }857 videoFrameChunk.getOutputStream().write(bytes);858 }859 break;860 }861 }862 break;863 }865 case JPG: {866 videoFrameChunk = new DataChunk("00dc");867 ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next();868 ImageWriteParam iwParam = iw.getDefaultWriteParam();869 iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);870 iwParam.setCompressionQuality(quality);871 MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());872 iw.setOutput(imgOut);873 IIOImage img = new IIOImage(image, null, null);874 iw.write(null, img, iwParam);875 iw.dispose();876 break;877 }878 case PNG:879 default: {880 videoFrameChunk = new DataChunk("00dc");881 ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next();882 ImageWriteParam iwParam = iw.getDefaultWriteParam();883 MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());884 iw.setOutput(imgOut);885 IIOImage img = new IIOImage(image, null, null);886 iw.write(null, img, iwParam);887 iw.dispose();888 break;889 }890 }891 long length = getRelativeStreamPosition() - offset;892 moviChunk.add(videoFrameChunk);893 videoFrameChunk.finish();895 videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync));896 if (getRelativeStreamPosition() > 1L << 32) {897 throw new IOException("AVI file is larger than 4 GB");898 }899 }901 /**902 * Writes a frame from a file to the video track.903 * <p>904 * This method does not inspect the contents of the file.905 * For example, Its your responsibility to only add JPG files if you have906 * chosen the JPEG video format.907 * <p>908 * If you add all frames from files or from input streams, then you909 * have to explicitly set the dimension of the video track 
before you910 * call finish() or close().911 *912 * @param file The file which holds the image data.913 *914 * @throws IllegalStateException if the duration is less than 1.915 * @throws IOException if writing the image failed.916 */917 public void writeFrame(File file) throws IOException {918 FileInputStream in = null;919 try {920 in = new FileInputStream(file);921 writeFrame(in);922 } finally {923 if (in != null) {924 in.close();925 }926 }927 }929 /**930 * Writes a frame to the video track.931 * <p>932 * This method does not inspect the contents of the file.933 * For example, its your responsibility to only add JPG files if you have934 * chosen the JPEG video format.935 * <p>936 * If you add all frames from files or from input streams, then you937 * have to explicitly set the dimension of the video track before you938 * call finish() or close().939 *940 * @param in The input stream which holds the image data.941 *942 * @throws IllegalArgumentException if the duration is less than 1.943 * @throws IOException if writing the image failed.944 */945 public void writeFrame(InputStream in) throws IOException {946 ensureOpen();947 ensureStarted();949 DataChunk videoFrameChunk = new DataChunk(950 videoFormat == VideoFormat.RAW ? 
"00db" : "00dc");951 moviChunk.add(videoFrameChunk);952 OutputStream mdatOut = videoFrameChunk.getOutputStream();953 long offset = getRelativeStreamPosition();954 byte[] buf = new byte[512];955 int len;956 while ((len = in.read(buf)) != -1) {957 mdatOut.write(buf, 0, len);958 }959 long length = getRelativeStreamPosition() - offset;960 videoFrameChunk.finish();961 videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true));962 if (getRelativeStreamPosition() > 1L << 32) {963 throw new IOException("AVI file is larger than 4 GB");964 }965 }967 /**968 * Closes the movie file as well as the stream being filtered.969 *970 * @exception IOException if an I/O error has occurred971 */972 public void close() throws IOException {973 if (state == States.STARTED) {974 finish();975 }976 if (state != States.CLOSED) {977 out.close();978 state = States.CLOSED;979 }980 }982 /**983 * Finishes writing the contents of the AVI output stream without closing984 * the underlying stream. 
     * Use this method when applying multiple filters
     * in succession to the same output stream.
     *
     * @exception IllegalStateException if the dimension of the video track
     * has not been specified or determined yet.
     * @exception IOException if an I/O exception has occurred
     */
    public void finish() throws IOException {
        ensureOpen();
        if (state != States.FINISHED) {
            if (imgWidth == -1 || imgHeight == -1) {
                throw new IllegalStateException("image width and height must be specified");
            }

            // Close the 'movi' list, then back-patch all header chunks with the
            // values that are only known once every frame has been written.
            moviChunk.finish();
            writeEpilog();
            state = States.FINISHED;
            // Reset dimensions so a subsequent start() must specify them again.
            imgWidth = imgHeight = -1;
        }
    }

    /**
     * Checks that this stream has not been closed.
     *
     * @throws IOException if the stream is in the CLOSED state.
     */
    private void ensureOpen() throws IOException {
        if (state == States.CLOSED) {
            throw new IOException("Stream closed");
        }
    }

    /** Gets the position relative to the beginning of the QuickTime stream.
     * <p>
     * Usually this value is equal to the stream position of the underlying
     * ImageOutputStream, but can be larger if the underlying stream already
     * contained data.
     *
     * @return The relative stream position.
     * @throws IOException if querying the underlying stream position fails.
     */
    private long getRelativeStreamPosition() throws IOException {
        return out.getStreamPosition() - streamOffset;
    }

    /** Seeks relative to the beginning of the QuickTime stream.
     * <p>
     * Usually this is equal to seeking in the underlying ImageOutputStream, but
     * can be different if the underlying stream already contained data.
     *
     * @param newPosition position relative to the start of this AVI stream.
     * @throws IOException if seeking in the underlying stream fails.
     */
    private void seekRelative(long newPosition) throws IOException {
        out.seek(newPosition + streamOffset);
    }

    /** Writes the RIFF chunk skeleton of the AVI file.
     * <p>
     * The header chunks (avih, strh, strf) are written as fixed-size
     * placeholders here and are filled in by {@link #writeEpilog} once all
     * frames have been written and their sizes/offsets are known.
     *
     * @throws IOException if writing to the underlying stream fails.
     */
    private void writeProlog() throws IOException {
        // The file has the following structure:
        //
        // .RIFF AVI
        // ..avih (AVI Header Chunk)
        // ..LIST strl
        // ...strh (Stream Header Chunk)
        // ...strf (Stream Format Chunk)
        // ..LIST movi
        // ...00dc (Compressed video data chunk in Track 00, repeated for each frame)
        // ..idx1 (List of video data chunks and their location in the file)

        // The RIFF AVI Chunk holds the complete movie
        aviChunk = new CompositeChunk("RIFF", "AVI ");
        CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl");

        // Write empty AVI Main Header Chunk - we fill the data in later.
        // 56 bytes = size of AVIMAINHEADER without fcc and cb fields.
        aviChunk.add(hdrlChunk);
        avihChunk = new FixedSizeDataChunk("avih", 56);
        avihChunk.seekToEndOfChunk();
        hdrlChunk.add(avihChunk);

        CompositeChunk strlChunk = new CompositeChunk("LIST", "strl");
        hdrlChunk.add(strlChunk);

        // Write empty AVI Stream Header Chunk - we fill the data in later.
        // 56 bytes = size of AVISTREAMHEADER without fcc and cb fields.
        strhChunk = new FixedSizeDataChunk("strh", 56);
        strhChunk.seekToEndOfChunk();
        strlChunk.add(strhChunk);
        // strf holds a 40-byte BITMAPINFOHEADER plus one 4-byte RGBQUAD per
        // palette entry for indexed-color formats.
        strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4);
        strfChunk.seekToEndOfChunk();
        strlChunk.add(strfChunk);

        moviChunk = new CompositeChunk("LIST", "movi");
        aviChunk.add(moviChunk);
    }

    /** Writes the idx1 index chunk and back-fills the avih, strh and strf
     * header chunks with the values accumulated while writing frames.
     *
     * @throws IOException if writing to the underlying stream fails.
     */
    private void writeEpilog() throws IOException {
        // Compute values
        // NOTE(review): 'duration' is accumulated here but never written into
        // any header field below — looks vestigial; confirm before removing.
        int duration = 0;
        for (Sample s : videoFrames) {
            duration += s.duration;
        }
        // bufferSize = size of the largest frame chunk; used for the
        // dwSuggestedBufferSize fields below.
        long bufferSize = 0;
        for (Sample s : videoFrames) {
            if (s.length > bufferSize) {
                bufferSize = s.length;
            }
        }

        DataChunkOutputStream d;

        /* Create Idx1 Chunk and write data
         * -------------
         typedef struct _avioldindex {
             FOURCC fcc;
             DWORD cb;
             struct _avioldindex_entry {
                 DWORD dwChunkId;
                 DWORD dwFlags;
                 DWORD dwOffset;
                 DWORD dwSize;
             } aIndex[];
         } AVIOLDINDEX;
         */
        DataChunk idx1Chunk = new DataChunk("idx1");
        aviChunk.add(idx1Chunk);
        d = idx1Chunk.getOutputStream();
        // Index offsets are relative to the start of the 'movi' list data
        // (skip the 8-byte chunk header).
        long moviListOffset = moviChunk.offset + 8;
        //moviListOffset = 0;
        for (Sample f : videoFrames) {

            d.writeType(f.chunkType); // dwChunkId
            // Specifies a FOURCC that identifies a stream in the AVI file. The
            // FOURCC must have the form 'xxyy' where xx is the stream number and yy
            // is a two-character code that identifies the contents of the stream:
            //
            // Two-character code    Description
            //     db                Uncompressed video frame
            //     dc                Compressed video frame
            //     pc                Palette change
            //     wb                Audio data

            d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)//
                    | (f.isSync ? 0x10 : 0x0)); // dwFlags
            // Specifies a bitwise combination of zero or more of the following
            // flags:
            //
            // Value    Name            Description
            // 0x10     AVIIF_KEYFRAME  The data chunk is a key frame.
            // 0x1      AVIIF_LIST      The data chunk is a 'rec ' list.
            // 0x100    AVIIF_NO_TIME   The data chunk does not affect the timing of the
            //                          stream. For example, this flag should be set for
            //                          palette changes.

            d.writeUInt(f.offset - moviListOffset); // dwOffset
            // Specifies the location of the data chunk in the file. The value
            // should be specified as an offset, in bytes, from the start of the
            // 'movi' list; however, in some AVI files it is given as an offset from
            // the start of the file.

            d.writeUInt(f.length); // dwSize
            // Specifies the size of the data chunk, in bytes.
        }
        idx1Chunk.finish();

        /* Write Data into AVI Main Header Chunk
         * -------------
         * The AVIMAINHEADER structure defines global information in an AVI file.
         * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx
         typedef struct _avimainheader {
             FOURCC fcc;
             DWORD cb;
             DWORD dwMicroSecPerFrame;
             DWORD dwMaxBytesPerSec;
             DWORD dwPaddingGranularity;
             DWORD dwFlags;
             DWORD dwTotalFrames;
             DWORD dwInitialFrames;
             DWORD dwStreams;
             DWORD dwSuggestedBufferSize;
             DWORD dwWidth;
             DWORD dwHeight;
             DWORD dwReserved[4];
         } AVIMAINHEADER; */
        avihChunk.seekToStartOfData();
        d = avihChunk.getOutputStream();

        d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame
        // Specifies the number of microseconds between frames.
        // This value indicates the overall timing for the file.

        d.writeUInt(0); // dwMaxBytesPerSec
        // Specifies the approximate maximum data rate of the file.
        // This value indicates the number of bytes per second the system
        // must handle to present an AVI sequence as specified by the other
        // parameters contained in the main header and stream header chunks.

        d.writeUInt(0); // dwPaddingGranularity
        // Specifies the alignment for data, in bytes. Pad the data to multiples
        // of this value.

        d.writeUInt(0x10); // dwFlags (0x10 == hasIndex)
        // Contains a bitwise combination of zero or more of the following
        // flags:
        //
        // Value    Name                Description
        // 0x10     AVIF_HASINDEX       Indicates the AVI file has an index.
        // 0x20     AVIF_MUSTUSEINDEX   Indicates that application should use the
        //                              index, rather than the physical ordering of the
        //                              chunks in the file, to determine the order of
        //                              presentation of the data. For example, this flag
        //                              could be used to create a list of frames for
        //                              editing.
        // 0x100    AVIF_ISINTERLEAVED  Indicates the AVI file is interleaved.
        // 0x1000   AVIF_WASCAPTUREFILE Indicates the AVI file is a specially
        //                              allocated file used for capturing real-time
        //                              video. Applications should warn the user before
        //                              writing over a file with this flag set because
        //                              the user probably defragmented this file.
        // 0x20000  AVIF_COPYRIGHTED    Indicates the AVI file contains copyrighted
        //                              data and software. When this flag is used,
        //                              software should not permit the data to be
        //                              duplicated.

        d.writeUInt(videoFrames.size()); // dwTotalFrames
        // Specifies the total number of frames of data in the file.

        d.writeUInt(0); // dwInitialFrames
        // Specifies the initial frame for interleaved files. Noninterleaved
        // files should specify zero. If you are creating interleaved files,
        // specify the number of frames in the file prior to the initial frame
        // of the AVI sequence in this member.
        // To give the audio driver enough audio to work with, the audio data in
        // an interleaved file must be skewed from the video data. Typically,
        // the audio data should be moved forward enough frames to allow
        // approximately 0.75 seconds of audio data to be preloaded. The
        // dwInitialFrames member should be set to the number of frames the
        // audio is skewed. Also set the same value for the dwInitialFrames
        // member of the AVISTREAMHEADER structure in the audio stream header.

        d.writeUInt(1); // dwStreams
        // Specifies the number of streams in the file. For example, a file with
        // audio and video has two streams.

        d.writeUInt(bufferSize); // dwSuggestedBufferSize
        // Specifies the suggested buffer size for reading the file. Generally,
        // this size should be large enough to contain the largest chunk in the
        // file. If set to zero, or if it is too small, the playback software
        // will have to reallocate memory during playback, which will reduce
        // performance. For an interleaved file, the buffer size should be large
        // enough to read an entire record, and not just a chunk.

        d.writeUInt(imgWidth); // dwWidth
        // Specifies the width of the AVI file in pixels.

        d.writeUInt(imgHeight); // dwHeight
        // Specifies the height of the AVI file in pixels.

        d.writeUInt(0); // dwReserved[0]
        d.writeUInt(0); // dwReserved[1]
        d.writeUInt(0); // dwReserved[2]
        d.writeUInt(0); // dwReserved[3]
        // Reserved. Set this array to zero.

        /* Write Data into AVI Stream Header Chunk
         * -------------
         * The AVISTREAMHEADER structure contains information about one stream
         * in an AVI file.
         * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx
         typedef struct _avistreamheader {
             FOURCC fcc;
             DWORD cb;
             FOURCC fccType;
             FOURCC fccHandler;
             DWORD dwFlags;
             WORD wPriority;
             WORD wLanguage;
             DWORD dwInitialFrames;
             DWORD dwScale;
             DWORD dwRate;
             DWORD dwStart;
             DWORD dwLength;
             DWORD dwSuggestedBufferSize;
             DWORD dwQuality;
             DWORD dwSampleSize;
             struct {
                 short int left;
                 short int top;
                 short int right;
                 short int bottom;
             } rcFrame;
         } AVISTREAMHEADER;
         */
        strhChunk.seekToStartOfData();
        d = strhChunk.getOutputStream();
        d.writeType("vids"); // fccType - vids for video stream
        // Contains a FOURCC that specifies the type of the data contained in
        // the stream. The following standard AVI values for video and audio are
        // defined:
        //
        // FOURCC   Description
        // 'auds'   Audio stream
        // 'mids'   MIDI stream
        // 'txts'   Text stream
        // 'vids'   Video stream

        switch (videoFormat) {
            case RAW:
                d.writeType("DIB "); // fccHandler - DIB for Raw RGB
                break;
            case RLE:
                d.writeType("RLE "); // fccHandler - Microsoft RLE
                break;
            case JPG:
                d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG
                break;
            case PNG:
            default:
                d.writeType("png "); // fccHandler - png for PNG
                break;
        }
        // Optionally, contains a FOURCC that identifies a specific data
        // handler. The data handler is the preferred handler for the stream.
        // For audio and video streams, this specifies the codec for decoding
        // the stream.

        if (imgDepth <= 8) {
            d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES
        } else {
            d.writeUInt(0); // dwFlags
        }

        // Contains any flags for the data stream. The bits in the high-order
        // word of these flags are specific to the type of data contained in the
        // stream. The following standard flags are defined:
        //
        // Value                    Name        Description
        // AVISF_DISABLED           0x00000001  Indicates this stream should not
        //                                      be enabled by default.
        // AVISF_VIDEO_PALCHANGES   0x00010000
        //                                      Indicates this video stream contains
        //                                      palette changes. This flag warns the playback
        //                                      software that it will need to animate the
        //                                      palette.

        d.writeUShort(0); // wPriority
        // Specifies priority of a stream type. For example, in a file with
        // multiple audio streams, the one with the highest priority might be
        // the default stream.

        d.writeUShort(0); // wLanguage
        // Language tag.

        d.writeUInt(0); // dwInitialFrames
        // Specifies how far audio data is skewed ahead of the video frames in
        // interleaved files. Typically, this is about 0.75 seconds. If you are
        // creating interleaved files, specify the number of frames in the file
        // prior to the initial frame of the AVI sequence in this member. For
        // more information, see the remarks for the dwInitialFrames member of
        // the AVIMAINHEADER structure.

        d.writeUInt(timeScale); // dwScale
        // Used with dwRate to specify the time scale that this stream will use.
        // Dividing dwRate by dwScale gives the number of samples per second.
        // For video streams, this is the frame rate. For audio streams, this
        // rate corresponds to the time needed to play nBlockAlign bytes of
        // audio, which for PCM audio is just the sample rate.

        d.writeUInt(frameRate); // dwRate
        // See dwScale.

        d.writeUInt(0); // dwStart
        // Specifies the starting time for this stream. The units are defined by
        // the dwRate and dwScale members in the main file header. Usually, this
        // is zero, but it can specify a delay time for a stream that does not
        // start concurrently with the file.

        d.writeUInt(videoFrames.size()); // dwLength
        // Specifies the length of this stream. The units are defined by the
        // dwRate and dwScale members of the stream's header.

        d.writeUInt(bufferSize); // dwSuggestedBufferSize
        // Specifies how large a buffer should be used to read this stream.
        // Typically, this contains a value corresponding to the largest chunk
        // present in the stream. Using the correct buffer size makes playback
        // more efficient. Use zero if you do not know the correct buffer size.

        d.writeInt(-1); // dwQuality
        // Specifies an indicator of the quality of the data in the stream.
        // Quality is represented as a number between 0 and 10,000.
        // For compressed data, this typically represents the value of the
        // quality parameter passed to the compression software. If set to -1,
        // drivers use the default quality value.

        d.writeUInt(0); // dwSampleSize
        // Specifies the size of a single sample of data. This is set to zero
        // if the samples can vary in size. If this number is nonzero, then
        // multiple samples of data can be grouped into a single chunk within
        // the file. If it is zero, each sample of data (such as a video frame)
        // must be in a separate chunk. For video streams, this number is
        // typically zero, although it can be nonzero if all video frames are
        // the same size. For audio streams, this number should be the same as
        // the nBlockAlign member of the WAVEFORMATEX structure describing the
        // audio.

        d.writeUShort(0); // rcFrame.left
        d.writeUShort(0); // rcFrame.top
        d.writeUShort(imgWidth); // rcFrame.right
        d.writeUShort(imgHeight); // rcFrame.bottom
        // Specifies the destination rectangle for a text or video stream within
        // the movie rectangle specified by the dwWidth and dwHeight members of
        // the AVI main header structure. The rcFrame member is typically used
        // in support of multiple video streams. Set this rectangle to the
        // coordinates corresponding to the movie rectangle to update the whole
        // movie rectangle. Units for this member are pixels. The upper-left
        // corner of the destination rectangle is relative to the upper-left
        // corner of the movie rectangle.

        /* Write BITMAPINFOHEADER Data into AVI Stream Format Chunk
         * -------------
         * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
         typedef struct tagBITMAPINFOHEADER {
             DWORD biSize;
             LONG biWidth;
             LONG biHeight;
             WORD biPlanes;
             WORD biBitCount;
             DWORD biCompression;
             DWORD biSizeImage;
             LONG biXPelsPerMeter;
             LONG biYPelsPerMeter;
             DWORD biClrUsed;
             DWORD biClrImportant;
         } BITMAPINFOHEADER;
         */
        strfChunk.seekToStartOfData();
        d = strfChunk.getOutputStream();
        d.writeUInt(40); // biSize
        // Specifies the number of bytes required by the structure. This value
        // does not include the size of the color table or the size of the color
        // masks, if they are appended to the end of structure.

        d.writeInt(imgWidth); // biWidth
        // Specifies the width of the bitmap, in pixels.

        d.writeInt(imgHeight); // biHeight
        // Specifies the height of the bitmap, in pixels.
        //
        // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
        // a bottom-up DIB with the origin at the lower left corner. If biHeight
        // is negative, the bitmap is a top-down DIB with the origin at the
        // upper left corner.
        // For YUV bitmaps, the bitmap is always top-down, regardless of the
        // sign of biHeight. Decoders should offer YUV formats with positive
        // biHeight, but for backward compatibility they should accept YUV
        // formats with either positive or negative biHeight.
        // For compressed formats, biHeight must be positive, regardless of
        // image orientation.

        d.writeShort(1); // biPlanes
        // Specifies the number of planes for the target device. This value must
        // be set to 1.

        d.writeShort(imgDepth); // biBitCount
        // Specifies the number of bits per pixel (bpp). For uncompressed
        // formats, this value is the average number of bits per pixel. For
        // compressed formats, this value is the implied bit depth of the
        // uncompressed image, after the image has been decoded.

        switch (videoFormat) {
            case RAW:
            default:
                d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
                break;
            case RLE:
                if (imgDepth == 8) {
                    d.writeInt(1); // biCompression - BI_RLE8
                } else if (imgDepth == 4) {
                    d.writeInt(2); // biCompression - BI_RLE4
                } else {
                    throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
                }
                break;
            case JPG:
                d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
                break;
            case PNG:
                d.writeType("png "); // biCompression - png for PNG
                break;
        }
        // For compressed video and YUV formats, this member is a FOURCC code,
        // specified as a DWORD in little-endian order. For example, YUYV video
        // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
        // Codes.
        //
        // For uncompressed RGB formats, the following values are possible:
        //
        // Value            Description
        // BI_RGB           0x00000000 Uncompressed RGB.
        // BI_BITFIELDS     0x00000003 Uncompressed RGB with color masks.
        //                             Valid for 16-bpp and 32-bpp bitmaps.
        //
        // Note that BI_JPG and BI_PNG are not valid video formats.
        //
        // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
        // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
        // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
        // structure to determine the specific RGB type.

        switch (videoFormat) {
            case RAW:
                d.writeInt(0); // biSizeImage
                break;
            case RLE:
            case JPG:
            case PNG:
            default:
                // Upper bound: size of the equivalent uncompressed image.
                if (imgDepth == 4) {
                    d.writeInt(imgWidth * imgHeight / 2); // biSizeImage
                } else {
                    int bytesPerPixel = Math.max(1, imgDepth / 8);
                    d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage
                }
                break;
        }
        // Specifies the size, in bytes, of the image. This can be set to 0 for
        // uncompressed RGB bitmaps.

        d.writeInt(0); // biXPelsPerMeter
        // Specifies the horizontal resolution, in pixels per meter, of the
        // target device for the bitmap.

        d.writeInt(0); // biYPelsPerMeter
        // Specifies the vertical resolution, in pixels per meter, of the target
        // device for the bitmap.

        d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed
        // Specifies the number of color indices in the color table that are
        // actually used by the bitmap.

        d.writeInt(0); // biClrImportant
        // Specifies the number of color indices that are considered important
        // for displaying the bitmap. If this value is zero, all colors are
        // important.

        if (palette != null) {
            // Append the color table: one little-endian RGBQUAD per entry.
            for (int i = 0, n = palette.getMapSize(); i < n; ++i) {
                /*
                 * typedef struct tagRGBQUAD {
                     BYTE rgbBlue;
                     BYTE rgbGreen;
                     BYTE rgbRed;
                     BYTE rgbReserved; // This member is reserved and must be zero.
                 } RGBQUAD;
                 */
                d.write(palette.getBlue(i));
                d.write(palette.getGreen(i));
                d.write(palette.getRed(i));
                d.write(0);
            }
        }

        // -----------------
        // Patch all composite chunk sizes now that the file is complete.
        aviChunk.finish();
    }
}