rlm@9: /** rlm@9: * @(#)AVIOutputStream.java 1.5.1 2011-01-17 rlm@9: * rlm@9: * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland. rlm@9: * All rights reserved. rlm@9: * rlm@9: * You may not use, copy or modify this file, except in compliance with the rlm@9: * license agreement you entered into with Werner Randelshofer. rlm@9: * For details see accompanying license terms. rlm@9: */ rlm@9: package com.aurellem.capture.video; rlm@9: rlm@9: import java.awt.Dimension; rlm@9: import java.awt.image.BufferedImage; rlm@9: import java.awt.image.DataBufferByte; rlm@9: import java.awt.image.IndexColorModel; rlm@9: import java.awt.image.WritableRaster; rlm@9: import java.io.File; rlm@9: import java.io.FileInputStream; rlm@9: import java.io.IOException; rlm@9: import java.io.InputStream; rlm@9: import java.io.OutputStream; rlm@9: import java.util.Arrays; rlm@9: import java.util.Date; rlm@9: import java.util.LinkedList; rlm@9: rlm@9: import javax.imageio.IIOImage; rlm@9: import javax.imageio.ImageIO; rlm@9: import javax.imageio.ImageWriteParam; rlm@9: import javax.imageio.ImageWriter; rlm@9: import javax.imageio.stream.FileImageOutputStream; rlm@9: import javax.imageio.stream.ImageOutputStream; rlm@9: import javax.imageio.stream.MemoryCacheImageOutputStream; rlm@9: rlm@9: /** rlm@9: * This class supports writing of images into an AVI 1.0 video file. rlm@9: *

rlm@9: * The images are written as video frames. rlm@9: *

rlm@9: * Video frames can be encoded with one of the following formats: rlm@9: *

rlm@9: * All frames must have the same format. rlm@9: * When JPG is used each frame can have an individual encoding quality. rlm@9: *

rlm@9: * All frames in an AVI file must have the same duration. The duration can rlm@9: * be set by setting an appropriate pair of values using methods rlm@9: * {@link #setFrameRate} and {@link #setTimeScale}. rlm@9: *

rlm@9: * The length of an AVI 1.0 file is limited to 1 GB. rlm@9: * This class supports lengths of up to 4 GB, but such files may not work on rlm@9: * all players. rlm@9: *

rlm@9: * For detailed information about the AVI RIFF file format see:
rlm@9: * msdn.microsoft.com AVI RIFF
rlm@9: * www.microsoft.com FOURCC for Video Compression
rlm@9: * www.saettler.com RIFF
rlm@9: * rlm@9: * @author Werner Randelshofer rlm@9: * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream.. rlm@9: *
1.5 2011-01-06 Adds support for RLE 8-bit video format. rlm@9: *
1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets rlm@9: * in "idx1" chunk. rlm@9: *
1.3.2 2010-12-27 File size limit is 1 GB. rlm@9: *
1.3.1 2010-07-19 Fixes seeking and calculation of offsets. rlm@9: *
1.3 2010-07-08 Adds constructor with ImageOutputStream. rlm@9: * Added method getVideoDimension(). rlm@9: *
1.2 2009-08-29 Adds support for RAW video format. rlm@9: *
1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih rlm@9: * chunk. Changed the API to reflect that AVI works with frame rates instead of rlm@9: * with frame durations. rlm@9: *
1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG rlm@9: * encoded video. rlm@9: *
1.0 2008-08-11 Created. rlm@9: */ rlm@9: public class AVIOutputStream { rlm@9: rlm@9: /** rlm@9: * Underlying output stream. rlm@9: */ rlm@9: private ImageOutputStream out; rlm@9: /** The offset of the QuickTime stream in the underlying ImageOutputStream. rlm@9: * Normally this is 0 unless the underlying stream already contained data rlm@9: * when it was passed to the constructor. rlm@9: */ rlm@9: private long streamOffset; rlm@9: /** Previous frame for delta compression. */ rlm@9: private Object previousData; rlm@9: rlm@9: /** rlm@9: * Supported video encodings. rlm@9: */ rlm@9: public static enum VideoFormat { rlm@9: rlm@9: RAW, RLE, JPG, PNG; rlm@9: } rlm@9: /** rlm@9: * Current video formats. rlm@9: */ rlm@9: private VideoFormat videoFormat; rlm@9: /** rlm@9: * Quality of JPEG encoded video frames. rlm@9: */ rlm@9: private float quality = 0.9f; rlm@9: /** rlm@9: * Creation time of the movie output stream. rlm@9: */ rlm@9: private Date creationTime; rlm@9: /** rlm@9: * Width of the video frames. All frames must have the same width. rlm@9: * The value -1 is used to mark unspecified width. rlm@9: */ rlm@9: private int imgWidth = -1; rlm@9: /** rlm@9: * Height of the video frames. All frames must have the same height. rlm@9: * The value -1 is used to mark unspecified height. rlm@9: */ rlm@9: private int imgHeight = -1; rlm@9: /** Number of bits per pixel. */ rlm@9: private int imgDepth = 24; rlm@9: /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */ rlm@9: private IndexColorModel palette; rlm@9: private IndexColorModel previousPalette; rlm@9: /** Video encoder. */ rlm@9: rlm@9: /** rlm@9: * The timeScale of the movie. rlm@9: *

rlm@9: * Used with frameRate to specify the time scale that this stream will use. rlm@9: * Dividing frameRate by timeScale gives the number of samples per second. rlm@9: * For video streams, this is the frame rate. For audio streams, this rate rlm@9: * corresponds to the time needed to play nBlockAlign bytes of audio, which rlm@9: * for PCM audio is the just the sample rate. rlm@9: */ rlm@9: private int timeScale = 1; rlm@9: /** rlm@9: * The frameRate of the movie in timeScale units. rlm@9: *

rlm@9: * @see timeScale rlm@9: */ rlm@9: private int frameRate = 30; rlm@9: /** Interval between keyframes. */ rlm@9: private int syncInterval = 30; rlm@9: rlm@9: /** rlm@9: * The states of the movie output stream. rlm@9: */ rlm@9: private static enum States { rlm@9: rlm@9: STARTED, FINISHED, CLOSED; rlm@9: } rlm@9: /** rlm@9: * The current state of the movie output stream. rlm@9: */ rlm@9: private States state = States.FINISHED; rlm@9: rlm@9: /** rlm@9: * AVI stores media data in samples. rlm@9: * A sample is a single element in a sequence of time-ordered data. rlm@9: */ rlm@9: private static class Sample { rlm@9: rlm@9: String chunkType; rlm@9: /** Offset of the sample relative to the start of the AVI file. rlm@9: */ rlm@9: long offset; rlm@9: /** Data length of the sample. */ rlm@9: long length; rlm@9: /** rlm@9: * The duration of the sample in time scale units. rlm@9: */ rlm@9: int duration; rlm@9: /** Whether the sample is a sync-sample. */ rlm@9: boolean isSync; rlm@9: rlm@9: /** rlm@9: * Creates a new sample. rlm@9: * @param duration rlm@9: * @param offset rlm@9: * @param length rlm@9: */ rlm@9: public Sample(String chunkId, int duration, long offset, long length, boolean isSync) { rlm@9: this.chunkType = chunkId; rlm@9: this.duration = duration; rlm@9: this.offset = offset; rlm@9: this.length = length; rlm@9: this.isSync = isSync; rlm@9: } rlm@9: } rlm@9: /** rlm@9: * List of video frames. rlm@9: */ rlm@9: private LinkedList videoFrames; rlm@9: /** rlm@9: * This chunk holds the whole AVI content. rlm@9: */ rlm@9: private CompositeChunk aviChunk; rlm@9: /** rlm@9: * This chunk holds the movie frames. rlm@9: */ rlm@9: private CompositeChunk moviChunk; rlm@9: /** rlm@9: * This chunk holds the AVI Main Header. rlm@9: */ rlm@9: FixedSizeDataChunk avihChunk; rlm@9: /** rlm@9: * This chunk holds the AVI Stream Header. rlm@9: */ rlm@9: FixedSizeDataChunk strhChunk; rlm@9: /** rlm@9: * This chunk holds the AVI Stream Format Header. 
rlm@9: */ rlm@9: FixedSizeDataChunk strfChunk; rlm@9: rlm@9: /** rlm@9: * Chunk base class. rlm@9: */ rlm@9: private abstract class Chunk { rlm@9: rlm@9: /** rlm@9: * The chunkType of the chunk. A String with the length of 4 characters. rlm@9: */ rlm@9: protected String chunkType; rlm@9: /** rlm@9: * The offset of the chunk relative to the start of the rlm@9: * ImageOutputStream. rlm@9: */ rlm@9: protected long offset; rlm@9: rlm@9: /** rlm@9: * Creates a new Chunk at the current position of the ImageOutputStream. rlm@9: * @param chunkType The chunkType of the chunk. A string with a length of 4 characters. rlm@9: */ rlm@9: public Chunk(String chunkType) throws IOException { rlm@9: this.chunkType = chunkType; rlm@9: offset = getRelativeStreamPosition(); rlm@9: } rlm@9: rlm@9: /** rlm@9: * Writes the chunk to the ImageOutputStream and disposes it. rlm@9: */ rlm@9: public abstract void finish() throws IOException; rlm@9: rlm@9: /** rlm@9: * Returns the size of the chunk including the size of the chunk header. rlm@9: * @return The size of the chunk. rlm@9: */ rlm@9: public abstract long size(); rlm@9: } rlm@9: rlm@9: /** rlm@9: * A CompositeChunk contains an ordered list of Chunks. rlm@9: */ rlm@9: private class CompositeChunk extends Chunk { rlm@9: rlm@9: /** rlm@9: * The type of the composite. A String with the length of 4 characters. rlm@9: */ rlm@9: protected String compositeType; rlm@9: private LinkedList children; rlm@9: private boolean finished; rlm@9: rlm@9: /** rlm@9: * Creates a new CompositeChunk at the current position of the rlm@9: * ImageOutputStream. rlm@9: * @param compositeType The type of the composite. rlm@9: * @param chunkType The type of the chunk. 
rlm@9: */ rlm@9: public CompositeChunk(String compositeType, String chunkType) throws IOException { rlm@9: super(chunkType); rlm@9: this.compositeType = compositeType; rlm@9: //out.write rlm@9: out.writeLong(0); // make room for the chunk header rlm@9: out.writeInt(0); // make room for the chunk header rlm@9: children = new LinkedList(); rlm@9: } rlm@9: rlm@9: public void add(Chunk child) throws IOException { rlm@9: if (children.size() > 0) { rlm@9: children.getLast().finish(); rlm@9: } rlm@9: children.add(child); rlm@9: } rlm@9: rlm@9: /** rlm@9: * Writes the chunk and all its children to the ImageOutputStream rlm@9: * and disposes of all resources held by the chunk. rlm@9: * @throws java.io.IOException rlm@9: */ rlm@9: @Override rlm@9: public void finish() throws IOException { rlm@9: if (!finished) { rlm@9: if (size() > 0xffffffffL) { rlm@9: throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size()); rlm@9: } rlm@9: rlm@9: long pointer = getRelativeStreamPosition(); rlm@9: seekRelative(offset); rlm@9: rlm@9: DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); rlm@9: headerData.writeType(compositeType); rlm@9: headerData.writeUInt(size() - 8); rlm@9: headerData.writeType(chunkType); rlm@9: for (Chunk child : children) { rlm@9: child.finish(); rlm@9: } rlm@9: seekRelative(pointer); rlm@9: if (size() % 2 == 1) { rlm@9: out.writeByte(0); // write pad byte rlm@9: } rlm@9: finished = true; rlm@9: } rlm@9: } rlm@9: rlm@9: @Override rlm@9: public long size() { rlm@9: long length = 12; rlm@9: for (Chunk child : children) { rlm@9: length += child.size() + child.size() % 2; rlm@9: } rlm@9: return length; rlm@9: } rlm@9: } rlm@9: rlm@9: /** rlm@9: * Data Chunk. 
rlm@9: */ rlm@9: private class DataChunk extends Chunk { rlm@9: rlm@9: private DataChunkOutputStream data; rlm@9: private boolean finished; rlm@9: rlm@9: /** rlm@9: * Creates a new DataChunk at the current position of the rlm@9: * ImageOutputStream. rlm@9: * @param chunkType The chunkType of the chunk. rlm@9: */ rlm@9: public DataChunk(String name) throws IOException { rlm@9: super(name); rlm@9: out.writeLong(0); // make room for the chunk header rlm@9: data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false); rlm@9: } rlm@9: rlm@9: public DataChunkOutputStream getOutputStream() { rlm@9: if (finished) { rlm@9: throw new IllegalStateException("DataChunk is finished"); rlm@9: } rlm@9: return data; rlm@9: } rlm@9: rlm@9: /** rlm@9: * Returns the offset of this chunk to the beginning of the random access file rlm@9: * @return rlm@9: */ rlm@9: public long getOffset() { rlm@9: return offset; rlm@9: } rlm@9: rlm@9: @Override rlm@9: public void finish() throws IOException { rlm@9: if (!finished) { rlm@9: long sizeBefore = size(); rlm@9: rlm@9: if (size() > 0xffffffffL) { rlm@9: throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size()); rlm@9: } rlm@9: rlm@9: long pointer = getRelativeStreamPosition(); rlm@9: seekRelative(offset); rlm@9: rlm@9: DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); rlm@9: headerData.writeType(chunkType); rlm@9: headerData.writeUInt(size() - 8); rlm@9: seekRelative(pointer); rlm@9: if (size() % 2 == 1) { rlm@9: out.writeByte(0); // write pad byte rlm@9: } rlm@9: finished = true; rlm@9: long sizeAfter = size(); rlm@9: if (sizeBefore != sizeAfter) { rlm@9: System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter); rlm@9: } rlm@9: } rlm@9: } rlm@9: rlm@9: @Override rlm@9: public long size() { rlm@9: return 8 + data.size(); rlm@9: } rlm@9: } rlm@9: rlm@9: /** rlm@9: * A DataChunk with a fixed size. 
rlm@9: */ rlm@9: private class FixedSizeDataChunk extends Chunk { rlm@9: rlm@9: private DataChunkOutputStream data; rlm@9: private boolean finished; rlm@9: private long fixedSize; rlm@9: rlm@9: /** rlm@9: * Creates a new DataChunk at the current position of the rlm@9: * ImageOutputStream. rlm@9: * @param chunkType The chunkType of the chunk. rlm@9: */ rlm@9: public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException { rlm@9: super(chunkType); rlm@9: this.fixedSize = fixedSize; rlm@9: data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); rlm@9: data.writeType(chunkType); rlm@9: data.writeUInt(fixedSize); rlm@9: data.clearCount(); rlm@9: rlm@9: // Fill fixed size with nulls rlm@9: byte[] buf = new byte[(int) Math.min(512, fixedSize)]; rlm@9: long written = 0; rlm@9: while (written < fixedSize) { rlm@9: data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written)); rlm@9: written += Math.min(buf.length, fixedSize - written); rlm@9: } rlm@9: if (fixedSize % 2 == 1) { rlm@9: out.writeByte(0); // write pad byte rlm@9: } rlm@9: seekToStartOfData(); rlm@9: } rlm@9: rlm@9: public DataChunkOutputStream getOutputStream() { rlm@9: /*if (finished) { rlm@9: throw new IllegalStateException("DataChunk is finished"); rlm@9: }*/ rlm@9: return data; rlm@9: } rlm@9: rlm@9: /** rlm@9: * Returns the offset of this chunk to the beginning of the random access file rlm@9: * @return rlm@9: */ rlm@9: public long getOffset() { rlm@9: return offset; rlm@9: } rlm@9: rlm@9: public void seekToStartOfData() throws IOException { rlm@9: seekRelative(offset + 8); rlm@9: data.clearCount(); rlm@9: } rlm@9: rlm@9: public void seekToEndOfChunk() throws IOException { rlm@9: seekRelative(offset + 8 + fixedSize + fixedSize % 2); rlm@9: } rlm@9: rlm@9: @Override rlm@9: public void finish() throws IOException { rlm@9: if (!finished) { rlm@9: finished = true; rlm@9: } rlm@9: } rlm@9: rlm@9: @Override rlm@9: public long size() { rlm@9: return 8 + fixedSize; rlm@9: 
} rlm@9: } rlm@9: rlm@9: /** rlm@9: * Creates a new AVI file with the specified video format and rlm@9: * frame rate. The video has 24 bits per pixel. rlm@9: * rlm@9: * @param file the output file rlm@9: * @param format Selects an encoder for the video format. rlm@9: * @param bitsPerPixel the number of bits per pixel. rlm@9: * @exception IllegalArgumentException if videoFormat is null or if rlm@9: * frame rate is <= 0 rlm@9: */ rlm@9: public AVIOutputStream(File file, VideoFormat format) throws IOException { rlm@9: this(file,format,24); rlm@9: } rlm@9: /** rlm@9: * Creates a new AVI file with the specified video format and rlm@9: * frame rate. rlm@9: * rlm@9: * @param file the output file rlm@9: * @param format Selects an encoder for the video format. rlm@9: * @param bitsPerPixel the number of bits per pixel. rlm@9: * @exception IllegalArgumentException if videoFormat is null or if rlm@9: * frame rate is <= 0 rlm@9: */ rlm@9: public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException { rlm@9: if (format == null) { rlm@9: throw new IllegalArgumentException("format must not be null"); rlm@9: } rlm@9: rlm@9: if (file.exists()) { rlm@9: file.delete(); rlm@9: } rlm@9: this.out = new FileImageOutputStream(file); rlm@9: this.streamOffset = 0; rlm@9: this.videoFormat = format; rlm@9: this.videoFrames = new LinkedList(); rlm@9: this.imgDepth = bitsPerPixel; rlm@9: if (imgDepth == 4) { rlm@9: byte[] gray = new byte[16]; rlm@9: for (int i = 0; i < gray.length; i++) { rlm@9: gray[i] = (byte) ((i << 4) | i); rlm@9: } rlm@9: palette = new IndexColorModel(4, 16, gray, gray, gray); rlm@9: } else if (imgDepth == 8) { rlm@9: byte[] gray = new byte[256]; rlm@9: for (int i = 0; i < gray.length; i++) { rlm@9: gray[i] = (byte) i; rlm@9: } rlm@9: palette = new IndexColorModel(8, 256, gray, gray, gray); rlm@9: } rlm@9: rlm@9: } rlm@9: rlm@9: /** rlm@9: * Creates a new AVI output stream with the specified video format and rlm@9: * framerate. 
rlm@9: * rlm@9: * @param out the underlying output stream rlm@9: * @param format Selects an encoder for the video format. rlm@9: * @exception IllegalArgumentException if videoFormat is null or if rlm@9: * framerate is <= 0 rlm@9: */ rlm@9: public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException { rlm@9: if (format == null) { rlm@9: throw new IllegalArgumentException("format must not be null"); rlm@9: } rlm@9: this.out = out; rlm@9: this.streamOffset = out.getStreamPosition(); rlm@9: this.videoFormat = format; rlm@9: this.videoFrames = new LinkedList(); rlm@9: } rlm@9: rlm@9: /** rlm@9: * Used with frameRate to specify the time scale that this stream will use. rlm@9: * Dividing frameRate by timeScale gives the number of samples per second. rlm@9: * For video streams, this is the frame rate. For audio streams, this rate rlm@9: * corresponds to the time needed to play nBlockAlign bytes of audio, which rlm@9: * for PCM audio is the just the sample rate. rlm@9: *

rlm@9: * The default value is 1. rlm@9: * rlm@9: * @param newValue rlm@9: */ rlm@9: public void setTimeScale(int newValue) { rlm@9: if (newValue <= 0) { rlm@9: throw new IllegalArgumentException("timeScale must be greater 0"); rlm@9: } rlm@9: this.timeScale = newValue; rlm@9: } rlm@9: rlm@9: /** rlm@9: * Returns the time scale of this media. rlm@9: * rlm@9: * @return time scale rlm@9: */ rlm@9: public int getTimeScale() { rlm@9: return timeScale; rlm@9: } rlm@9: rlm@9: /** rlm@9: * Sets the rate of video frames in time scale units. rlm@9: *

rlm@9: * The default value is 30. Together with the default value 1 of timeScale rlm@9: * this results in 30 frames pers second. rlm@9: * rlm@9: * @param newValue rlm@9: */ rlm@9: public void setFrameRate(int newValue) { rlm@9: if (newValue <= 0) { rlm@9: throw new IllegalArgumentException("frameDuration must be greater 0"); rlm@9: } rlm@9: if (state == States.STARTED) { rlm@9: throw new IllegalStateException("frameDuration must be set before the first frame is written"); rlm@9: } rlm@9: this.frameRate = newValue; rlm@9: } rlm@9: rlm@9: /** rlm@9: * Returns the frame rate of this media. rlm@9: * rlm@9: * @return frame rate rlm@9: */ rlm@9: public int getFrameRate() { rlm@9: return frameRate; rlm@9: } rlm@9: rlm@9: /** Sets the global color palette. */ rlm@9: public void setPalette(IndexColorModel palette) { rlm@9: this.palette = palette; rlm@9: } rlm@9: rlm@9: /** rlm@9: * Sets the compression quality of the video track. rlm@9: * A value of 0 stands for "high compression is important" a value of rlm@9: * 1 for "high image quality is important". rlm@9: *

rlm@9: * Changing this value affects frames which are subsequently written rlm@9: * to the AVIOutputStream. Frames which have already been written rlm@9: * are not changed. rlm@9: *

rlm@9: * This value has only effect on videos encoded with JPG format. rlm@9: *

rlm@9: * The default value is 0.9. rlm@9: * rlm@9: * @param newValue rlm@9: */ rlm@9: public void setVideoCompressionQuality(float newValue) { rlm@9: this.quality = newValue; rlm@9: } rlm@9: rlm@9: /** rlm@9: * Returns the video compression quality. rlm@9: * rlm@9: * @return video compression quality rlm@9: */ rlm@9: public float getVideoCompressionQuality() { rlm@9: return quality; rlm@9: } rlm@9: rlm@9: /** rlm@9: * Sets the dimension of the video track. rlm@9: *

rlm@9: * You need to explicitly set the dimension, if you add all frames from rlm@9: * files or input streams. rlm@9: *

rlm@9: * If you add frames from buffered images, then AVIOutputStream rlm@9: * can determine the video dimension from the image width and height. rlm@9: * rlm@9: * @param width Must be greater than 0. rlm@9: * @param height Must be greater than 0. rlm@9: */ rlm@9: public void setVideoDimension(int width, int height) { rlm@9: if (width < 1 || height < 1) { rlm@9: throw new IllegalArgumentException("width and height must be greater zero."); rlm@9: } rlm@9: this.imgWidth = width; rlm@9: this.imgHeight = height; rlm@9: } rlm@9: rlm@9: /** rlm@9: * Gets the dimension of the video track. rlm@9: *

rlm@9: * Returns null if the dimension is not known. rlm@9: */ rlm@9: public Dimension getVideoDimension() { rlm@9: if (imgWidth < 1 || imgHeight < 1) { rlm@9: return null; rlm@9: } rlm@9: return new Dimension(imgWidth, imgHeight); rlm@9: } rlm@9: rlm@9: /** rlm@9: * Sets the state of the QuickTimeOutpuStream to started. rlm@9: *

rlm@9: * If the state is changed by this method, the prolog is rlm@9: * written. rlm@9: */ rlm@9: private void ensureStarted() throws IOException { rlm@9: if (state != States.STARTED) { rlm@9: creationTime = new Date(); rlm@9: writeProlog(); rlm@9: state = States.STARTED; rlm@9: } rlm@9: } rlm@9: rlm@9: /** rlm@9: * Writes a frame to the video track. rlm@9: *

rlm@9: * If the dimension of the video track has not been specified yet, it rlm@9: * is derived from the first buffered image added to the AVIOutputStream. rlm@9: * rlm@9: * @param image The frame image. rlm@9: * rlm@9: * @throws IllegalArgumentException if the duration is less than 1, or rlm@9: * if the dimension of the frame does not match the dimension of the video rlm@9: * track. rlm@9: * @throws IOException if writing the image failed. rlm@9: */ rlm@9: public void writeFrame(BufferedImage image) throws IOException { rlm@9: ensureOpen(); rlm@9: ensureStarted(); rlm@9: rlm@9: // Get the dimensions of the first image rlm@9: if (imgWidth == -1) { rlm@9: imgWidth = image.getWidth(); rlm@9: imgHeight = image.getHeight(); rlm@9: } else { rlm@9: // The dimension of the image must match the dimension of the video track rlm@9: if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) { rlm@9: throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size() rlm@9: + "] (width=" + image.getWidth() + ", height=" + image.getHeight() rlm@9: + ") differs from image[0] (width=" rlm@9: + imgWidth + ", height=" + imgHeight); rlm@9: } rlm@9: } rlm@9: rlm@9: DataChunk videoFrameChunk; rlm@9: long offset = getRelativeStreamPosition(); rlm@9: boolean isSync = true; rlm@9: switch (videoFormat) { rlm@9: case RAW: { rlm@9: switch (imgDepth) { rlm@9: case 4: { rlm@9: IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); rlm@9: int[] imgRGBs = new int[16]; rlm@9: imgPalette.getRGBs(imgRGBs); rlm@9: int[] previousRGBs = new int[16]; rlm@9: if (previousPalette == null) { rlm@9: previousPalette = palette; rlm@9: } rlm@9: previousPalette.getRGBs(previousRGBs); rlm@9: if (!Arrays.equals(imgRGBs, previousRGBs)) { rlm@9: previousPalette = imgPalette; rlm@9: DataChunk paletteChangeChunk = new DataChunk("00pc"); rlm@9: /* rlm@9: int first = imgPalette.getMapSize(); rlm@9: int last = -1; rlm@9: for (int i = 0; i < 16; i++) { rlm@9: if (previousRGBs[i] != 
imgRGBs[i] && i < first) { rlm@9: first = i; rlm@9: } rlm@9: if (previousRGBs[i] != imgRGBs[i] && i > last) { rlm@9: last = i; rlm@9: } rlm@9: }*/ rlm@9: int first = 0; rlm@9: int last = imgPalette.getMapSize() - 1; rlm@9: /* rlm@9: * typedef struct { rlm@9: BYTE bFirstEntry; rlm@9: BYTE bNumEntries; rlm@9: WORD wFlags; rlm@9: PALETTEENTRY peNew[]; rlm@9: } AVIPALCHANGE; rlm@9: * rlm@9: * typedef struct tagPALETTEENTRY { rlm@9: BYTE peRed; rlm@9: BYTE peGreen; rlm@9: BYTE peBlue; rlm@9: BYTE peFlags; rlm@9: } PALETTEENTRY; rlm@9: */ rlm@9: DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); rlm@9: pOut.writeByte(first);//bFirstEntry rlm@9: pOut.writeByte(last - first + 1);//bNumEntries rlm@9: pOut.writeShort(0);//wFlags rlm@9: rlm@9: for (int i = first; i <= last; i++) { rlm@9: pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red rlm@9: pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green rlm@9: pOut.writeByte(imgRGBs[i] & 0xff); // blue rlm@9: pOut.writeByte(0); // reserved*/ rlm@9: } rlm@9: rlm@9: moviChunk.add(paletteChangeChunk); rlm@9: paletteChangeChunk.finish(); rlm@9: long length = getRelativeStreamPosition() - offset; rlm@9: videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); rlm@9: offset = getRelativeStreamPosition(); rlm@9: } rlm@9: rlm@9: videoFrameChunk = new DataChunk("00db"); rlm@9: byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); rlm@9: byte[] rgb4 = new byte[imgWidth / 2]; rlm@9: for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down rlm@9: for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) { rlm@9: rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf)); rlm@9: } rlm@9: videoFrameChunk.getOutputStream().write(rgb4); rlm@9: } rlm@9: break; rlm@9: } rlm@9: case 8: { rlm@9: IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); rlm@9: int[] imgRGBs = new int[256]; rlm@9: imgPalette.getRGBs(imgRGBs); rlm@9: 
int[] previousRGBs = new int[256]; rlm@9: if (previousPalette == null) { rlm@9: previousPalette = palette; rlm@9: } rlm@9: previousPalette.getRGBs(previousRGBs); rlm@9: if (!Arrays.equals(imgRGBs, previousRGBs)) { rlm@9: previousPalette = imgPalette; rlm@9: DataChunk paletteChangeChunk = new DataChunk("00pc"); rlm@9: /* rlm@9: int first = imgPalette.getMapSize(); rlm@9: int last = -1; rlm@9: for (int i = 0; i < 16; i++) { rlm@9: if (previousRGBs[i] != imgRGBs[i] && i < first) { rlm@9: first = i; rlm@9: } rlm@9: if (previousRGBs[i] != imgRGBs[i] && i > last) { rlm@9: last = i; rlm@9: } rlm@9: }*/ rlm@9: int first = 0; rlm@9: int last = imgPalette.getMapSize() - 1; rlm@9: /* rlm@9: * typedef struct { rlm@9: BYTE bFirstEntry; rlm@9: BYTE bNumEntries; rlm@9: WORD wFlags; rlm@9: PALETTEENTRY peNew[]; rlm@9: } AVIPALCHANGE; rlm@9: * rlm@9: * typedef struct tagPALETTEENTRY { rlm@9: BYTE peRed; rlm@9: BYTE peGreen; rlm@9: BYTE peBlue; rlm@9: BYTE peFlags; rlm@9: } PALETTEENTRY; rlm@9: */ rlm@9: DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); rlm@9: pOut.writeByte(first);//bFirstEntry rlm@9: pOut.writeByte(last - first + 1);//bNumEntries rlm@9: pOut.writeShort(0);//wFlags rlm@9: rlm@9: for (int i = first; i <= last; i++) { rlm@9: pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red rlm@9: pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green rlm@9: pOut.writeByte(imgRGBs[i] & 0xff); // blue rlm@9: pOut.writeByte(0); // reserved*/ rlm@9: } rlm@9: rlm@9: moviChunk.add(paletteChangeChunk); rlm@9: paletteChangeChunk.finish(); rlm@9: long length = getRelativeStreamPosition() - offset; rlm@9: videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); rlm@9: offset = getRelativeStreamPosition(); rlm@9: } rlm@9: rlm@9: videoFrameChunk = new DataChunk("00db"); rlm@9: byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); rlm@9: for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down rlm@9: 
videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth); rlm@9: } rlm@9: break; rlm@9: } rlm@9: default: { rlm@9: videoFrameChunk = new DataChunk("00db"); rlm@9: WritableRaster raster = image.getRaster(); rlm@9: int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data rlm@9: byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data rlm@9: for (int y = imgHeight - 1; y >= 0; --y) { // Upside down rlm@9: raster.getPixels(0, y, imgWidth, 1, raw); rlm@9: for (int x = 0, n = imgWidth * 3; x < n; x += 3) { rlm@9: bytes[x + 2] = (byte) raw[x]; // Blue rlm@9: bytes[x + 1] = (byte) raw[x + 1]; // Green rlm@9: bytes[x] = (byte) raw[x + 2]; // Red rlm@9: } rlm@9: videoFrameChunk.getOutputStream().write(bytes); rlm@9: } rlm@9: break; rlm@9: } rlm@9: } rlm@9: break; rlm@9: } rlm@9: rlm@9: case JPG: { rlm@9: videoFrameChunk = new DataChunk("00dc"); rlm@9: ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next(); rlm@9: ImageWriteParam iwParam = iw.getDefaultWriteParam(); rlm@9: iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); rlm@9: iwParam.setCompressionQuality(quality); rlm@9: MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); rlm@9: iw.setOutput(imgOut); rlm@9: IIOImage img = new IIOImage(image, null, null); rlm@9: iw.write(null, img, iwParam); rlm@9: iw.dispose(); rlm@9: break; rlm@9: } rlm@9: case PNG: rlm@9: default: { rlm@9: videoFrameChunk = new DataChunk("00dc"); rlm@9: ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next(); rlm@9: ImageWriteParam iwParam = iw.getDefaultWriteParam(); rlm@9: MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); rlm@9: iw.setOutput(imgOut); rlm@9: IIOImage img = new IIOImage(image, null, null); rlm@9: iw.write(null, img, iwParam); rlm@9: iw.dispose(); rlm@9: break; 
rlm@9: } rlm@9: } rlm@9: long length = getRelativeStreamPosition() - offset; rlm@9: moviChunk.add(videoFrameChunk); rlm@9: videoFrameChunk.finish(); rlm@9: rlm@9: videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync)); rlm@9: if (getRelativeStreamPosition() > 1L << 32) { rlm@9: throw new IOException("AVI file is larger than 4 GB"); rlm@9: } rlm@9: } rlm@9: rlm@9: /** rlm@9: * Writes a frame from a file to the video track. rlm@9: *

rlm@9: * This method does not inspect the contents of the file. rlm@9: * For example, Its your responsibility to only add JPG files if you have rlm@9: * chosen the JPEG video format. rlm@9: *

rlm@9: * If you add all frames from files or from input streams, then you rlm@9: * have to explicitly set the dimension of the video track before you rlm@9: * call finish() or close(). rlm@9: * rlm@9: * @param file The file which holds the image data. rlm@9: * rlm@9: * @throws IllegalStateException if the duration is less than 1. rlm@9: * @throws IOException if writing the image failed. rlm@9: */ rlm@9: public void writeFrame(File file) throws IOException { rlm@9: FileInputStream in = null; rlm@9: try { rlm@9: in = new FileInputStream(file); rlm@9: writeFrame(in); rlm@9: } finally { rlm@9: if (in != null) { rlm@9: in.close(); rlm@9: } rlm@9: } rlm@9: } rlm@9: rlm@9: /** rlm@9: * Writes a frame to the video track. rlm@9: *

rlm@9: * This method does not inspect the contents of the file. rlm@9: * For example, its your responsibility to only add JPG files if you have rlm@9: * chosen the JPEG video format. rlm@9: *

rlm@9: * If you add all frames from files or from input streams, then you rlm@9: * have to explicitly set the dimension of the video track before you rlm@9: * call finish() or close(). rlm@9: * rlm@9: * @param in The input stream which holds the image data. rlm@9: * rlm@9: * @throws IllegalArgumentException if the duration is less than 1. rlm@9: * @throws IOException if writing the image failed. rlm@9: */ rlm@9: public void writeFrame(InputStream in) throws IOException { rlm@9: ensureOpen(); rlm@9: ensureStarted(); rlm@9: rlm@9: DataChunk videoFrameChunk = new DataChunk( rlm@9: videoFormat == VideoFormat.RAW ? "00db" : "00dc"); rlm@9: moviChunk.add(videoFrameChunk); rlm@9: OutputStream mdatOut = videoFrameChunk.getOutputStream(); rlm@9: long offset = getRelativeStreamPosition(); rlm@9: byte[] buf = new byte[512]; rlm@9: int len; rlm@9: while ((len = in.read(buf)) != -1) { rlm@9: mdatOut.write(buf, 0, len); rlm@9: } rlm@9: long length = getRelativeStreamPosition() - offset; rlm@9: videoFrameChunk.finish(); rlm@9: videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true)); rlm@9: if (getRelativeStreamPosition() > 1L << 32) { rlm@9: throw new IOException("AVI file is larger than 4 GB"); rlm@9: } rlm@9: } rlm@9: rlm@9: /** rlm@9: * Closes the movie file as well as the stream being filtered. rlm@9: * rlm@9: * @exception IOException if an I/O error has occurred rlm@9: */ rlm@9: public void close() throws IOException { rlm@9: if (state == States.STARTED) { rlm@9: finish(); rlm@9: } rlm@9: if (state != States.CLOSED) { rlm@9: out.close(); rlm@9: state = States.CLOSED; rlm@9: } rlm@9: } rlm@9: rlm@9: /** rlm@9: * Finishes writing the contents of the AVI output stream without closing rlm@9: * the underlying stream. Use this method when applying multiple filters rlm@9: * in succession to the same output stream. 
rlm@9: * rlm@9: * @exception IllegalStateException if the dimension of the video track rlm@9: * has not been specified or determined yet. rlm@9: * @exception IOException if an I/O exception has occurred rlm@9: */ rlm@9: public void finish() throws IOException { rlm@9: ensureOpen(); rlm@9: if (state != States.FINISHED) { rlm@9: if (imgWidth == -1 || imgHeight == -1) { rlm@9: throw new IllegalStateException("image width and height must be specified"); rlm@9: } rlm@9: rlm@9: moviChunk.finish(); rlm@9: writeEpilog(); rlm@9: state = States.FINISHED; rlm@9: imgWidth = imgHeight = -1; rlm@9: } rlm@9: } rlm@9: rlm@9: /** rlm@9: * Check to make sure that this stream has not been closed rlm@9: */ rlm@9: private void ensureOpen() throws IOException { rlm@9: if (state == States.CLOSED) { rlm@9: throw new IOException("Stream closed"); rlm@9: } rlm@9: } rlm@9: rlm@9: /** Gets the position relative to the beginning of the QuickTime stream. rlm@9: *

rlm@9: * Usually this value is equal to the stream position of the underlying rlm@9: * ImageOutputStream, but can be larger if the underlying stream already rlm@9: * contained data. rlm@9: * rlm@9: * @return The relative stream position. rlm@9: * @throws IOException rlm@9: */ rlm@9: private long getRelativeStreamPosition() throws IOException { rlm@9: return out.getStreamPosition() - streamOffset; rlm@9: } rlm@9: rlm@9: /** Seeks relative to the beginning of the QuickTime stream. rlm@9: *

rlm@9: * Usually this equal to seeking in the underlying ImageOutputStream, but rlm@9: * can be different if the underlying stream already contained data. rlm@9: * rlm@9: */ rlm@9: private void seekRelative(long newPosition) throws IOException { rlm@9: out.seek(newPosition + streamOffset); rlm@9: } rlm@9: rlm@9: private void writeProlog() throws IOException { rlm@9: // The file has the following structure: rlm@9: // rlm@9: // .RIFF AVI rlm@9: // ..avih (AVI Header Chunk) rlm@9: // ..LIST strl rlm@9: // ...strh (Stream Header Chunk) rlm@9: // ...strf (Stream Format Chunk) rlm@9: // ..LIST movi rlm@9: // ...00dc (Compressed video data chunk in Track 00, repeated for each frame) rlm@9: // ..idx1 (List of video data chunks and their location in the file) rlm@9: rlm@9: // The RIFF AVI Chunk holds the complete movie rlm@9: aviChunk = new CompositeChunk("RIFF", "AVI "); rlm@9: CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl"); rlm@9: rlm@9: // Write empty AVI Main Header Chunk - we fill the data in later rlm@9: aviChunk.add(hdrlChunk); rlm@9: avihChunk = new FixedSizeDataChunk("avih", 56); rlm@9: avihChunk.seekToEndOfChunk(); rlm@9: hdrlChunk.add(avihChunk); rlm@9: rlm@9: CompositeChunk strlChunk = new CompositeChunk("LIST", "strl"); rlm@9: hdrlChunk.add(strlChunk); rlm@9: rlm@9: // Write empty AVI Stream Header Chunk - we fill the data in later rlm@9: strhChunk = new FixedSizeDataChunk("strh", 56); rlm@9: strhChunk.seekToEndOfChunk(); rlm@9: strlChunk.add(strhChunk); rlm@9: strfChunk = new FixedSizeDataChunk("strf", palette == null ? 
            40 : 40 + palette.getMapSize() * 4);
        strfChunk.seekToEndOfChunk();
        strlChunk.add(strfChunk);

        moviChunk = new CompositeChunk("LIST", "movi");
        aviChunk.add(moviChunk);


    }

    /**
     * Writes the trailing parts of the AVI file: the idx1 index chunk, and
     * the real contents of the avih, strh and strf header chunks that were
     * reserved as placeholders by writeProlog(). Called from finish().
     *
     * @throws IOException if writing to the underlying stream fails.
     */
    private void writeEpilog() throws IOException {
        // Compute values
        // NOTE(review): 'duration' is computed here but never used below.
        int duration = 0;
        for (Sample s : videoFrames) {
            duration += s.duration;
        }
        // Size of the largest chunk; reported as the suggested buffer size.
        long bufferSize = 0;
        for (Sample s : videoFrames) {
            if (s.length > bufferSize) {
                bufferSize = s.length;
            }
        }


        DataChunkOutputStream d;

        /* Create Idx1 Chunk and write data
         * -------------
        typedef struct _avioldindex {
            FOURCC fcc;
            DWORD cb;
            struct _avioldindex_entry {
                DWORD dwChunkId;
                DWORD dwFlags;
                DWORD dwOffset;
                DWORD dwSize;
            } aIndex[];
        } AVIOLDINDEX;
         */
        DataChunk idx1Chunk = new DataChunk("idx1");
        aviChunk.add(idx1Chunk);
        d = idx1Chunk.getOutputStream();
        // Index offsets are relative to the start of the 'movi' list data.
        long moviListOffset = moviChunk.offset + 8;
        //moviListOffset = 0;
        for (Sample f : videoFrames) {

            d.writeType(f.chunkType); // dwChunkId
            // Specifies a FOURCC that identifies a stream in the AVI file. The
            // FOURCC must have the form 'xxyy' where xx is the stream number and yy
            // is a two-character code that identifies the contents of the stream:
            //
            // Two-character code   Description
            // db                   Uncompressed video frame
            // dc                   Compressed video frame
            // pc                   Palette change
            // wb                   Audio data

            d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)//
                    | (f.isSync ? 0x10 : 0x0)); // dwFlags
            // Specifies a bitwise combination of zero or more of the following
            // flags:
            //
            // Value  Name            Description
            // 0x10   AVIIF_KEYFRAME  The data chunk is a key frame.
            // 0x1    AVIIF_LIST      The data chunk is a 'rec ' list.
            // 0x100  AVIIF_NO_TIME   The data chunk does not affect the timing of
            //                        the stream. For example, this flag should be
            //                        set for palette changes.

            d.writeUInt(f.offset - moviListOffset); // dwOffset
            // Specifies the location of the data chunk in the file. The value
            // should be specified as an offset, in bytes, from the start of the
            // 'movi' list; however, in some AVI files it is given as an offset from
            // the start of the file.

            d.writeUInt(f.length); // dwSize
            // Specifies the size of the data chunk, in bytes.
        }
        idx1Chunk.finish();

        /* Write Data into AVI Main Header Chunk
         * -------------
         * The AVIMAINHEADER structure defines global information in an AVI file.
         * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx
        typedef struct _avimainheader {
            FOURCC fcc;
            DWORD cb;
            DWORD dwMicroSecPerFrame;
            DWORD dwMaxBytesPerSec;
            DWORD dwPaddingGranularity;
            DWORD dwFlags;
            DWORD dwTotalFrames;
            DWORD dwInitialFrames;
            DWORD dwStreams;
            DWORD dwSuggestedBufferSize;
            DWORD dwWidth;
            DWORD dwHeight;
            DWORD dwReserved[4];
        } AVIMAINHEADER; */
        avihChunk.seekToStartOfData();
        d = avihChunk.getOutputStream();

        d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame
        // Specifies the number of microseconds between frames.
        // This value indicates the overall timing for the file.

        d.writeUInt(0); // dwMaxBytesPerSec
        // Specifies the approximate maximum data rate of the file.
        // This value indicates the number of bytes per second the system
        // must handle to present an AVI sequence as specified by the other
        // parameters contained in the main header and stream header chunks.

        d.writeUInt(0); // dwPaddingGranularity
        // Specifies the alignment for data, in bytes. Pad the data to multiples
        // of this value.

        d.writeUInt(0x10); // dwFlags (0x10 == hasIndex)
        // Contains a bitwise combination of zero or more of the following
        // flags:
        //
        // Value    Name                 Description
        // 0x10     AVIF_HASINDEX        Indicates the AVI file has an index.
        // 0x20     AVIF_MUSTUSEINDEX    Indicates that application should use the
        //                               index, rather than the physical ordering
        //                               of the chunks in the file, to determine
        //                               the order of presentation of the data.
        //                               For example, this flag could be used to
        //                               create a list of frames for editing.
        // 0x100    AVIF_ISINTERLEAVED   Indicates the AVI file is interleaved.
        // 0x1000   AVIF_WASCAPTUREFILE  Indicates the AVI file is a specially
        //                               allocated file used for capturing
        //                               real-time video. Applications should warn
        //                               the user before writing over a file with
        //                               this flag set because the user probably
        //                               defragmented this file.
        // 0x20000  AVIF_COPYRIGHTED     Indicates the AVI file contains
        //                               copyrighted data and software. When this
        //                               flag is used, software should not permit
        //                               the data to be duplicated.

        d.writeUInt(videoFrames.size()); // dwTotalFrames
        // Specifies the total number of frames of data in the file.

        d.writeUInt(0); // dwInitialFrames
        // Specifies the initial frame for interleaved files. Noninterleaved
        // files should specify zero. If you are creating interleaved files,
        // specify the number of frames in the file prior to the initial frame
        // of the AVI sequence in this member.
        // To give the audio driver enough audio to work with, the audio data in
        // an interleaved file must be skewed from the video data. Typically,
        // the audio data should be moved forward enough frames to allow
        // approximately 0.75 seconds of audio data to be preloaded. The
        // dwInitialRecords member should be set to the number of frames the
        // audio is skewed. Also set the same value for the dwInitialFrames
        // member of the AVISTREAMHEADER structure in the audio stream header

        d.writeUInt(1); // dwStreams
        // Specifies the number of streams in the file. For example, a file with
        // audio and video has two streams.

        d.writeUInt(bufferSize); // dwSuggestedBufferSize
        // Specifies the suggested buffer size for reading the file. Generally,
        // this size should be large enough to contain the largest chunk in the
        // file. If set to zero, or if it is too small, the playback software
        // will have to reallocate memory during playback, which will reduce
        // performance. For an interleaved file, the buffer size should be large
        // enough to read an entire record, and not just a chunk.


        d.writeUInt(imgWidth); // dwWidth
        // Specifies the width of the AVI file in pixels.

        d.writeUInt(imgHeight); // dwHeight
        // Specifies the height of the AVI file in pixels.

        d.writeUInt(0); // dwReserved[0]
        d.writeUInt(0); // dwReserved[1]
        d.writeUInt(0); // dwReserved[2]
        d.writeUInt(0); // dwReserved[3]
        // Reserved. Set this array to zero.

        /* Write Data into AVI Stream Header Chunk
         * -------------
         * The AVISTREAMHEADER structure contains information about one stream
         * in an AVI file.
         * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx
        typedef struct _avistreamheader {
            FOURCC fcc;
            DWORD cb;
            FOURCC fccType;
            FOURCC fccHandler;
            DWORD dwFlags;
            WORD wPriority;
            WORD wLanguage;
            DWORD dwInitialFrames;
            DWORD dwScale;
            DWORD dwRate;
            DWORD dwStart;
            DWORD dwLength;
            DWORD dwSuggestedBufferSize;
            DWORD dwQuality;
            DWORD dwSampleSize;
            struct {
                short int left;
                short int top;
                short int right;
                short int bottom;
            } rcFrame;
        } AVISTREAMHEADER;
        */
        strhChunk.seekToStartOfData();
        d = strhChunk.getOutputStream();
        d.writeType("vids"); // fccType - vids for video stream
        // Contains a FOURCC that specifies the type of the data contained in
        // the stream. The following standard AVI values for video and audio are
        // defined:
        //
        // FOURCC   Description
        // 'auds'   Audio stream
        // 'mids'   MIDI stream
        // 'txts'   Text stream
        // 'vids'   Video stream

        switch (videoFormat) {
            case RAW:
                d.writeType("DIB "); // fccHandler - DIB for Raw RGB
                break;
            case RLE:
                d.writeType("RLE "); // fccHandler - Microsoft RLE
                break;
            case JPG:
                d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG
                break;
            case PNG:
            default:
                d.writeType("png "); // fccHandler - png for PNG
                break;
        }
        // Optionally, contains a FOURCC that identifies a specific data
        // handler. The data handler is the preferred handler for the stream.
        // For audio and video streams, this specifies the codec for decoding
        // the stream.

        if (imgDepth <= 8) {
            d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES
        } else {
            d.writeUInt(0); // dwFlags
        }

        // Contains any flags for the data stream. The bits in the high-order
        // word of these flags are specific to the type of data contained in the
        // stream. The following standard flags are defined:
        //
        // Value                  Name        Description
        // AVISF_DISABLED         0x00000001  Indicates this stream should not
        //                                    be enabled by default.
        // AVISF_VIDEO_PALCHANGES 0x00010000
        //                                    Indicates this video stream contains
        //                                    palette changes. This flag warns the
        //                                    playback software that it will need
        //                                    to animate the palette.

        d.writeUShort(0); // wPriority
        // Specifies priority of a stream type. For example, in a file with
        // multiple audio streams, the one with the highest priority might be
        // the default stream.

        d.writeUShort(0); // wLanguage
        // Language tag.

        d.writeUInt(0); // dwInitialFrames
        // Specifies how far audio data is skewed ahead of the video frames in
        // interleaved files. Typically, this is about 0.75 seconds. If you are
        // creating interleaved files, specify the number of frames in the file
        // prior to the initial frame of the AVI sequence in this member. For
        // more information, see the remarks for the dwInitialFrames member of
        // the AVIMAINHEADER structure.

        d.writeUInt(timeScale); // dwScale
        // Used with dwRate to specify the time scale that this stream will use.
        // Dividing dwRate by dwScale gives the number of samples per second.
        // For video streams, this is the frame rate. For audio streams, this
        // rate corresponds to the time needed to play nBlockAlign bytes of
        // audio, which for PCM audio is the just the sample rate.

        d.writeUInt(frameRate); // dwRate
        // See dwScale.

        d.writeUInt(0); // dwStart
        // Specifies the starting time for this stream. The units are defined by
        // the dwRate and dwScale members in the main file header. Usually, this
        // is zero, but it can specify a delay time for a stream that does not
        // start concurrently with the file.

        d.writeUInt(videoFrames.size()); // dwLength
        // Specifies the length of this stream. The units are defined by the
        // dwRate and dwScale members of the stream's header.

        d.writeUInt(bufferSize); // dwSuggestedBufferSize
        // Specifies how large a buffer should be used to read this stream.
        // Typically, this contains a value corresponding to the largest chunk
        // present in the stream. Using the correct buffer size makes playback
        // more efficient. Use zero if you do not know the correct buffer size.

        d.writeInt(-1); // dwQuality
        // Specifies an indicator of the quality of the data in the stream.
        // Quality is represented as a number between 0 and 10,000.
        // For compressed data, this typically represents the value of the
        // quality parameter passed to the compression software. If set to –1,
        // drivers use the default quality value.

        d.writeUInt(0); // dwSampleSize
        // Specifies the size of a single sample of data. This is set to zero
        // if the samples can vary in size. If this number is nonzero, then
        // multiple samples of data can be grouped into a single chunk within
        // the file. If it is zero, each sample of data (such as a video frame)
        // must be in a separate chunk. For video streams, this number is
        // typically zero, although it can be nonzero if all video frames are
        // the same size. For audio streams, this number should be the same as
        // the nBlockAlign member of the WAVEFORMATEX structure describing the
        // audio.

        d.writeUShort(0); // rcFrame.left
        d.writeUShort(0); // rcFrame.top
        d.writeUShort(imgWidth); // rcFrame.right
        d.writeUShort(imgHeight); // rcFrame.bottom
        // Specifies the destination rectangle for a text or video stream within
        // the movie rectangle specified by the dwWidth and dwHeight members of
        // the AVI main header structure. The rcFrame member is typically used
        // in support of multiple video streams. Set this rectangle to the
        // coordinates corresponding to the movie rectangle to update the whole
        // movie rectangle. Units for this member are pixels. The upper-left
        // corner of the destination rectangle is relative to the upper-left
        // corner of the movie rectangle.

        /* Write BITMAPINFOHEADER Data into AVI Stream Format Chunk
         * -------------
         * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
        typedef struct tagBITMAPINFOHEADER {
            DWORD biSize;
            LONG biWidth;
            LONG biHeight;
            WORD biPlanes;
            WORD biBitCount;
            DWORD biCompression;
            DWORD biSizeImage;
            LONG biXPelsPerMeter;
            LONG biYPelsPerMeter;
            DWORD biClrUsed;
            DWORD biClrImportant;
        } BITMAPINFOHEADER;
        */
        strfChunk.seekToStartOfData();
        d = strfChunk.getOutputStream();
        d.writeUInt(40); // biSize
        // Specifies the number of bytes required by the structure. This value
        // does not include the size of the color table or the size of the color
        // masks, if they are appended to the end of structure.

        d.writeInt(imgWidth); // biWidth
        // Specifies the width of the bitmap, in pixels.

        d.writeInt(imgHeight); // biHeight
        // Specifies the height of the bitmap, in pixels.
        //
        // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
        // a bottom-up DIB with the origin at the lower left corner. If biHeight
        // is negative, the bitmap is a top-down DIB with the origin at the
        // upper left corner.
        // For YUV bitmaps, the bitmap is always top-down, regardless of the
        // sign of biHeight. Decoders should offer YUV formats with positive
        // biHeight, but for backward compatibility they should accept YUV
        // formats with either positive or negative biHeight.
        // For compressed formats, biHeight must be positive, regardless of
        // image orientation.

        d.writeShort(1); // biPlanes
        // Specifies the number of planes for the target device. This value must
        // be set to 1.

        d.writeShort(imgDepth); // biBitCount
        // Specifies the number of bits per pixel (bpp). For uncompressed
        // formats, this value is the average number of bits per pixel. For
        // compressed formats, this value is the implied bit depth of the
        // uncompressed image, after the image has been decoded.

        switch (videoFormat) {
            case RAW:
            default:
                d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
                break;
            case RLE:
                if (imgDepth == 8) {
                    d.writeInt(1); // biCompression - BI_RLE8
                } else if (imgDepth == 4) {
                    d.writeInt(2); // biCompression - BI_RLE4
                } else {
                    throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
                }
                break;
            case JPG:
                d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
                break;
            case PNG:
                d.writeType("png "); // biCompression - png for PNG
                break;
        }
        // For compressed video and YUV formats, this member is a FOURCC code,
        // specified as a DWORD in little-endian order. For example, YUYV video
        // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
        // Codes.
        //
        // For uncompressed RGB formats, the following values are possible:
        //
        // Value         Description
        // BI_RGB        0x00000000 Uncompressed RGB.
        // BI_BITFIELDS  0x00000003 Uncompressed RGB with color masks.
        //               Valid for 16-bpp and 32-bpp bitmaps.
        //
        // Note that BI_JPG and BI_PNG are not valid video formats.
        //
        // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
        // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
        // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
        // structure to determine the specific RGB type.

        switch (videoFormat) {
            case RAW:
                d.writeInt(0); // biSizeImage
                break;
            case RLE:
            case JPG:
            case PNG:
            default:
                if (imgDepth == 4) {
                    d.writeInt(imgWidth * imgHeight / 2); // biSizeImage
                } else {
                    int bytesPerPixel = Math.max(1, imgDepth / 8);
                    d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage
                }
                break;
        }
        // Specifies the size, in bytes, of the image. This can be set to 0 for
        // uncompressed RGB bitmaps.

        d.writeInt(0); // biXPelsPerMeter
        // Specifies the horizontal resolution, in pixels per meter, of the
        // target device for the bitmap.

        d.writeInt(0); // biYPelsPerMeter
        // Specifies the vertical resolution, in pixels per meter, of the target
        // device for the bitmap.

        d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed
        // Specifies the number of color indices in the color table that are
        // actually used by the bitmap.

        d.writeInt(0); // biClrImportant
        // Specifies the number of color indices that are considered important
        // for displaying the bitmap. If this value is zero, all colors are
        // important.

        if (palette != null) {
            // Append the RGBQUAD color table for palettized images.
            for (int i = 0, n = palette.getMapSize(); i < n; ++i) {
                /*
                 * typedef struct tagRGBQUAD {
                BYTE rgbBlue;
                BYTE rgbGreen;
                BYTE rgbRed;
                BYTE rgbReserved; // This member is reserved and must be zero.
                } RGBQUAD;
                 */
                d.write(palette.getBlue(i));
                d.write(palette.getGreen(i));
                d.write(palette.getRed(i));
                d.write(0);
            }
        }


        // -----------------
        aviChunk.finish();
    }
}