Mercurial repository jmeCapture, changeset 10:4c5fc53778c1
moved randelshofer stuff to rightfull place, enabled XuggleVideoRecorder
author   | Robert McIntyre <rlm@mit.edu>
date     | Wed, 26 Oct 2011 09:38:27 -0700
parents  | 5dfc9e768816
children | 8a6b1684f536
files    | build.xml src/ca/randelshofer/AVIOutputStream.java src/ca/randelshofer/DataChunkOutputStream.java src/ca/randelshofer/ImageOutputStreamAdapter.java src/ca/randelshofer/SeekableByteArrayOutputStream.java src/com/aurellem/capture/Capture.java src/com/aurellem/capture/Main.java src/com/aurellem/capture/audio/SeekableByteArrayOutputStream.java src/com/aurellem/capture/hello/HelloVideo.java src/com/aurellem/capture/video/AVIOutputStream.java src/com/aurellem/capture/video/AVIVideoRecorder.java src/com/aurellem/capture/video/DataChunkOutputStream.java src/com/aurellem/capture/video/ImageOutputStreamAdapter.java src/com/aurellem/capture/video/MicrosoftRLEEncoder.java src/com/aurellem/capture/video/XuggleVideoRecorder.java
diffstat | 15 files changed, 2061 insertions(+), 2472 deletions(-)
--- a/build.xml	Wed Oct 26 08:54:12 2011 -0700
+++ b/build.xml	Wed Oct 26 09:38:27 2011 -0700
@@ -9,6 +9,9 @@
     <path id="classpath">
       <pathelement path="${lib}/jme"/>
       <pathelement path="${lib}/lwjgl.jar"/>
+      <pathelement path="${lib}/xuggle/xuggle-xuggler.jar"/>
+
+
     </path>
 
     <target name="prepare">
2.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 2.2 +++ b/src/ca/randelshofer/AVIOutputStream.java Wed Oct 26 09:38:27 2011 -0700 2.3 @@ -0,0 +1,1515 @@ 2.4 +/** 2.5 + * @(#)AVIOutputStream.java 1.5.1 2011-01-17 2.6 + * 2.7 + * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland. 2.8 + * All rights reserved. 2.9 + * 2.10 + * You may not use, copy or modify this file, except in compliance with the 2.11 + * license agreement you entered into with Werner Randelshofer. 2.12 + * For details see accompanying license terms. 2.13 + */ 2.14 +package ca.randelshofer; 2.15 + 2.16 +import java.awt.Dimension; 2.17 +import java.awt.image.BufferedImage; 2.18 +import java.awt.image.DataBufferByte; 2.19 +import java.awt.image.IndexColorModel; 2.20 +import java.awt.image.WritableRaster; 2.21 +import java.io.File; 2.22 +import java.io.FileInputStream; 2.23 +import java.io.IOException; 2.24 +import java.io.InputStream; 2.25 +import java.io.OutputStream; 2.26 +import java.util.Arrays; 2.27 +import java.util.Date; 2.28 +import java.util.LinkedList; 2.29 + 2.30 +import javax.imageio.IIOImage; 2.31 +import javax.imageio.ImageIO; 2.32 +import javax.imageio.ImageWriteParam; 2.33 +import javax.imageio.ImageWriter; 2.34 +import javax.imageio.stream.FileImageOutputStream; 2.35 +import javax.imageio.stream.ImageOutputStream; 2.36 +import javax.imageio.stream.MemoryCacheImageOutputStream; 2.37 + 2.38 +/** 2.39 + * This class supports writing of images into an AVI 1.0 video file. 2.40 + * <p> 2.41 + * The images are written as video frames. 2.42 + * <p> 2.43 + * Video frames can be encoded with one of the following formats: 2.44 + * <ul> 2.45 + * <li>JPEG</li> 2.46 + * <li>PNG</li> 2.47 + * <li>RAW</li> 2.48 + * <li>RLE</li> 2.49 + * </ul> 2.50 + * All frames must have the same format. 2.51 + * When JPG is used each frame can have an individual encoding quality. 2.52 + * <p> 2.53 + * All frames in an AVI file must have the same duration. The duration can 2.54 + * be set by setting an appropriate pair of values using methods 2.55 + * {@link #setFrameRate} and {@link #setTimeScale}. 2.56 + * <p> 2.57 + * The length of an AVI 1.0 file is limited to 1 GB. 2.58 + * This class supports lengths of up to 4 GB, but such files may not work on 2.59 + * all players. 2.60 + * <p> 2.61 + * For detailed information about the AVI RIFF file format see:<br> 2.62 + * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br> 2.63 + * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br> 2.64 + * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br> 2.65 + * 2.66 + * @author Werner Randelshofer 2.67 + * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream.. 2.68 + * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format. 2.69 + * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets 2.70 + * in "idx1" chunk. 2.71 + * <br>1.3.2 2010-12-27 File size limit is 1 GB. 2.72 + * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets. 2.73 + * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream. 2.74 + * Added method getVideoDimension(). 2.75 + * <br>1.2 2009-08-29 Adds support for RAW video format. 2.76 + * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih 2.77 + * chunk. Changed the API to reflect that AVI works with frame rates instead of 2.78 + * with frame durations. 
2.79 + * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG 2.80 + * encoded video. 2.81 + * <br>1.0 2008-08-11 Created. 2.82 + */ 2.83 +public class AVIOutputStream { 2.84 + 2.85 + /** 2.86 + * Underlying output stream. 2.87 + */ 2.88 + private ImageOutputStream out; 2.89 + /** The offset of the QuickTime stream in the underlying ImageOutputStream. 2.90 + * Normally this is 0 unless the underlying stream already contained data 2.91 + * when it was passed to the constructor. 2.92 + */ 2.93 + private long streamOffset; 2.94 + /** Previous frame for delta compression. */ 2.95 + 2.96 + /** 2.97 + * Supported video encodings. 2.98 + */ 2.99 + public static enum VideoFormat { 2.100 + 2.101 + RAW, RLE, JPG, PNG; 2.102 + } 2.103 + /** 2.104 + * Current video formats. 2.105 + */ 2.106 + private VideoFormat videoFormat; 2.107 + /** 2.108 + * Quality of JPEG encoded video frames. 2.109 + */ 2.110 + private float quality = 0.9f; 2.111 + /** 2.112 + * Width of the video frames. All frames must have the same width. 2.113 + * The value -1 is used to mark unspecified width. 2.114 + */ 2.115 + private int imgWidth = -1; 2.116 + /** 2.117 + * Height of the video frames. All frames must have the same height. 2.118 + * The value -1 is used to mark unspecified height. 2.119 + */ 2.120 + private int imgHeight = -1; 2.121 + /** Number of bits per pixel. */ 2.122 + private int imgDepth = 24; 2.123 + /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */ 2.124 + private IndexColorModel palette; 2.125 + private IndexColorModel previousPalette; 2.126 + /** Video encoder. */ 2.127 + 2.128 + /** 2.129 + * The timeScale of the movie. 2.130 + * <p> 2.131 + * Used with frameRate to specify the time scale that this stream will use. 2.132 + * Dividing frameRate by timeScale gives the number of samples per second. 2.133 + * For video streams, this is the frame rate. For audio streams, this rate 2.134 + * corresponds to the time needed to play nBlockAlign bytes of audio, which 2.135 + * for PCM audio is the just the sample rate. 2.136 + */ 2.137 + private int timeScale = 1; 2.138 + /** 2.139 + * The frameRate of the movie in timeScale units. 2.140 + * <p> 2.141 + * @see timeScale 2.142 + */ 2.143 + private int frameRate = 30; 2.144 + /** 2.145 + * The states of the movie output stream. 2.146 + */ 2.147 + private static enum States { 2.148 + 2.149 + STARTED, FINISHED, CLOSED; 2.150 + } 2.151 + /** 2.152 + * The current state of the movie output stream. 2.153 + */ 2.154 + private States state = States.FINISHED; 2.155 + 2.156 + /** 2.157 + * AVI stores media data in samples. 2.158 + * A sample is a single element in a sequence of time-ordered data. 2.159 + */ 2.160 + private static class Sample { 2.161 + 2.162 + String chunkType; 2.163 + /** Offset of the sample relative to the start of the AVI file. 2.164 + */ 2.165 + long offset; 2.166 + /** Data length of the sample. */ 2.167 + long length; 2.168 + /** Whether the sample is a sync-sample. */ 2.169 + boolean isSync; 2.170 + 2.171 + /** 2.172 + * Creates a new sample. 2.173 + * @param duration 2.174 + * @param offset 2.175 + * @param length 2.176 + */ 2.177 + public Sample(String chunkId, int duration, long offset, long length, boolean isSync) { 2.178 + this.chunkType = chunkId; 2.179 + this.offset = offset; 2.180 + this.length = length; 2.181 + this.isSync = isSync; 2.182 + } 2.183 + } 2.184 + /** 2.185 + * List of video frames. 2.186 + */ 2.187 + private LinkedList<Sample> videoFrames; 2.188 + /** 2.189 + * This chunk holds the whole AVI content. 
2.190 + */ 2.191 + private CompositeChunk aviChunk; 2.192 + /** 2.193 + * This chunk holds the movie frames. 2.194 + */ 2.195 + private CompositeChunk moviChunk; 2.196 + /** 2.197 + * This chunk holds the AVI Main Header. 2.198 + */ 2.199 + FixedSizeDataChunk avihChunk; 2.200 + /** 2.201 + * This chunk holds the AVI Stream Header. 2.202 + */ 2.203 + FixedSizeDataChunk strhChunk; 2.204 + /** 2.205 + * This chunk holds the AVI Stream Format Header. 2.206 + */ 2.207 + FixedSizeDataChunk strfChunk; 2.208 + 2.209 + /** 2.210 + * Chunk base class. 2.211 + */ 2.212 + private abstract class Chunk { 2.213 + 2.214 + /** 2.215 + * The chunkType of the chunk. A String with the length of 4 characters. 2.216 + */ 2.217 + protected String chunkType; 2.218 + /** 2.219 + * The offset of the chunk relative to the start of the 2.220 + * ImageOutputStream. 2.221 + */ 2.222 + protected long offset; 2.223 + 2.224 + /** 2.225 + * Creates a new Chunk at the current position of the ImageOutputStream. 2.226 + * @param chunkType The chunkType of the chunk. A string with a length of 4 characters. 2.227 + */ 2.228 + public Chunk(String chunkType) throws IOException { 2.229 + this.chunkType = chunkType; 2.230 + offset = getRelativeStreamPosition(); 2.231 + } 2.232 + 2.233 + /** 2.234 + * Writes the chunk to the ImageOutputStream and disposes it. 2.235 + */ 2.236 + public abstract void finish() throws IOException; 2.237 + 2.238 + /** 2.239 + * Returns the size of the chunk including the size of the chunk header. 2.240 + * @return The size of the chunk. 2.241 + */ 2.242 + public abstract long size(); 2.243 + } 2.244 + 2.245 + /** 2.246 + * A CompositeChunk contains an ordered list of Chunks. 2.247 + */ 2.248 + private class CompositeChunk extends Chunk { 2.249 + 2.250 + /** 2.251 + * The type of the composite. A String with the length of 4 characters. 2.252 + */ 2.253 + protected String compositeType; 2.254 + private LinkedList<Chunk> children; 2.255 + private boolean finished; 2.256 + 2.257 + /** 2.258 + * Creates a new CompositeChunk at the current position of the 2.259 + * ImageOutputStream. 2.260 + * @param compositeType The type of the composite. 2.261 + * @param chunkType The type of the chunk. 2.262 + */ 2.263 + public CompositeChunk(String compositeType, String chunkType) throws IOException { 2.264 + super(chunkType); 2.265 + this.compositeType = compositeType; 2.266 + //out.write 2.267 + out.writeLong(0); // make room for the chunk header 2.268 + out.writeInt(0); // make room for the chunk header 2.269 + children = new LinkedList<Chunk>(); 2.270 + } 2.271 + 2.272 + public void add(Chunk child) throws IOException { 2.273 + if (children.size() > 0) { 2.274 + children.getLast().finish(); 2.275 + } 2.276 + children.add(child); 2.277 + } 2.278 + 2.279 + /** 2.280 + * Writes the chunk and all its children to the ImageOutputStream 2.281 + * and disposes of all resources held by the chunk. 
2.282 + * @throws java.io.IOException 2.283 + */ 2.284 + @Override 2.285 + public void finish() throws IOException { 2.286 + if (!finished) { 2.287 + if (size() > 0xffffffffL) { 2.288 + throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size()); 2.289 + } 2.290 + 2.291 + long pointer = getRelativeStreamPosition(); 2.292 + seekRelative(offset); 2.293 + 2.294 + DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); 2.295 + headerData.writeType(compositeType); 2.296 + headerData.writeUInt(size() - 8); 2.297 + headerData.writeType(chunkType); 2.298 + for (Chunk child : children) { 2.299 + child.finish(); 2.300 + } 2.301 + seekRelative(pointer); 2.302 + if (size() % 2 == 1) { 2.303 + out.writeByte(0); // write pad byte 2.304 + } 2.305 + finished = true; 2.306 + } 2.307 + } 2.308 + 2.309 + @Override 2.310 + public long size() { 2.311 + long length = 12; 2.312 + for (Chunk child : children) { 2.313 + length += child.size() + child.size() % 2; 2.314 + } 2.315 + return length; 2.316 + } 2.317 + } 2.318 + 2.319 + /** 2.320 + * Data Chunk. 2.321 + */ 2.322 + private class DataChunk extends Chunk { 2.323 + 2.324 + private DataChunkOutputStream data; 2.325 + private boolean finished; 2.326 + 2.327 + /** 2.328 + * Creates a new DataChunk at the current position of the 2.329 + * ImageOutputStream. 2.330 + * @param chunkType The chunkType of the chunk. 2.331 + */ 2.332 + public DataChunk(String name) throws IOException { 2.333 + super(name); 2.334 + out.writeLong(0); // make room for the chunk header 2.335 + data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false); 2.336 + } 2.337 + 2.338 + public DataChunkOutputStream getOutputStream() { 2.339 + if (finished) { 2.340 + throw new IllegalStateException("DataChunk is finished"); 2.341 + } 2.342 + return data; 2.343 + } 2.344 + 2.345 + @Override 2.346 + public void finish() throws IOException { 2.347 + if (!finished) { 2.348 + long sizeBefore = size(); 2.349 + 2.350 + if (size() > 0xffffffffL) { 2.351 + throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size()); 2.352 + } 2.353 + 2.354 + long pointer = getRelativeStreamPosition(); 2.355 + seekRelative(offset); 2.356 + 2.357 + DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); 2.358 + headerData.writeType(chunkType); 2.359 + headerData.writeUInt(size() - 8); 2.360 + seekRelative(pointer); 2.361 + if (size() % 2 == 1) { 2.362 + out.writeByte(0); // write pad byte 2.363 + } 2.364 + finished = true; 2.365 + long sizeAfter = size(); 2.366 + if (sizeBefore != sizeAfter) { 2.367 + System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter); 2.368 + } 2.369 + } 2.370 + } 2.371 + 2.372 + @Override 2.373 + public long size() { 2.374 + return 8 + data.size(); 2.375 + } 2.376 + } 2.377 + 2.378 + /** 2.379 + * A DataChunk with a fixed size. 2.380 + */ 2.381 + private class FixedSizeDataChunk extends Chunk { 2.382 + 2.383 + private DataChunkOutputStream data; 2.384 + private boolean finished; 2.385 + private long fixedSize; 2.386 + 2.387 + /** 2.388 + * Creates a new DataChunk at the current position of the 2.389 + * ImageOutputStream. 2.390 + * @param chunkType The chunkType of the chunk. 
2.391 + */ 2.392 + public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException { 2.393 + super(chunkType); 2.394 + this.fixedSize = fixedSize; 2.395 + data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); 2.396 + data.writeType(chunkType); 2.397 + data.writeUInt(fixedSize); 2.398 + data.clearCount(); 2.399 + 2.400 + // Fill fixed size with nulls 2.401 + byte[] buf = new byte[(int) Math.min(512, fixedSize)]; 2.402 + long written = 0; 2.403 + while (written < fixedSize) { 2.404 + data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written)); 2.405 + written += Math.min(buf.length, fixedSize - written); 2.406 + } 2.407 + if (fixedSize % 2 == 1) { 2.408 + out.writeByte(0); // write pad byte 2.409 + } 2.410 + seekToStartOfData(); 2.411 + } 2.412 + 2.413 + public DataChunkOutputStream getOutputStream() { 2.414 + /*if (finished) { 2.415 + throw new IllegalStateException("DataChunk is finished"); 2.416 + }*/ 2.417 + return data; 2.418 + } 2.419 + 2.420 + public void seekToStartOfData() throws IOException { 2.421 + seekRelative(offset + 8); 2.422 + data.clearCount(); 2.423 + } 2.424 + 2.425 + public void seekToEndOfChunk() throws IOException { 2.426 + seekRelative(offset + 8 + fixedSize + fixedSize % 2); 2.427 + } 2.428 + 2.429 + @Override 2.430 + public void finish() throws IOException { 2.431 + if (!finished) { 2.432 + finished = true; 2.433 + } 2.434 + } 2.435 + 2.436 + @Override 2.437 + public long size() { 2.438 + return 8 + fixedSize; 2.439 + } 2.440 + } 2.441 + 2.442 + /** 2.443 + * Creates a new AVI file with the specified video format and 2.444 + * frame rate. The video has 24 bits per pixel. 2.445 + * 2.446 + * @param file the output file 2.447 + * @param format Selects an encoder for the video format. 2.448 + * @param bitsPerPixel the number of bits per pixel. 2.449 + * @exception IllegalArgumentException if videoFormat is null or if 2.450 + * frame rate is <= 0 2.451 + */ 2.452 + public AVIOutputStream(File file, VideoFormat format) throws IOException { 2.453 + this(file,format,24); 2.454 + } 2.455 + /** 2.456 + * Creates a new AVI file with the specified video format and 2.457 + * frame rate. 2.458 + * 2.459 + * @param file the output file 2.460 + * @param format Selects an encoder for the video format. 2.461 + * @param bitsPerPixel the number of bits per pixel. 
2.462 + * @exception IllegalArgumentException if videoFormat is null or if 2.463 + * frame rate is <= 0 2.464 + */ 2.465 + public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException { 2.466 + if (format == null) { 2.467 + throw new IllegalArgumentException("format must not be null"); 2.468 + } 2.469 + 2.470 + if (file.exists()) { 2.471 + file.delete(); 2.472 + } 2.473 + this.out = new FileImageOutputStream(file); 2.474 + this.streamOffset = 0; 2.475 + this.videoFormat = format; 2.476 + this.videoFrames = new LinkedList<Sample>(); 2.477 + this.imgDepth = bitsPerPixel; 2.478 + if (imgDepth == 4) { 2.479 + byte[] gray = new byte[16]; 2.480 + for (int i = 0; i < gray.length; i++) { 2.481 + gray[i] = (byte) ((i << 4) | i); 2.482 + } 2.483 + palette = new IndexColorModel(4, 16, gray, gray, gray); 2.484 + } else if (imgDepth == 8) { 2.485 + byte[] gray = new byte[256]; 2.486 + for (int i = 0; i < gray.length; i++) { 2.487 + gray[i] = (byte) i; 2.488 + } 2.489 + palette = new IndexColorModel(8, 256, gray, gray, gray); 2.490 + } 2.491 + 2.492 + } 2.493 + 2.494 + /** 2.495 + * Creates a new AVI output stream with the specified video format and 2.496 + * framerate. 2.497 + * 2.498 + * @param out the underlying output stream 2.499 + * @param format Selects an encoder for the video format. 2.500 + * @exception IllegalArgumentException if videoFormat is null or if 2.501 + * framerate is <= 0 2.502 + */ 2.503 + public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException { 2.504 + if (format == null) { 2.505 + throw new IllegalArgumentException("format must not be null"); 2.506 + } 2.507 + this.out = out; 2.508 + this.streamOffset = out.getStreamPosition(); 2.509 + this.videoFormat = format; 2.510 + this.videoFrames = new LinkedList<Sample>(); 2.511 + } 2.512 + 2.513 + /** 2.514 + * Used with frameRate to specify the time scale that this stream will use. 2.515 + * Dividing frameRate by timeScale gives the number of samples per second. 2.516 + * For video streams, this is the frame rate. For audio streams, this rate 2.517 + * corresponds to the time needed to play nBlockAlign bytes of audio, which 2.518 + * for PCM audio is the just the sample rate. 2.519 + * <p> 2.520 + * The default value is 1. 2.521 + * 2.522 + * @param newValue 2.523 + */ 2.524 + public void setTimeScale(int newValue) { 2.525 + if (newValue <= 0) { 2.526 + throw new IllegalArgumentException("timeScale must be greater 0"); 2.527 + } 2.528 + this.timeScale = newValue; 2.529 + } 2.530 + 2.531 + /** 2.532 + * Returns the time scale of this media. 2.533 + * 2.534 + * @return time scale 2.535 + */ 2.536 + public int getTimeScale() { 2.537 + return timeScale; 2.538 + } 2.539 + 2.540 + /** 2.541 + * Sets the rate of video frames in time scale units. 2.542 + * <p> 2.543 + * The default value is 30. Together with the default value 1 of timeScale 2.544 + * this results in 30 frames pers second. 2.545 + * 2.546 + * @param newValue 2.547 + */ 2.548 + public void setFrameRate(int newValue) { 2.549 + if (newValue <= 0) { 2.550 + throw new IllegalArgumentException("frameDuration must be greater 0"); 2.551 + } 2.552 + if (state == States.STARTED) { 2.553 + throw new IllegalStateException("frameDuration must be set before the first frame is written"); 2.554 + } 2.555 + this.frameRate = newValue; 2.556 + } 2.557 + 2.558 + /** 2.559 + * Returns the frame rate of this media. 
2.560 + * 2.561 + * @return frame rate 2.562 + */ 2.563 + public int getFrameRate() { 2.564 + return frameRate; 2.565 + } 2.566 + 2.567 + /** Sets the global color palette. */ 2.568 + public void setPalette(IndexColorModel palette) { 2.569 + this.palette = palette; 2.570 + } 2.571 + 2.572 + /** 2.573 + * Sets the compression quality of the video track. 2.574 + * A value of 0 stands for "high compression is important" a value of 2.575 + * 1 for "high image quality is important". 2.576 + * <p> 2.577 + * Changing this value affects frames which are subsequently written 2.578 + * to the AVIOutputStream. Frames which have already been written 2.579 + * are not changed. 2.580 + * <p> 2.581 + * This value has only effect on videos encoded with JPG format. 2.582 + * <p> 2.583 + * The default value is 0.9. 2.584 + * 2.585 + * @param newValue 2.586 + */ 2.587 + public void setVideoCompressionQuality(float newValue) { 2.588 + this.quality = newValue; 2.589 + } 2.590 + 2.591 + /** 2.592 + * Returns the video compression quality. 2.593 + * 2.594 + * @return video compression quality 2.595 + */ 2.596 + public float getVideoCompressionQuality() { 2.597 + return quality; 2.598 + } 2.599 + 2.600 + /** 2.601 + * Sets the dimension of the video track. 2.602 + * <p> 2.603 + * You need to explicitly set the dimension, if you add all frames from 2.604 + * files or input streams. 2.605 + * <p> 2.606 + * If you add frames from buffered images, then AVIOutputStream 2.607 + * can determine the video dimension from the image width and height. 2.608 + * 2.609 + * @param width Must be greater than 0. 2.610 + * @param height Must be greater than 0. 2.611 + */ 2.612 + public void setVideoDimension(int width, int height) { 2.613 + if (width < 1 || height < 1) { 2.614 + throw new IllegalArgumentException("width and height must be greater zero."); 2.615 + } 2.616 + this.imgWidth = width; 2.617 + this.imgHeight = height; 2.618 + } 2.619 + 2.620 + /** 2.621 + * Gets the dimension of the video track. 2.622 + * <p> 2.623 + * Returns null if the dimension is not known. 2.624 + */ 2.625 + public Dimension getVideoDimension() { 2.626 + if (imgWidth < 1 || imgHeight < 1) { 2.627 + return null; 2.628 + } 2.629 + return new Dimension(imgWidth, imgHeight); 2.630 + } 2.631 + 2.632 + /** 2.633 + * Sets the state of the QuickTimeOutpuStream to started. 2.634 + * <p> 2.635 + * If the state is changed by this method, the prolog is 2.636 + * written. 2.637 + */ 2.638 + private void ensureStarted() throws IOException { 2.639 + if (state != States.STARTED) { 2.640 + new Date(); 2.641 + writeProlog(); 2.642 + state = States.STARTED; 2.643 + } 2.644 + } 2.645 + 2.646 + /** 2.647 + * Writes a frame to the video track. 2.648 + * <p> 2.649 + * If the dimension of the video track has not been specified yet, it 2.650 + * is derived from the first buffered image added to the AVIOutputStream. 2.651 + * 2.652 + * @param image The frame image. 2.653 + * 2.654 + * @throws IllegalArgumentException if the duration is less than 1, or 2.655 + * if the dimension of the frame does not match the dimension of the video 2.656 + * track. 2.657 + * @throws IOException if writing the image failed. 
2.658 + */ 2.659 + public void writeFrame(BufferedImage image) throws IOException { 2.660 + ensureOpen(); 2.661 + ensureStarted(); 2.662 + 2.663 + // Get the dimensions of the first image 2.664 + if (imgWidth == -1) { 2.665 + imgWidth = image.getWidth(); 2.666 + imgHeight = image.getHeight(); 2.667 + } else { 2.668 + // The dimension of the image must match the dimension of the video track 2.669 + if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) { 2.670 + throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size() 2.671 + + "] (width=" + image.getWidth() + ", height=" + image.getHeight() 2.672 + + ") differs from image[0] (width=" 2.673 + + imgWidth + ", height=" + imgHeight); 2.674 + } 2.675 + } 2.676 + 2.677 + DataChunk videoFrameChunk; 2.678 + long offset = getRelativeStreamPosition(); 2.679 + boolean isSync = true; 2.680 + switch (videoFormat) { 2.681 + case RAW: { 2.682 + switch (imgDepth) { 2.683 + case 4: { 2.684 + IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); 2.685 + int[] imgRGBs = new int[16]; 2.686 + imgPalette.getRGBs(imgRGBs); 2.687 + int[] previousRGBs = new int[16]; 2.688 + if (previousPalette == null) { 2.689 + previousPalette = palette; 2.690 + } 2.691 + previousPalette.getRGBs(previousRGBs); 2.692 + if (!Arrays.equals(imgRGBs, previousRGBs)) { 2.693 + previousPalette = imgPalette; 2.694 + DataChunk paletteChangeChunk = new DataChunk("00pc"); 2.695 + /* 2.696 + int first = imgPalette.getMapSize(); 2.697 + int last = -1; 2.698 + for (int i = 0; i < 16; i++) { 2.699 + if (previousRGBs[i] != imgRGBs[i] && i < first) { 2.700 + first = i; 2.701 + } 2.702 + if (previousRGBs[i] != imgRGBs[i] && i > last) { 2.703 + last = i; 2.704 + } 2.705 + }*/ 2.706 + int first = 0; 2.707 + int last = imgPalette.getMapSize() - 1; 2.708 + /* 2.709 + * typedef struct { 2.710 + BYTE bFirstEntry; 2.711 + BYTE bNumEntries; 2.712 + WORD wFlags; 2.713 + PALETTEENTRY peNew[]; 2.714 + } AVIPALCHANGE; 2.715 + * 2.716 + * typedef struct tagPALETTEENTRY { 2.717 + BYTE peRed; 2.718 + BYTE peGreen; 2.719 + BYTE peBlue; 2.720 + BYTE peFlags; 2.721 + } PALETTEENTRY; 2.722 + */ 2.723 + DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); 2.724 + pOut.writeByte(first);//bFirstEntry 2.725 + pOut.writeByte(last - first + 1);//bNumEntries 2.726 + pOut.writeShort(0);//wFlags 2.727 + 2.728 + for (int i = first; i <= last; i++) { 2.729 + pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red 2.730 + pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green 2.731 + pOut.writeByte(imgRGBs[i] & 0xff); // blue 2.732 + pOut.writeByte(0); // reserved*/ 2.733 + } 2.734 + 2.735 + moviChunk.add(paletteChangeChunk); 2.736 + paletteChangeChunk.finish(); 2.737 + long length = getRelativeStreamPosition() - offset; 2.738 + videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); 2.739 + offset = getRelativeStreamPosition(); 2.740 + } 2.741 + 2.742 + videoFrameChunk = new DataChunk("00db"); 2.743 + byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); 2.744 + byte[] rgb4 = new byte[imgWidth / 2]; 2.745 + for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down 2.746 + for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) { 2.747 + rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf)); 2.748 + } 2.749 + videoFrameChunk.getOutputStream().write(rgb4); 2.750 + } 2.751 + break; 2.752 + } 2.753 + case 8: { 2.754 + IndexColorModel imgPalette = (IndexColorModel) 
image.getColorModel(); 2.755 + int[] imgRGBs = new int[256]; 2.756 + imgPalette.getRGBs(imgRGBs); 2.757 + int[] previousRGBs = new int[256]; 2.758 + if (previousPalette == null) { 2.759 + previousPalette = palette; 2.760 + } 2.761 + previousPalette.getRGBs(previousRGBs); 2.762 + if (!Arrays.equals(imgRGBs, previousRGBs)) { 2.763 + previousPalette = imgPalette; 2.764 + DataChunk paletteChangeChunk = new DataChunk("00pc"); 2.765 + /* 2.766 + int first = imgPalette.getMapSize(); 2.767 + int last = -1; 2.768 + for (int i = 0; i < 16; i++) { 2.769 + if (previousRGBs[i] != imgRGBs[i] && i < first) { 2.770 + first = i; 2.771 + } 2.772 + if (previousRGBs[i] != imgRGBs[i] && i > last) { 2.773 + last = i; 2.774 + } 2.775 + }*/ 2.776 + int first = 0; 2.777 + int last = imgPalette.getMapSize() - 1; 2.778 + /* 2.779 + * typedef struct { 2.780 + BYTE bFirstEntry; 2.781 + BYTE bNumEntries; 2.782 + WORD wFlags; 2.783 + PALETTEENTRY peNew[]; 2.784 + } AVIPALCHANGE; 2.785 + * 2.786 + * typedef struct tagPALETTEENTRY { 2.787 + BYTE peRed; 2.788 + BYTE peGreen; 2.789 + BYTE peBlue; 2.790 + BYTE peFlags; 2.791 + } PALETTEENTRY; 2.792 + */ 2.793 + DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); 2.794 + pOut.writeByte(first);//bFirstEntry 2.795 + pOut.writeByte(last - first + 1);//bNumEntries 2.796 + pOut.writeShort(0);//wFlags 2.797 + 2.798 + for (int i = first; i <= last; i++) { 2.799 + pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red 2.800 + pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green 2.801 + pOut.writeByte(imgRGBs[i] & 0xff); // blue 2.802 + pOut.writeByte(0); // reserved*/ 2.803 + } 2.804 + 2.805 + moviChunk.add(paletteChangeChunk); 2.806 + paletteChangeChunk.finish(); 2.807 + long length = getRelativeStreamPosition() - offset; 2.808 + videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); 2.809 + offset = getRelativeStreamPosition(); 2.810 + } 2.811 + 2.812 + videoFrameChunk = new DataChunk("00db"); 2.813 + byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); 2.814 + for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down 2.815 + videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth); 2.816 + } 2.817 + break; 2.818 + } 2.819 + default: { 2.820 + videoFrameChunk = new DataChunk("00db"); 2.821 + WritableRaster raster = image.getRaster(); 2.822 + int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data 2.823 + byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data 2.824 + for (int y = imgHeight - 1; y >= 0; --y) { // Upside down 2.825 + raster.getPixels(0, y, imgWidth, 1, raw); 2.826 + for (int x = 0, n = imgWidth * 3; x < n; x += 3) { 2.827 + bytes[x + 2] = (byte) raw[x]; // Blue 2.828 + bytes[x + 1] = (byte) raw[x + 1]; // Green 2.829 + bytes[x] = (byte) raw[x + 2]; // Red 2.830 + } 2.831 + videoFrameChunk.getOutputStream().write(bytes); 2.832 + } 2.833 + break; 2.834 + } 2.835 + } 2.836 + break; 2.837 + } 2.838 + 2.839 + case JPG: { 2.840 + videoFrameChunk = new DataChunk("00dc"); 2.841 + ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next(); 2.842 + ImageWriteParam iwParam = iw.getDefaultWriteParam(); 2.843 + iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); 2.844 + iwParam.setCompressionQuality(quality); 2.845 + MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); 2.846 + iw.setOutput(imgOut); 2.847 
+ IIOImage img = new IIOImage(image, null, null); 2.848 + iw.write(null, img, iwParam); 2.849 + iw.dispose(); 2.850 + break; 2.851 + } 2.852 + case PNG: 2.853 + default: { 2.854 + videoFrameChunk = new DataChunk("00dc"); 2.855 + ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next(); 2.856 + ImageWriteParam iwParam = iw.getDefaultWriteParam(); 2.857 + MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); 2.858 + iw.setOutput(imgOut); 2.859 + IIOImage img = new IIOImage(image, null, null); 2.860 + iw.write(null, img, iwParam); 2.861 + iw.dispose(); 2.862 + break; 2.863 + } 2.864 + } 2.865 + long length = getRelativeStreamPosition() - offset; 2.866 + moviChunk.add(videoFrameChunk); 2.867 + videoFrameChunk.finish(); 2.868 + 2.869 + videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync)); 2.870 + if (getRelativeStreamPosition() > 1L << 32) { 2.871 + throw new IOException("AVI file is larger than 4 GB"); 2.872 + } 2.873 + } 2.874 + 2.875 + /** 2.876 + * Writes a frame from a file to the video track. 2.877 + * <p> 2.878 + * This method does not inspect the contents of the file. 2.879 + * For example, Its your responsibility to only add JPG files if you have 2.880 + * chosen the JPEG video format. 2.881 + * <p> 2.882 + * If you add all frames from files or from input streams, then you 2.883 + * have to explicitly set the dimension of the video track before you 2.884 + * call finish() or close(). 2.885 + * 2.886 + * @param file The file which holds the image data. 2.887 + * 2.888 + * @throws IllegalStateException if the duration is less than 1. 2.889 + * @throws IOException if writing the image failed. 2.890 + */ 2.891 + public void writeFrame(File file) throws IOException { 2.892 + FileInputStream in = null; 2.893 + try { 2.894 + in = new FileInputStream(file); 2.895 + writeFrame(in); 2.896 + } finally { 2.897 + if (in != null) { 2.898 + in.close(); 2.899 + } 2.900 + } 2.901 + } 2.902 + 2.903 + /** 2.904 + * Writes a frame to the video track. 2.905 + * <p> 2.906 + * This method does not inspect the contents of the file. 2.907 + * For example, its your responsibility to only add JPG files if you have 2.908 + * chosen the JPEG video format. 2.909 + * <p> 2.910 + * If you add all frames from files or from input streams, then you 2.911 + * have to explicitly set the dimension of the video track before you 2.912 + * call finish() or close(). 2.913 + * 2.914 + * @param in The input stream which holds the image data. 2.915 + * 2.916 + * @throws IllegalArgumentException if the duration is less than 1. 2.917 + * @throws IOException if writing the image failed. 2.918 + */ 2.919 + public void writeFrame(InputStream in) throws IOException { 2.920 + ensureOpen(); 2.921 + ensureStarted(); 2.922 + 2.923 + DataChunk videoFrameChunk = new DataChunk( 2.924 + videoFormat == VideoFormat.RAW ? 
"00db" : "00dc"); 2.925 + moviChunk.add(videoFrameChunk); 2.926 + OutputStream mdatOut = videoFrameChunk.getOutputStream(); 2.927 + long offset = getRelativeStreamPosition(); 2.928 + byte[] buf = new byte[512]; 2.929 + int len; 2.930 + while ((len = in.read(buf)) != -1) { 2.931 + mdatOut.write(buf, 0, len); 2.932 + } 2.933 + long length = getRelativeStreamPosition() - offset; 2.934 + videoFrameChunk.finish(); 2.935 + videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true)); 2.936 + if (getRelativeStreamPosition() > 1L << 32) { 2.937 + throw new IOException("AVI file is larger than 4 GB"); 2.938 + } 2.939 + } 2.940 + 2.941 + /** 2.942 + * Closes the movie file as well as the stream being filtered. 2.943 + * 2.944 + * @exception IOException if an I/O error has occurred 2.945 + */ 2.946 + public void close() throws IOException { 2.947 + if (state == States.STARTED) { 2.948 + finish(); 2.949 + } 2.950 + if (state != States.CLOSED) { 2.951 + out.close(); 2.952 + state = States.CLOSED; 2.953 + } 2.954 + } 2.955 + 2.956 + /** 2.957 + * Finishes writing the contents of the AVI output stream without closing 2.958 + * the underlying stream. Use this method when applying multiple filters 2.959 + * in succession to the same output stream. 2.960 + * 2.961 + * @exception IllegalStateException if the dimension of the video track 2.962 + * has not been specified or determined yet. 2.963 + * @exception IOException if an I/O exception has occurred 2.964 + */ 2.965 + public void finish() throws IOException { 2.966 + ensureOpen(); 2.967 + if (state != States.FINISHED) { 2.968 + if (imgWidth == -1 || imgHeight == -1) { 2.969 + throw new IllegalStateException("image width and height must be specified"); 2.970 + } 2.971 + 2.972 + moviChunk.finish(); 2.973 + writeEpilog(); 2.974 + state = States.FINISHED; 2.975 + imgWidth = imgHeight = -1; 2.976 + } 2.977 + } 2.978 + 2.979 + /** 2.980 + * Check to make sure that this stream has not been closed 2.981 + */ 2.982 + private void ensureOpen() throws IOException { 2.983 + if (state == States.CLOSED) { 2.984 + throw new IOException("Stream closed"); 2.985 + } 2.986 + } 2.987 + 2.988 + /** Gets the position relative to the beginning of the QuickTime stream. 2.989 + * <p> 2.990 + * Usually this value is equal to the stream position of the underlying 2.991 + * ImageOutputStream, but can be larger if the underlying stream already 2.992 + * contained data. 2.993 + * 2.994 + * @return The relative stream position. 2.995 + * @throws IOException 2.996 + */ 2.997 + private long getRelativeStreamPosition() throws IOException { 2.998 + return out.getStreamPosition() - streamOffset; 2.999 + } 2.1000 + 2.1001 + /** Seeks relative to the beginning of the QuickTime stream. 2.1002 + * <p> 2.1003 + * Usually this equal to seeking in the underlying ImageOutputStream, but 2.1004 + * can be different if the underlying stream already contained data. 
2.1005 + * 2.1006 + */ 2.1007 + private void seekRelative(long newPosition) throws IOException { 2.1008 + out.seek(newPosition + streamOffset); 2.1009 + } 2.1010 + 2.1011 + private void writeProlog() throws IOException { 2.1012 + // The file has the following structure: 2.1013 + // 2.1014 + // .RIFF AVI 2.1015 + // ..avih (AVI Header Chunk) 2.1016 + // ..LIST strl 2.1017 + // ...strh (Stream Header Chunk) 2.1018 + // ...strf (Stream Format Chunk) 2.1019 + // ..LIST movi 2.1020 + // ...00dc (Compressed video data chunk in Track 00, repeated for each frame) 2.1021 + // ..idx1 (List of video data chunks and their location in the file) 2.1022 + 2.1023 + // The RIFF AVI Chunk holds the complete movie 2.1024 + aviChunk = new CompositeChunk("RIFF", "AVI "); 2.1025 + CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl"); 2.1026 + 2.1027 + // Write empty AVI Main Header Chunk - we fill the data in later 2.1028 + aviChunk.add(hdrlChunk); 2.1029 + avihChunk = new FixedSizeDataChunk("avih", 56); 2.1030 + avihChunk.seekToEndOfChunk(); 2.1031 + hdrlChunk.add(avihChunk); 2.1032 + 2.1033 + CompositeChunk strlChunk = new CompositeChunk("LIST", "strl"); 2.1034 + hdrlChunk.add(strlChunk); 2.1035 + 2.1036 + // Write empty AVI Stream Header Chunk - we fill the data in later 2.1037 + strhChunk = new FixedSizeDataChunk("strh", 56); 2.1038 + strhChunk.seekToEndOfChunk(); 2.1039 + strlChunk.add(strhChunk); 2.1040 + strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4); 2.1041 + strfChunk.seekToEndOfChunk(); 2.1042 + strlChunk.add(strfChunk); 2.1043 + 2.1044 + moviChunk = new CompositeChunk("LIST", "movi"); 2.1045 + aviChunk.add(moviChunk); 2.1046 + 2.1047 + 2.1048 + } 2.1049 + 2.1050 + private void writeEpilog() throws IOException { 2.1051 + 2.1052 + long bufferSize = 0; 2.1053 + for (Sample s : videoFrames) { 2.1054 + if (s.length > bufferSize) { 2.1055 + bufferSize = s.length; 2.1056 + } 2.1057 + } 2.1058 + 2.1059 + 2.1060 + DataChunkOutputStream d; 2.1061 + 2.1062 + /* Create Idx1 Chunk and write data 2.1063 + * ------------- 2.1064 + typedef struct _avioldindex { 2.1065 + FOURCC fcc; 2.1066 + DWORD cb; 2.1067 + struct _avioldindex_entry { 2.1068 + DWORD dwChunkId; 2.1069 + DWORD dwFlags; 2.1070 + DWORD dwOffset; 2.1071 + DWORD dwSize; 2.1072 + } aIndex[]; 2.1073 + } AVIOLDINDEX; 2.1074 + */ 2.1075 + DataChunk idx1Chunk = new DataChunk("idx1"); 2.1076 + aviChunk.add(idx1Chunk); 2.1077 + d = idx1Chunk.getOutputStream(); 2.1078 + long moviListOffset = moviChunk.offset + 8; 2.1079 + //moviListOffset = 0; 2.1080 + for (Sample f : videoFrames) { 2.1081 + 2.1082 + d.writeType(f.chunkType); // dwChunkId 2.1083 + // Specifies a FOURCC that identifies a stream in the AVI file. The 2.1084 + // FOURCC must have the form 'xxyy' where xx is the stream number and yy 2.1085 + // is a two-character code that identifies the contents of the stream: 2.1086 + // 2.1087 + // Two-character code Description 2.1088 + // db Uncompressed video frame 2.1089 + // dc Compressed video frame 2.1090 + // pc Palette change 2.1091 + // wb Audio data 2.1092 + 2.1093 + d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)// 2.1094 + | (f.isSync ? 0x10 : 0x0)); // dwFlags 2.1095 + // Specifies a bitwise combination of zero or more of the following 2.1096 + // flags: 2.1097 + // 2.1098 + // Value Name Description 2.1099 + // 0x10 AVIIF_KEYFRAME The data chunk is a key frame. 2.1100 + // 0x1 AVIIF_LIST The data chunk is a 'rec ' list. 
2.1101 + // 0x100 AVIIF_NO_TIME The data chunk does not affect the timing of the 2.1102 + // stream. For example, this flag should be set for 2.1103 + // palette changes. 2.1104 + 2.1105 + d.writeUInt(f.offset - moviListOffset); // dwOffset 2.1106 + // Specifies the location of the data chunk in the file. The value 2.1107 + // should be specified as an offset, in bytes, from the start of the 2.1108 + // 'movi' list; however, in some AVI files it is given as an offset from 2.1109 + // the start of the file. 2.1110 + 2.1111 + d.writeUInt(f.length); // dwSize 2.1112 + // Specifies the size of the data chunk, in bytes. 2.1113 + } 2.1114 + idx1Chunk.finish(); 2.1115 + 2.1116 + /* Write Data into AVI Main Header Chunk 2.1117 + * ------------- 2.1118 + * The AVIMAINHEADER structure defines global information in an AVI file. 2.1119 + * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx 2.1120 + typedef struct _avimainheader { 2.1121 + FOURCC fcc; 2.1122 + DWORD cb; 2.1123 + DWORD dwMicroSecPerFrame; 2.1124 + DWORD dwMaxBytesPerSec; 2.1125 + DWORD dwPaddingGranularity; 2.1126 + DWORD dwFlags; 2.1127 + DWORD dwTotalFrames; 2.1128 + DWORD dwInitialFrames; 2.1129 + DWORD dwStreams; 2.1130 + DWORD dwSuggestedBufferSize; 2.1131 + DWORD dwWidth; 2.1132 + DWORD dwHeight; 2.1133 + DWORD dwReserved[4]; 2.1134 + } AVIMAINHEADER; */ 2.1135 + avihChunk.seekToStartOfData(); 2.1136 + d = avihChunk.getOutputStream(); 2.1137 + 2.1138 + d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame 2.1139 + // Specifies the number of microseconds between frames. 2.1140 + // This value indicates the overall timing for the file. 2.1141 + 2.1142 + d.writeUInt(0); // dwMaxBytesPerSec 2.1143 + // Specifies the approximate maximum data rate of the file. 2.1144 + // This value indicates the number of bytes per second the system 2.1145 + // must handle to present an AVI sequence as specified by the other 2.1146 + // parameters contained in the main header and stream header chunks. 2.1147 + 2.1148 + d.writeUInt(0); // dwPaddingGranularity 2.1149 + // Specifies the alignment for data, in bytes. Pad the data to multiples 2.1150 + // of this value. 2.1151 + 2.1152 + d.writeUInt(0x10); // dwFlags (0x10 == hasIndex) 2.1153 + // Contains a bitwise combination of zero or more of the following 2.1154 + // flags: 2.1155 + // 2.1156 + // Value Name Description 2.1157 + // 0x10 AVIF_HASINDEX Indicates the AVI file has an index. 2.1158 + // 0x20 AVIF_MUSTUSEINDEX Indicates that application should use the 2.1159 + // index, rather than the physical ordering of the 2.1160 + // chunks in the file, to determine the order of 2.1161 + // presentation of the data. For example, this flag 2.1162 + // could be used to create a list of frames for 2.1163 + // editing. 2.1164 + // 0x100 AVIF_ISINTERLEAVED Indicates the AVI file is interleaved. 2.1165 + // 0x1000 AVIF_WASCAPTUREFILE Indicates the AVI file is a specially 2.1166 + // allocated file used for capturing real-time 2.1167 + // video. Applications should warn the user before 2.1168 + // writing over a file with this flag set because 2.1169 + // the user probably defragmented this file. 2.1170 + // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted 2.1171 + // data and software. When this flag is used, 2.1172 + // software should not permit the data to be 2.1173 + // duplicated. 2.1174 + 2.1175 + d.writeUInt(videoFrames.size()); // dwTotalFrames 2.1176 + // Specifies the total number of frames of data in the file. 
2.1177 + 2.1178 + d.writeUInt(0); // dwInitialFrames 2.1179 + // Specifies the initial frame for interleaved files. Noninterleaved 2.1180 + // files should specify zero. If you are creating interleaved files, 2.1181 + // specify the number of frames in the file prior to the initial frame 2.1182 + // of the AVI sequence in this member. 2.1183 + // To give the audio driver enough audio to work with, the audio data in 2.1184 + // an interleaved file must be skewed from the video data. Typically, 2.1185 + // the audio data should be moved forward enough frames to allow 2.1186 + // approximately 0.75 seconds of audio data to be preloaded. The 2.1187 + // dwInitialRecords member should be set to the number of frames the 2.1188 + // audio is skewed. Also set the same value for the dwInitialFrames 2.1189 + // member of the AVISTREAMHEADER structure in the audio stream header 2.1190 + 2.1191 + d.writeUInt(1); // dwStreams 2.1192 + // Specifies the number of streams in the file. For example, a file with 2.1193 + // audio and video has two streams. 2.1194 + 2.1195 + d.writeUInt(bufferSize); // dwSuggestedBufferSize 2.1196 + // Specifies the suggested buffer size for reading the file. Generally, 2.1197 + // this size should be large enough to contain the largest chunk in the 2.1198 + // file. If set to zero, or if it is too small, the playback software 2.1199 + // will have to reallocate memory during playback, which will reduce 2.1200 + // performance. For an interleaved file, the buffer size should be large 2.1201 + // enough to read an entire record, and not just a chunk. 2.1202 + 2.1203 + 2.1204 + d.writeUInt(imgWidth); // dwWidth 2.1205 + // Specifies the width of the AVI file in pixels. 2.1206 + 2.1207 + d.writeUInt(imgHeight); // dwHeight 2.1208 + // Specifies the height of the AVI file in pixels. 2.1209 + 2.1210 + d.writeUInt(0); // dwReserved[0] 2.1211 + d.writeUInt(0); // dwReserved[1] 2.1212 + d.writeUInt(0); // dwReserved[2] 2.1213 + d.writeUInt(0); // dwReserved[3] 2.1214 + // Reserved. Set this array to zero. 2.1215 + 2.1216 + /* Write Data into AVI Stream Header Chunk 2.1217 + * ------------- 2.1218 + * The AVISTREAMHEADER structure contains information about one stream 2.1219 + * in an AVI file. 2.1220 + * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx 2.1221 + typedef struct _avistreamheader { 2.1222 + FOURCC fcc; 2.1223 + DWORD cb; 2.1224 + FOURCC fccType; 2.1225 + FOURCC fccHandler; 2.1226 + DWORD dwFlags; 2.1227 + WORD wPriority; 2.1228 + WORD wLanguage; 2.1229 + DWORD dwInitialFrames; 2.1230 + DWORD dwScale; 2.1231 + DWORD dwRate; 2.1232 + DWORD dwStart; 2.1233 + DWORD dwLength; 2.1234 + DWORD dwSuggestedBufferSize; 2.1235 + DWORD dwQuality; 2.1236 + DWORD dwSampleSize; 2.1237 + struct { 2.1238 + short int left; 2.1239 + short int top; 2.1240 + short int right; 2.1241 + short int bottom; 2.1242 + } rcFrame; 2.1243 + } AVISTREAMHEADER; 2.1244 + */ 2.1245 + strhChunk.seekToStartOfData(); 2.1246 + d = strhChunk.getOutputStream(); 2.1247 + d.writeType("vids"); // fccType - vids for video stream 2.1248 + // Contains a FOURCC that specifies the type of the data contained in 2.1249 + // the stream. 
The following standard AVI values for video and audio are 2.1250 + // defined: 2.1251 + // 2.1252 + // FOURCC Description 2.1253 + // 'auds' Audio stream 2.1254 + // 'mids' MIDI stream 2.1255 + // 'txts' Text stream 2.1256 + // 'vids' Video stream 2.1257 + 2.1258 + switch (videoFormat) { 2.1259 + case RAW: 2.1260 + d.writeType("DIB "); // fccHandler - DIB for Raw RGB 2.1261 + break; 2.1262 + case RLE: 2.1263 + d.writeType("RLE "); // fccHandler - Microsoft RLE 2.1264 + break; 2.1265 + case JPG: 2.1266 + d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG 2.1267 + break; 2.1268 + case PNG: 2.1269 + default: 2.1270 + d.writeType("png "); // fccHandler - png for PNG 2.1271 + break; 2.1272 + } 2.1273 + // Optionally, contains a FOURCC that identifies a specific data 2.1274 + // handler. The data handler is the preferred handler for the stream. 2.1275 + // For audio and video streams, this specifies the codec for decoding 2.1276 + // the stream. 2.1277 + 2.1278 + if (imgDepth <= 8) { 2.1279 + d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES 2.1280 + } else { 2.1281 + d.writeUInt(0); // dwFlags 2.1282 + } 2.1283 + 2.1284 + // Contains any flags for the data stream. The bits in the high-order 2.1285 + // word of these flags are specific to the type of data contained in the 2.1286 + // stream. The following standard flags are defined: 2.1287 + // 2.1288 + // Value Name Description 2.1289 + // AVISF_DISABLED 0x00000001 Indicates this stream should not 2.1290 + // be enabled by default. 2.1291 + // AVISF_VIDEO_PALCHANGES 0x00010000 2.1292 + // Indicates this video stream contains 2.1293 + // palette changes. This flag warns the playback 2.1294 + // software that it will need to animate the 2.1295 + // palette. 2.1296 + 2.1297 + d.writeUShort(0); // wPriority 2.1298 + // Specifies priority of a stream type. For example, in a file with 2.1299 + // multiple audio streams, the one with the highest priority might be 2.1300 + // the default stream. 2.1301 + 2.1302 + d.writeUShort(0); // wLanguage 2.1303 + // Language tag. 2.1304 + 2.1305 + d.writeUInt(0); // dwInitialFrames 2.1306 + // Specifies how far audio data is skewed ahead of the video frames in 2.1307 + // interleaved files. Typically, this is about 0.75 seconds. If you are 2.1308 + // creating interleaved files, specify the number of frames in the file 2.1309 + // prior to the initial frame of the AVI sequence in this member. For 2.1310 + // more information, see the remarks for the dwInitialFrames member of 2.1311 + // the AVIMAINHEADER structure. 2.1312 + 2.1313 + d.writeUInt(timeScale); // dwScale 2.1314 + // Used with dwRate to specify the time scale that this stream will use. 2.1315 + // Dividing dwRate by dwScale gives the number of samples per second. 2.1316 + // For video streams, this is the frame rate. For audio streams, this 2.1317 + // rate corresponds to the time needed to play nBlockAlign bytes of 2.1318 + // audio, which for PCM audio is the just the sample rate. 2.1319 + 2.1320 + d.writeUInt(frameRate); // dwRate 2.1321 + // See dwScale. 2.1322 + 2.1323 + d.writeUInt(0); // dwStart 2.1324 + // Specifies the starting time for this stream. The units are defined by 2.1325 + // the dwRate and dwScale members in the main file header. Usually, this 2.1326 + // is zero, but it can specify a delay time for a stream that does not 2.1327 + // start concurrently with the file. 2.1328 + 2.1329 + d.writeUInt(videoFrames.size()); // dwLength 2.1330 + // Specifies the length of this stream. 
The units are defined by the 2.1331 + // dwRate and dwScale members of the stream's header. 2.1332 + 2.1333 + d.writeUInt(bufferSize); // dwSuggestedBufferSize 2.1334 + // Specifies how large a buffer should be used to read this stream. 2.1335 + // Typically, this contains a value corresponding to the largest chunk 2.1336 + // present in the stream. Using the correct buffer size makes playback 2.1337 + // more efficient. Use zero if you do not know the correct buffer size. 2.1338 + 2.1339 + d.writeInt(-1); // dwQuality 2.1340 + // Specifies an indicator of the quality of the data in the stream. 2.1341 + // Quality is represented as a number between 0 and 10,000. 2.1342 + // For compressed data, this typically represents the value of the 2.1343 + // quality parameter passed to the compression software. If set to –1, 2.1344 + // drivers use the default quality value. 2.1345 + 2.1346 + d.writeUInt(0); // dwSampleSize 2.1347 + // Specifies the size of a single sample of data. This is set to zero 2.1348 + // if the samples can vary in size. If this number is nonzero, then 2.1349 + // multiple samples of data can be grouped into a single chunk within 2.1350 + // the file. If it is zero, each sample of data (such as a video frame) 2.1351 + // must be in a separate chunk. For video streams, this number is 2.1352 + // typically zero, although it can be nonzero if all video frames are 2.1353 + // the same size. For audio streams, this number should be the same as 2.1354 + // the nBlockAlign member of the WAVEFORMATEX structure describing the 2.1355 + // audio. 2.1356 + 2.1357 + d.writeUShort(0); // rcFrame.left 2.1358 + d.writeUShort(0); // rcFrame.top 2.1359 + d.writeUShort(imgWidth); // rcFrame.right 2.1360 + d.writeUShort(imgHeight); // rcFrame.bottom 2.1361 + // Specifies the destination rectangle for a text or video stream within 2.1362 + // the movie rectangle specified by the dwWidth and dwHeight members of 2.1363 + // the AVI main header structure. The rcFrame member is typically used 2.1364 + // in support of multiple video streams. Set this rectangle to the 2.1365 + // coordinates corresponding to the movie rectangle to update the whole 2.1366 + // movie rectangle. Units for this member are pixels. The upper-left 2.1367 + // corner of the destination rectangle is relative to the upper-left 2.1368 + // corner of the movie rectangle. 2.1369 + 2.1370 + /* Write BITMAPINFOHEADR Data into AVI Stream Format Chunk 2.1371 + /* ------------- 2.1372 + * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx 2.1373 + typedef struct tagBITMAPINFOHEADER { 2.1374 + DWORD biSize; 2.1375 + LONG biWidth; 2.1376 + LONG biHeight; 2.1377 + WORD biPlanes; 2.1378 + WORD biBitCount; 2.1379 + DWORD biCompression; 2.1380 + DWORD biSizeImage; 2.1381 + LONG biXPelsPerMeter; 2.1382 + LONG biYPelsPerMeter; 2.1383 + DWORD biClrUsed; 2.1384 + DWORD biClrImportant; 2.1385 + } BITMAPINFOHEADER; 2.1386 + */ 2.1387 + strfChunk.seekToStartOfData(); 2.1388 + d = strfChunk.getOutputStream(); 2.1389 + d.writeUInt(40); // biSize 2.1390 + // Specifies the number of bytes required by the structure. This value 2.1391 + // does not include the size of the color table or the size of the color 2.1392 + // masks, if they are appended to the end of structure. 2.1393 + 2.1394 + d.writeInt(imgWidth); // biWidth 2.1395 + // Specifies the width of the bitmap, in pixels. 2.1396 + 2.1397 + d.writeInt(imgHeight); // biHeight 2.1398 + // Specifies the height of the bitmap, in pixels. 
2.1399 + // 2.1400 + // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is 2.1401 + // a bottom-up DIB with the origin at the lower left corner. If biHeight 2.1402 + // is negative, the bitmap is a top-down DIB with the origin at the 2.1403 + // upper left corner. 2.1404 + // For YUV bitmaps, the bitmap is always top-down, regardless of the 2.1405 + // sign of biHeight. Decoders should offer YUV formats with postive 2.1406 + // biHeight, but for backward compatibility they should accept YUV 2.1407 + // formats with either positive or negative biHeight. 2.1408 + // For compressed formats, biHeight must be positive, regardless of 2.1409 + // image orientation. 2.1410 + 2.1411 + d.writeShort(1); // biPlanes 2.1412 + // Specifies the number of planes for the target device. This value must 2.1413 + // be set to 1. 2.1414 + 2.1415 + d.writeShort(imgDepth); // biBitCount 2.1416 + // Specifies the number of bits per pixel (bpp). For uncompressed 2.1417 + // formats, this value is the average number of bits per pixel. For 2.1418 + // compressed formats, this value is the implied bit depth of the 2.1419 + // uncompressed image, after the image has been decoded. 2.1420 + 2.1421 + switch (videoFormat) { 2.1422 + case RAW: 2.1423 + default: 2.1424 + d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB 2.1425 + break; 2.1426 + case RLE: 2.1427 + if (imgDepth == 8) { 2.1428 + d.writeInt(1); // biCompression - BI_RLE8 2.1429 + } else if (imgDepth == 4) { 2.1430 + d.writeInt(2); // biCompression - BI_RLE4 2.1431 + } else { 2.1432 + throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images"); 2.1433 + } 2.1434 + break; 2.1435 + case JPG: 2.1436 + d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG 2.1437 + break; 2.1438 + case PNG: 2.1439 + d.writeType("png "); // biCompression - png for PNG 2.1440 + break; 2.1441 + } 2.1442 + // For compressed video and YUV formats, this member is a FOURCC code, 2.1443 + // specified as a DWORD in little-endian order. For example, YUYV video 2.1444 + // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC 2.1445 + // Codes. 2.1446 + // 2.1447 + // For uncompressed RGB formats, the following values are possible: 2.1448 + // 2.1449 + // Value Description 2.1450 + // BI_RGB 0x00000000 Uncompressed RGB. 2.1451 + // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks. 2.1452 + // Valid for 16-bpp and 32-bpp bitmaps. 2.1453 + // 2.1454 + // Note that BI_JPG and BI_PNG are not valid video formats. 2.1455 + // 2.1456 + // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is 2.1457 + // always RGB 555. If biCompression equals BI_BITFIELDS, the format is 2.1458 + // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE 2.1459 + // structure to determine the specific RGB type. 2.1460 + 2.1461 + switch (videoFormat) { 2.1462 + case RAW: 2.1463 + d.writeInt(0); // biSizeImage 2.1464 + break; 2.1465 + case RLE: 2.1466 + case JPG: 2.1467 + case PNG: 2.1468 + default: 2.1469 + if (imgDepth == 4) { 2.1470 + d.writeInt(imgWidth * imgHeight / 2); // biSizeImage 2.1471 + } else { 2.1472 + int bytesPerPixel = Math.max(1, imgDepth / 8); 2.1473 + d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage 2.1474 + } 2.1475 + break; 2.1476 + } 2.1477 + // Specifies the size, in bytes, of the image. This can be set to 0 for 2.1478 + // uncompressed RGB bitmaps. 
2.1479 + 2.1480 + d.writeInt(0); // biXPelsPerMeter 2.1481 + // Specifies the horizontal resolution, in pixels per meter, of the 2.1482 + // target device for the bitmap. 2.1483 + 2.1484 + d.writeInt(0); // biYPelsPerMeter 2.1485 + // Specifies the vertical resolution, in pixels per meter, of the target 2.1486 + // device for the bitmap. 2.1487 + 2.1488 + d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed 2.1489 + // Specifies the number of color indices in the color table that are 2.1490 + // actually used by the bitmap. 2.1491 + 2.1492 + d.writeInt(0); // biClrImportant 2.1493 + // Specifies the number of color indices that are considered important 2.1494 + // for displaying the bitmap. If this value is zero, all colors are 2.1495 + // important. 2.1496 + 2.1497 + if (palette != null) { 2.1498 + for (int i = 0, n = palette.getMapSize(); i < n; ++i) { 2.1499 + /* 2.1500 + * typedef struct tagRGBQUAD { 2.1501 + BYTE rgbBlue; 2.1502 + BYTE rgbGreen; 2.1503 + BYTE rgbRed; 2.1504 + BYTE rgbReserved; // This member is reserved and must be zero. 2.1505 + } RGBQUAD; 2.1506 + */ 2.1507 + d.write(palette.getBlue(i)); 2.1508 + d.write(palette.getGreen(i)); 2.1509 + d.write(palette.getRed(i)); 2.1510 + d.write(0); 2.1511 + } 2.1512 + } 2.1513 + 2.1514 + 2.1515 + // ----------------- 2.1516 + aviChunk.finish(); 2.1517 + } 2.1518 +}
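For orientation, here is a minimal usage sketch of the class added above; it is not part of the changeset. The output file name, frame size, and frame count are arbitrary, while the API used (the File/VideoFormat constructor, setFrameRate, setTimeScale, writeFrame, close) is the one visible in this diff.

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;

import ca.randelshofer.AVIOutputStream;

// Write 60 synthetic 320x240 frames as a PNG-encoded AVI at 30 fps.
public class AviSketch {
    public static void main(String[] args) throws IOException {
        AVIOutputStream out =
                new AVIOutputStream(new File("sketch.avi"),
                                    AVIOutputStream.VideoFormat.PNG);
        out.setFrameRate(30);  // 30 frames per timeScale unit
        out.setTimeScale(1);   // together: 30 frames per second
        try {
            BufferedImage frame =
                    new BufferedImage(320, 240, BufferedImage.TYPE_INT_RGB);
            for (int i = 0; i < 60; i++) {
                frame.getGraphics().fillRect(i, 100, 40, 40); // trivial animation
                out.writeFrame(frame); // dimensions come from the first frame
            }
        } finally {
            out.close(); // finishes the RIFF tree and releases the file
        }
    }
}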
3.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 3.2 +++ b/src/ca/randelshofer/DataChunkOutputStream.java Wed Oct 26 09:38:27 2011 -0700 3.3 @@ -0,0 +1,217 @@ 3.4 +/** 3.5 + * @(#)DataChunkOutputStream.java 1.1 2011-01-17 3.6 + * 3.7 + * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland. 3.8 + * All rights reserved. 3.9 + * 3.10 + * You may not use, copy or modify this file, except in compliance with the 3.11 + * license agreement you entered into with Werner Randelshofer. 3.12 + * For details see accompanying license terms. 3.13 + */ 3.14 +package ca.randelshofer; 3.15 + 3.16 +import java.io.*; 3.17 + 3.18 +/** 3.19 + * This output stream filter supports common data types used inside 3.20 + * of AVI RIFF Data Chunks. 3.21 + * 3.22 + * @author Werner Randelshofer 3.23 + * @version 1.1 2011-01-17 Adds functionality for blocking flush and close. 3.24 + * <br>1.0.1 2010-04-05 Removed unused constants. 3.25 + * <br>1.0 2008-08-11 Created. 3.26 + */ 3.27 +public class DataChunkOutputStream extends FilterOutputStream { 3.28 + 3.29 + /** 3.30 + * The number of bytes written to the data output stream so far. 3.31 + * If this counter overflows, it will be wrapped to Integer.MAX_VALUE. 3.32 + */ 3.33 + protected long written; 3.34 + 3.35 + /** Whether flush and close request shall be forwarded to underlying stream.*/ 3.36 + private boolean forwardFlushAndClose; 3.37 + 3.38 + public DataChunkOutputStream(OutputStream out) { 3.39 + this(out,true); 3.40 + } 3.41 + public DataChunkOutputStream(OutputStream out, boolean forwardFlushAndClose) { 3.42 + super(out); 3.43 + this.forwardFlushAndClose=forwardFlushAndClose; 3.44 + } 3.45 + 3.46 + /** 3.47 + * Writes an chunk type identifier (4 bytes). 3.48 + * @param s A string with a length of 4 characters. 3.49 + */ 3.50 + public void writeType(String s) throws IOException { 3.51 + if (s.length() != 4) { 3.52 + throw new IllegalArgumentException("type string must have 4 characters"); 3.53 + } 3.54 + 3.55 + try { 3.56 + out.write(s.getBytes("ASCII"), 0, 4); 3.57 + incCount(4); 3.58 + } catch (UnsupportedEncodingException e) { 3.59 + throw new InternalError(e.toString()); 3.60 + } 3.61 + } 3.62 + 3.63 + /** 3.64 + * Writes out a <code>byte</code> to the underlying output stream as 3.65 + * a 1-byte value. If no exception is thrown, the counter 3.66 + * <code>written</code> is incremented by <code>1</code>. 3.67 + * 3.68 + * @param v a <code>byte</code> value to be written. 3.69 + * @exception IOException if an I/O error occurs. 3.70 + * @see java.io.FilterOutputStream#out 3.71 + */ 3.72 + public final void writeByte(int v) throws IOException { 3.73 + out.write(v); 3.74 + incCount(1); 3.75 + } 3.76 + 3.77 + /** 3.78 + * Writes <code>len</code> bytes from the specified byte array 3.79 + * starting at offset <code>off</code> to the underlying output stream. 3.80 + * If no exception is thrown, the counter <code>written</code> is 3.81 + * incremented by <code>len</code>. 3.82 + * 3.83 + * @param b the data. 3.84 + * @param off the start offset in the data. 3.85 + * @param len the number of bytes to write. 3.86 + * @exception IOException if an I/O error occurs. 3.87 + * @see java.io.FilterOutputStream#out 3.88 + */ 3.89 + @Override 3.90 + public synchronized void write(byte b[], int off, int len) 3.91 + throws IOException { 3.92 + out.write(b, off, len); 3.93 + incCount(len); 3.94 + } 3.95 + 3.96 + /** 3.97 + * Writes the specified byte (the low eight bits of the argument 3.98 + * <code>b</code>) to the underlying output stream. 
If no exception 3.99 + * is thrown, the counter <code>written</code> is incremented by 3.100 + * <code>1</code>. 3.101 + * <p> 3.102 + * Implements the <code>write</code> method of <code>OutputStream</code>. 3.103 + * 3.104 + * @param b the <code>byte</code> to be written. 3.105 + * @exception IOException if an I/O error occurs. 3.106 + * @see java.io.FilterOutputStream#out 3.107 + */ 3.108 + @Override 3.109 + public synchronized void write(int b) throws IOException { 3.110 + out.write(b); 3.111 + incCount(1); 3.112 + } 3.113 + 3.114 + /** 3.115 + * Writes an <code>int</code> to the underlying output stream as four 3.116 + * bytes, high byte first. If no exception is thrown, the counter 3.117 + * <code>written</code> is incremented by <code>4</code>. 3.118 + * 3.119 + * @param v an <code>int</code> to be written. 3.120 + * @exception IOException if an I/O error occurs. 3.121 + * @see java.io.FilterOutputStream#out 3.122 + */ 3.123 + public void writeInt(int v) throws IOException { 3.124 + out.write((v >>> 0) & 0xff); 3.125 + out.write((v >>> 8) & 0xff); 3.126 + out.write((v >>> 16) & 0xff); 3.127 + out.write((v >>> 24) & 0xff); 3.128 + incCount(4); 3.129 + } 3.130 + 3.131 + /** 3.132 + * Writes an unsigned 32 bit integer value. 3.133 + * 3.134 + * @param v The value 3.135 + * @throws java.io.IOException 3.136 + */ 3.137 + public void writeUInt(long v) throws IOException { 3.138 + out.write((int) ((v >>> 0) & 0xff)); 3.139 + out.write((int) ((v >>> 8) & 0xff)); 3.140 + out.write((int) ((v >>> 16) & 0xff)); 3.141 + out.write((int) ((v >>> 24) & 0xff)); 3.142 + incCount(4); 3.143 + } 3.144 + 3.145 + /** 3.146 + * Writes a signed 16 bit integer value. 3.147 + * 3.148 + * @param v The value 3.149 + * @throws java.io.IOException 3.150 + */ 3.151 + public void writeShort(int v) throws IOException { 3.152 + out.write((int) ((v >>> 0) & 0xff)); 3.153 + out.write((int) ((v >> 8) & 0xff)); 3.154 + incCount(2); 3.155 + } 3.156 + 3.157 + public void writeLong(long v) throws IOException { 3.158 + out.write((int) (v >>> 0) & 0xff); 3.159 + out.write((int) (v >>> 8) & 0xff); 3.160 + out.write((int) (v >>> 16) & 0xff); 3.161 + out.write((int) (v >>> 24) & 0xff); 3.162 + out.write((int) (v >>> 32) & 0xff); 3.163 + out.write((int) (v >>> 40) & 0xff); 3.164 + out.write((int) (v >>> 48) & 0xff); 3.165 + out.write((int) (v >>> 56) & 0xff); 3.166 + incCount(8); 3.167 + } 3.168 + 3.169 + public void writeUShort(int v) throws IOException { 3.170 + out.write((int) ((v >>> 0) & 0xff)); 3.171 + out.write((int) ((v >> 8) & 0xff)); 3.172 + incCount(2); 3.173 + } 3.174 + 3.175 + /** 3.176 + * Increases the written counter by the specified value 3.177 + * until it reaches Long.MAX_VALUE. 3.178 + */ 3.179 + protected void incCount(int value) { 3.180 + long temp = written + value; 3.181 + if (temp < 0) { 3.182 + temp = Long.MAX_VALUE; 3.183 + } 3.184 + written = temp; 3.185 + } 3.186 + 3.187 + /** 3.188 + * Returns the current value of the counter <code>written</code>, 3.189 + * the number of bytes written to this data output stream so far. 3.190 + * If the counter overflows, it will be wrapped to Integer.MAX_VALUE. 3.191 + * 3.192 + * @return the value of the <code>written</code> field. 3.193 + * @see java.io.DataOutputStream#written 3.194 + */ 3.195 + public final long size() { 3.196 + return written; 3.197 + } 3.198 + 3.199 + /** 3.200 + * Sets the value of the counter <code>written</code> to 0. 
3.201 + */ 3.202 + public void clearCount() { 3.203 + written = 0; 3.204 + } 3.205 + 3.206 + @Override 3.207 + public void close() throws IOException { 3.208 + if (forwardFlushAndClose) { 3.209 + super.close(); 3.210 + } 3.211 + } 3.212 + 3.213 + @Override 3.214 + public void flush() throws IOException { 3.215 + if (forwardFlushAndClose) { 3.216 + super.flush(); 3.217 + } 3.218 + } 3.219 + 3.220 +}
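A small sketch of what the writer methods above produce; it is not part of the changeset. RIFF/AVI values go out low byte first, and size() tracks the running byte count. The chunk id and values are arbitrary.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import ca.randelshofer.DataChunkOutputStream;

// Demonstrates the little-endian byte order used for RIFF values.
public class ChunkDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataChunkOutputStream d = new DataChunkOutputStream(buf);
        d.writeType("avih");                  // 4-character chunk id, ASCII
        d.writeUInt(56);                      // DWORD: 38 00 00 00
        d.writeUShort(1);                     // WORD:  01 00
        System.out.println(d.size());         // 10 bytes written so far
        byte[] b = buf.toByteArray();
        System.out.printf("%02x %02x%n", b[4], b[5]); // 38 00
    }
}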
4.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 4.2 +++ b/src/ca/randelshofer/ImageOutputStreamAdapter.java Wed Oct 26 09:38:27 2011 -0700 4.3 @@ -0,0 +1,144 @@ 4.4 +/* 4.5 + * @(#)ImageOutputStreamAdapter.java 1.1 2011-01-07 4.6 + * 4.7 + * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland. 4.8 + * All rights reserved. 4.9 + * 4.10 + * You may not use, copy or modify this file, except in compliance with the 4.11 + * license agreement you entered into with Werner Randelshofer. 4.12 + * For details see accompanying license terms. 4.13 + */ 4.14 +package ca.randelshofer; 4.15 + 4.16 +import java.io.IOException; 4.17 +import java.io.OutputStream; 4.18 + 4.19 +import javax.imageio.stream.ImageOutputStream; 4.20 + 4.21 +/** 4.22 + * Adapts an {@code ImageOutputStream} for classes requiring an 4.23 + * {@code OutputStream}. 4.24 + * 4.25 + * @author Werner Randelshofer 4.26 + * @version 1.1 2011-01-07 Fixes performance. 4.27 + * <br>1.0 2010-12-26 Created. 4.28 + */ 4.29 +public class ImageOutputStreamAdapter extends OutputStream { 4.30 + 4.31 + /** 4.32 + * The underlying output stream to be filtered. 4.33 + */ 4.34 + protected ImageOutputStream out; 4.35 + 4.36 + /** 4.37 + * Creates an output stream filter built on top of the specified 4.38 + * underlying output stream. 4.39 + * 4.40 + * @param out the underlying output stream to be assigned to 4.41 + * the field <tt>this.out</tt> for later use, or 4.42 + * <code>null</code> if this instance is to be 4.43 + * created without an underlying stream. 4.44 + */ 4.45 + public ImageOutputStreamAdapter(ImageOutputStream out) { 4.46 + this.out = out; 4.47 + } 4.48 + 4.49 + /** 4.50 + * Writes the specified <code>byte</code> to this output stream. 4.51 + * <p> 4.52 + * The <code>write</code> method of <code>FilterOutputStream</code> 4.53 + * calls the <code>write</code> method of its underlying output stream, 4.54 + * that is, it performs <tt>out.write(b)</tt>. 4.55 + * <p> 4.56 + * Implements the abstract <tt>write</tt> method of <tt>OutputStream</tt>. 4.57 + * 4.58 + * @param b the <code>byte</code>. 4.59 + * @exception IOException if an I/O error occurs. 4.60 + */ 4.61 + @Override 4.62 + public void write(int b) throws IOException { 4.63 + out.write(b); 4.64 + } 4.65 + 4.66 + /** 4.67 + * Writes <code>b.length</code> bytes to this output stream. 4.68 + * <p> 4.69 + * The <code>write</code> method of <code>FilterOutputStream</code> 4.70 + * calls its <code>write</code> method of three arguments with the 4.71 + * arguments <code>b</code>, <code>0</code>, and 4.72 + * <code>b.length</code>. 4.73 + * <p> 4.74 + * Note that this method does not call the one-argument 4.75 + * <code>write</code> method of its underlying stream with the single 4.76 + * argument <code>b</code>. 4.77 + * 4.78 + * @param b the data to be written. 4.79 + * @exception IOException if an I/O error occurs. 4.80 + * @see java.io.FilterOutputStream#write(byte[], int, int) 4.81 + */ 4.82 + @Override 4.83 + public void write(byte b[]) throws IOException { 4.84 + write(b, 0, b.length); 4.85 + } 4.86 + 4.87 + /** 4.88 + * Writes <code>len</code> bytes from the specified 4.89 + * <code>byte</code> array starting at offset <code>off</code> to 4.90 + * this output stream. 4.91 + * <p> 4.92 + * The <code>write</code> method of <code>FilterOutputStream</code> 4.93 + * calls the <code>write</code> method of one argument on each 4.94 + * <code>byte</code> to output. 
4.95 + * <p> 4.96 + * Note that this method does not call the <code>write</code> method 4.97 + * of its underlying input stream with the same arguments. Subclasses 4.98 + * of <code>FilterOutputStream</code> should provide a more efficient 4.99 + * implementation of this method. 4.100 + * 4.101 + * @param b the data. 4.102 + * @param off the start offset in the data. 4.103 + * @param len the number of bytes to write. 4.104 + * @exception IOException if an I/O error occurs. 4.105 + * @see java.io.FilterOutputStream#write(int) 4.106 + */ 4.107 + @Override 4.108 + public void write(byte b[], int off, int len) throws IOException { 4.109 + out.write(b,off,len); 4.110 + } 4.111 + 4.112 + /** 4.113 + * Flushes this output stream and forces any buffered output bytes 4.114 + * to be written out to the stream. 4.115 + * <p> 4.116 + * The <code>flush</code> method of <code>FilterOutputStream</code> 4.117 + * calls the <code>flush</code> method of its underlying output stream. 4.118 + * 4.119 + * @exception IOException if an I/O error occurs. 4.120 + * @see java.io.FilterOutputStream#out 4.121 + */ 4.122 + @Override 4.123 + public void flush() throws IOException { 4.124 + out.flush(); 4.125 + } 4.126 + 4.127 + /** 4.128 + * Closes this output stream and releases any system resources 4.129 + * associated with the stream. 4.130 + * <p> 4.131 + * The <code>close</code> method of <code>FilterOutputStream</code> 4.132 + * calls its <code>flush</code> method, and then calls the 4.133 + * <code>close</code> method of its underlying output stream. 4.134 + * 4.135 + * @exception IOException if an I/O error occurs. 4.136 + * @see java.io.FilterOutputStream#flush() 4.137 + * @see java.io.FilterOutputStream#out 4.138 + */ 4.139 + @Override 4.140 + public void close() throws IOException { 4.141 + try { 4.142 + flush(); 4.143 + } finally { 4.144 + out.close(); 4.145 + } 4.146 + } 4.147 +}
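A sketch (not part of the changeset) of how this adapter is meant to be used, mirroring the wiring inside AVIOutputStream: it lets OutputStream-based chunk writers operate on a javax.imageio ImageOutputStream. The file name and chunk contents are arbitrary.

import java.io.File;
import java.io.IOException;

import javax.imageio.stream.FileImageOutputStream;
import javax.imageio.stream.ImageOutputStream;

import ca.randelshofer.DataChunkOutputStream;
import ca.randelshofer.ImageOutputStreamAdapter;

public class AdapterDemo {
    public static void main(String[] args) throws IOException {
        ImageOutputStream ios = new FileImageOutputStream(new File("chunk.bin"));
        try {
            // forwardFlushAndClose=false: closing the chunk writer must not
            // close the shared ImageOutputStream underneath it.
            DataChunkOutputStream d =
                    new DataChunkOutputStream(new ImageOutputStreamAdapter(ios), false);
            d.writeType("strh");
            d.writeUInt(56); // placeholder chunk size
            d.close();       // no-op on the underlying stream
        } finally {
            ios.close();
        }
    }
}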
5.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 5.2 +++ b/src/ca/randelshofer/SeekableByteArrayOutputStream.java Wed Oct 26 09:38:27 2011 -0700 5.3 @@ -0,0 +1,153 @@ 5.4 +/* 5.5 + * @(#)SeekableByteArrayOutputStream.java 1.0 2010-12-27 5.6 + * 5.7 + * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland. 5.8 + * All rights reserved. 5.9 + * 5.10 + * You may not use, copy or modify this file, except in compliance with the 5.11 + * license agreement you entered into with Werner Randelshofer. 5.12 + * For details see accompanying license terms. 5.13 + */ 5.14 + 5.15 +package ca.randelshofer; 5.16 + 5.17 +import java.io.ByteArrayOutputStream; 5.18 +import java.io.IOException; 5.19 +import java.io.OutputStream; 5.20 +import java.util.Arrays; 5.21 +import static java.lang.Math.*; 5.22 +/** 5.23 + * {@code SeekableByteArrayOutputStream}. 5.24 + * 5.25 + * @author Werner Randelshofer 5.26 + * @version 1.0 2010-12-27 Created. 5.27 + */ 5.28 +public class SeekableByteArrayOutputStream extends ByteArrayOutputStream { 5.29 + 5.30 + /** 5.31 + * The current stream position. 5.32 + */ 5.33 + private int pos; 5.34 + 5.35 + /** 5.36 + * Creates a new byte array output stream. The buffer capacity is 5.37 + * initially 32 bytes, though its size increases if necessary. 5.38 + */ 5.39 + public SeekableByteArrayOutputStream() { 5.40 + this(32); 5.41 + } 5.42 + 5.43 + /** 5.44 + * Creates a new byte array output stream, with a buffer capacity of 5.45 + * the specified size, in bytes. 5.46 + * 5.47 + * @param size the initial size. 5.48 + * @exception IllegalArgumentException if size is negative. 5.49 + */ 5.50 + public SeekableByteArrayOutputStream(int size) { 5.51 + if (size < 0) { 5.52 + throw new IllegalArgumentException("Negative initial size: " 5.53 + + size); 5.54 + } 5.55 + buf = new byte[size]; 5.56 + } 5.57 + 5.58 + /** 5.59 + * Writes the specified byte to this byte array output stream. 5.60 + * 5.61 + * @param b the byte to be written. 5.62 + */ 5.63 + @Override 5.64 + public synchronized void write(int b) { 5.65 + int newcount = max(pos + 1, count); 5.66 + if (newcount > buf.length) { 5.67 + buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); 5.68 + } 5.69 + buf[pos++] = (byte)b; 5.70 + count = newcount; 5.71 + } 5.72 + 5.73 + /** 5.74 + * Writes <code>len</code> bytes from the specified byte array 5.75 + * starting at offset <code>off</code> to this byte array output stream. 5.76 + * 5.77 + * @param b the data. 5.78 + * @param off the start offset in the data. 5.79 + * @param len the number of bytes to write. 5.80 + */ 5.81 + @Override 5.82 + public synchronized void write(byte b[], int off, int len) { 5.83 + if ((off < 0) || (off > b.length) || (len < 0) || 5.84 + ((off + len) > b.length) || ((off + len) < 0)) { 5.85 + throw new IndexOutOfBoundsException(); 5.86 + } else if (len == 0) { 5.87 + return; 5.88 + } 5.89 + int newcount = max(pos+len,count); 5.90 + if (newcount > buf.length) { 5.91 + buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); 5.92 + } 5.93 + System.arraycopy(b, off, buf, pos, len); 5.94 + pos+=len; 5.95 + count = newcount; 5.96 + } 5.97 + 5.98 + /** 5.99 + * Resets the <code>count</code> field of this byte array output 5.100 + * stream to zero, so that all currently accumulated output in the 5.101 + * output stream is discarded. The output stream can be used again, 5.102 + * reusing the already allocated buffer space. 
5.103 + * 5.104 + * @see java.io.ByteArrayInputStream#count 5.105 + */ 5.106 + @Override 5.107 + public synchronized void reset() { 5.108 + count = 0; 5.109 + pos=0; 5.110 + } 5.111 + 5.112 + /** 5.113 + * Sets the current stream position to the desired location. The 5.114 + * next read will occur at this location. The bit offset is set 5.115 + * to 0. 5.116 + * 5.117 + * <p> An <code>IndexOutOfBoundsException</code> will be thrown if 5.118 + * <code>pos</code> is smaller than the flushed position (as 5.119 + * returned by <code>getflushedPosition</code>). 5.120 + * 5.121 + * <p> It is legal to seek past the end of the file; an 5.122 + * <code>EOFException</code> will be thrown only if a read is 5.123 + * performed. 5.124 + * 5.125 + * @param pos a <code>long</code> containing the desired file 5.126 + * pointer position. 5.127 + * 5.128 + * @exception IndexOutOfBoundsException if <code>pos</code> is smaller 5.129 + * than the flushed position. 5.130 + * @exception IOException if any other I/O error occurs. 5.131 + */ 5.132 + public void seek(long pos) throws IOException { 5.133 + this.pos = (int)pos; 5.134 + } 5.135 + 5.136 + /** 5.137 + * Returns the current byte position of the stream. The next write 5.138 + * will take place starting at this offset. 5.139 + * 5.140 + * @return a long containing the position of the stream. 5.141 + * 5.142 + * @exception IOException if an I/O error occurs. 5.143 + */ 5.144 + public long getStreamPosition() throws IOException { 5.145 + return pos; 5.146 + } 5.147 + 5.148 + /** Writes the contents of the byte array into the specified output 5.149 + * stream. 5.150 + * @param out 5.151 + */ 5.152 + public void toOutputStream(OutputStream out) throws IOException { 5.153 + out.write(buf, 0, count); 5.154 + } 5.155 + 5.156 +}
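A sketch (not part of the changeset) of the seek-and-patch pattern this class enables: write a placeholder, measure the payload, seek back and fill the placeholder in, then dump the buffer. The values are arbitrary.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import ca.randelshofer.SeekableByteArrayOutputStream;

public class SeekDemo {
    public static void main(String[] args) throws IOException {
        SeekableByteArrayOutputStream out = new SeekableByteArrayOutputStream();
        out.write(0);                        // placeholder for payload length
        long start = out.getStreamPosition();
        out.write(new byte[]{1, 2, 3, 4});   // payload
        long end = out.getStreamPosition();

        out.seek(0);                         // back-patch the placeholder
        out.write((int) (end - start));
        out.seek(end);                       // resume appending at the end

        ByteArrayOutputStream copy = new ByteArrayOutputStream();
        out.toOutputStream(copy);            // copy now holds 04 01 02 03 04
    }
}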
6.1 --- a/src/com/aurellem/capture/Capture.java Wed Oct 26 08:54:12 2011 -0700 6.2 +++ b/src/com/aurellem/capture/Capture.java Wed Oct 26 09:38:27 2011 -0700 6.3 @@ -4,6 +4,8 @@ 6.4 import java.io.IOException; 6.5 6.6 import com.aurellem.capture.video.AVIVideoRecorder; 6.7 +import com.aurellem.capture.video.AbstractVideoRecorder; 6.8 +import com.aurellem.capture.video.XuggleVideoRecorder; 6.9 import com.jme3.app.Application; 6.10 import com.jme3.math.ColorRGBA; 6.11 6.12 @@ -12,7 +14,18 @@ 6.13 public static void SimpleCaptureVideo(Application app, File file) throws IOException{ 6.14 app.getViewPort().setClearFlags(true, true, true); 6.15 app.getViewPort().setBackgroundColor(ColorRGBA.Black); 6.16 - AVIVideoRecorder videoRecorder = new AVIVideoRecorder(file); 6.17 + 6.18 + // The XuggleVideoRecorder is better than the AVIVideoRecorder in every way 6.19 + // except for ease of installation. The excellent work by Werner Randelshofer 6.20 + // is used as a fallback option. Please visit http://www.xuggle.com/ to learn 6.21 + // how to set up the XuggleVideoRecorder. 6.22 + 6.23 + AbstractVideoRecorder videoRecorder; 6.24 + 6.25 + if (file.getCanonicalPath().endsWith(".avi")){ 6.26 + videoRecorder = new AVIVideoRecorder(file);} 6.27 + else { videoRecorder = new XuggleVideoRecorder(file); } 6.28 + 6.29 app.getStateManager().attach(videoRecorder); 6.30 app.getViewPort().addFinalProcessor(videoRecorder); 6.31 }
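A sketch (not part of the changeset) of how this dispatch is exercised from a jME3 application; compare the HelloVideo hunk further down. The extension of the file handed to SimpleCaptureVideo decides which recorder gets attached. The output path is arbitrary, and the SimpleApplication boilerplate is assumed from the standard jME3 API.

import java.io.File;
import java.io.IOException;

import com.aurellem.capture.Capture;
import com.jme3.app.SimpleApplication;

public class CaptureSketch extends SimpleApplication {

    public static void main(String[] args) {
        new CaptureSketch().start();
    }

    @Override
    public void simpleInitApp() {
        try {
            // ".avi" selects AVIVideoRecorder; any other extension,
            // e.g. ".flv", falls through to XuggleVideoRecorder.
            Capture.SimpleCaptureVideo(this, new File("/tmp/capture.avi"));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}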
7.1 --- a/src/com/aurellem/capture/Main.java Wed Oct 26 08:54:12 2011 -0700 7.2 +++ b/src/com/aurellem/capture/Main.java Wed Oct 26 09:38:27 2011 -0700 7.3 @@ -16,7 +16,7 @@ 7.4 import java.io.*; 7.5 import java.util.Random; 7.6 7.7 -import com.aurellem.capture.video.AVIOutputStream; 7.8 +import ca.randelshofer.AVIOutputStream; 7.9 7.10 7.11 /**
8.1 --- a/src/com/aurellem/capture/audio/SeekableByteArrayOutputStream.java Wed Oct 26 08:54:12 2011 -0700 8.2 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 8.3 @@ -1,153 +0,0 @@ 8.4 -/* 8.5 - * @(#)SeekableByteArrayOutputStream.java 1.0 2010-12-27 8.6 - * 8.7 - * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland. 8.8 - * All rights reserved. 8.9 - * 8.10 - * You may not use, copy or modify this file, except in compliance with the 8.11 - * license agreement you entered into with Werner Randelshofer. 8.12 - * For details see accompanying license terms. 8.13 - */ 8.14 - 8.15 -package com.aurellem.capture.audio; 8.16 - 8.17 -import java.io.ByteArrayOutputStream; 8.18 -import java.io.IOException; 8.19 -import java.io.OutputStream; 8.20 -import java.util.Arrays; 8.21 -import static java.lang.Math.*; 8.22 -/** 8.23 - * {@code SeekableByteArrayOutputStream}. 8.24 - * 8.25 - * @author Werner Randelshofer 8.26 - * @version 1.0 2010-12-27 Created. 8.27 - */ 8.28 -public class SeekableByteArrayOutputStream extends ByteArrayOutputStream { 8.29 - 8.30 - /** 8.31 - * The current stream position. 8.32 - */ 8.33 - private int pos; 8.34 - 8.35 - /** 8.36 - * Creates a new byte array output stream. The buffer capacity is 8.37 - * initially 32 bytes, though its size increases if necessary. 8.38 - */ 8.39 - public SeekableByteArrayOutputStream() { 8.40 - this(32); 8.41 - } 8.42 - 8.43 - /** 8.44 - * Creates a new byte array output stream, with a buffer capacity of 8.45 - * the specified size, in bytes. 8.46 - * 8.47 - * @param size the initial size. 8.48 - * @exception IllegalArgumentException if size is negative. 8.49 - */ 8.50 - public SeekableByteArrayOutputStream(int size) { 8.51 - if (size < 0) { 8.52 - throw new IllegalArgumentException("Negative initial size: " 8.53 - + size); 8.54 - } 8.55 - buf = new byte[size]; 8.56 - } 8.57 - 8.58 - /** 8.59 - * Writes the specified byte to this byte array output stream. 8.60 - * 8.61 - * @param b the byte to be written. 8.62 - */ 8.63 - @Override 8.64 - public synchronized void write(int b) { 8.65 - int newcount = max(pos + 1, count); 8.66 - if (newcount > buf.length) { 8.67 - buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); 8.68 - } 8.69 - buf[pos++] = (byte)b; 8.70 - count = newcount; 8.71 - } 8.72 - 8.73 - /** 8.74 - * Writes <code>len</code> bytes from the specified byte array 8.75 - * starting at offset <code>off</code> to this byte array output stream. 8.76 - * 8.77 - * @param b the data. 8.78 - * @param off the start offset in the data. 8.79 - * @param len the number of bytes to write. 8.80 - */ 8.81 - @Override 8.82 - public synchronized void write(byte b[], int off, int len) { 8.83 - if ((off < 0) || (off > b.length) || (len < 0) || 8.84 - ((off + len) > b.length) || ((off + len) < 0)) { 8.85 - throw new IndexOutOfBoundsException(); 8.86 - } else if (len == 0) { 8.87 - return; 8.88 - } 8.89 - int newcount = max(pos+len,count); 8.90 - if (newcount > buf.length) { 8.91 - buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); 8.92 - } 8.93 - System.arraycopy(b, off, buf, pos, len); 8.94 - pos+=len; 8.95 - count = newcount; 8.96 - } 8.97 - 8.98 - /** 8.99 - * Resets the <code>count</code> field of this byte array output 8.100 - * stream to zero, so that all currently accumulated output in the 8.101 - * output stream is discarded. The output stream can be used again, 8.102 - * reusing the already allocated buffer space. 
8.103 - * 8.104 - * @see java.io.ByteArrayInputStream#count 8.105 - */ 8.106 - @Override 8.107 - public synchronized void reset() { 8.108 - count = 0; 8.109 - pos=0; 8.110 - } 8.111 - 8.112 - /** 8.113 - * Sets the current stream position to the desired location. The 8.114 - * next read will occur at this location. The bit offset is set 8.115 - * to 0. 8.116 - * 8.117 - * <p> An <code>IndexOutOfBoundsException</code> will be thrown if 8.118 - * <code>pos</code> is smaller than the flushed position (as 8.119 - * returned by <code>getflushedPosition</code>). 8.120 - * 8.121 - * <p> It is legal to seek past the end of the file; an 8.122 - * <code>EOFException</code> will be thrown only if a read is 8.123 - * performed. 8.124 - * 8.125 - * @param pos a <code>long</code> containing the desired file 8.126 - * pointer position. 8.127 - * 8.128 - * @exception IndexOutOfBoundsException if <code>pos</code> is smaller 8.129 - * than the flushed position. 8.130 - * @exception IOException if any other I/O error occurs. 8.131 - */ 8.132 - public void seek(long pos) throws IOException { 8.133 - this.pos = (int)pos; 8.134 - } 8.135 - 8.136 - /** 8.137 - * Returns the current byte position of the stream. The next write 8.138 - * will take place starting at this offset. 8.139 - * 8.140 - * @return a long containing the position of the stream. 8.141 - * 8.142 - * @exception IOException if an I/O error occurs. 8.143 - */ 8.144 - public long getStreamPosition() throws IOException { 8.145 - return pos; 8.146 - } 8.147 - 8.148 - /** Writes the contents of the byte array into the specified output 8.149 - * stream. 8.150 - * @param out 8.151 - */ 8.152 - public void toOutputStream(OutputStream out) throws IOException { 8.153 - out.write(buf, 0, count); 8.154 - } 8.155 - 8.156 -}
9.1 --- a/src/com/aurellem/capture/hello/HelloVideo.java Wed Oct 26 08:54:12 2011 -0700 9.2 +++ b/src/com/aurellem/capture/hello/HelloVideo.java Wed Oct 26 09:38:27 2011 -0700 9.3 @@ -5,13 +5,11 @@ 9.4 9.5 import com.aurellem.capture.Capture; 9.6 import com.aurellem.capture.IsoTimer; 9.7 -import com.aurellem.capture.video.AVIVideoRecorder; 9.8 import com.aurellem.capture.video.AbstractVideoRecorder; 9.9 import com.jme3.app.SimpleApplication; 9.10 import com.jme3.material.Material; 9.11 import com.jme3.math.ColorRGBA; 9.12 import com.jme3.math.Vector3f; 9.13 -import com.jme3.renderer.ViewPort; 9.14 import com.jme3.scene.Geometry; 9.15 import com.jme3.scene.shape.Box; 9.16 9.17 @@ -22,7 +20,7 @@ 9.18 new File("/home/r/bullshit.avi"); 9.19 */ 9.20 File movingVideo = 9.21 - new File("/home/r/tmp/bullshit2.avi"); 9.22 + new File("/home/r/tmp/bullshit2.flv"); 9.23 9.24 AbstractVideoRecorder movingRecorder ; 9.25
10.1 --- a/src/com/aurellem/capture/video/AVIOutputStream.java Wed Oct 26 08:54:12 2011 -0700 10.2 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 10.3 @@ -1,1548 +0,0 @@ 10.4 -/** 10.5 - * @(#)AVIOutputStream.java 1.5.1 2011-01-17 10.6 - * 10.7 - * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland. 10.8 - * All rights reserved. 10.9 - * 10.10 - * You may not use, copy or modify this file, except in compliance with the 10.11 - * license agreement you entered into with Werner Randelshofer. 10.12 - * For details see accompanying license terms. 10.13 - */ 10.14 -package com.aurellem.capture.video; 10.15 - 10.16 -import java.awt.Dimension; 10.17 -import java.awt.image.BufferedImage; 10.18 -import java.awt.image.DataBufferByte; 10.19 -import java.awt.image.IndexColorModel; 10.20 -import java.awt.image.WritableRaster; 10.21 -import java.io.File; 10.22 -import java.io.FileInputStream; 10.23 -import java.io.IOException; 10.24 -import java.io.InputStream; 10.25 -import java.io.OutputStream; 10.26 -import java.util.Arrays; 10.27 -import java.util.Date; 10.28 -import java.util.LinkedList; 10.29 - 10.30 -import javax.imageio.IIOImage; 10.31 -import javax.imageio.ImageIO; 10.32 -import javax.imageio.ImageWriteParam; 10.33 -import javax.imageio.ImageWriter; 10.34 -import javax.imageio.stream.FileImageOutputStream; 10.35 -import javax.imageio.stream.ImageOutputStream; 10.36 -import javax.imageio.stream.MemoryCacheImageOutputStream; 10.37 - 10.38 -/** 10.39 - * This class supports writing of images into an AVI 1.0 video file. 10.40 - * <p> 10.41 - * The images are written as video frames. 10.42 - * <p> 10.43 - * Video frames can be encoded with one of the following formats: 10.44 - * <ul> 10.45 - * <li>JPEG</li> 10.46 - * <li>PNG</li> 10.47 - * <li>RAW</li> 10.48 - * <li>RLE</li> 10.49 - * </ul> 10.50 - * All frames must have the same format. 10.51 - * When JPG is used each frame can have an individual encoding quality. 10.52 - * <p> 10.53 - * All frames in an AVI file must have the same duration. The duration can 10.54 - * be set by setting an appropriate pair of values using methods 10.55 - * {@link #setFrameRate} and {@link #setTimeScale}. 10.56 - * <p> 10.57 - * The length of an AVI 1.0 file is limited to 1 GB. 10.58 - * This class supports lengths of up to 4 GB, but such files may not work on 10.59 - * all players. 10.60 - * <p> 10.61 - * For detailed information about the AVI RIFF file format see:<br> 10.62 - * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br> 10.63 - * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br> 10.64 - * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br> 10.65 - * 10.66 - * @author Werner Randelshofer 10.67 - * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream.. 10.68 - * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format. 10.69 - * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets 10.70 - * in "idx1" chunk. 10.71 - * <br>1.3.2 2010-12-27 File size limit is 1 GB. 10.72 - * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets. 10.73 - * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream. 10.74 - * Added method getVideoDimension(). 10.75 - * <br>1.2 2009-08-29 Adds support for RAW video format. 10.76 - * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih 10.77 - * chunk. 
Changed the API to reflect that AVI works with frame rates instead of 10.78 - * with frame durations. 10.79 - * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG 10.80 - * encoded video. 10.81 - * <br>1.0 2008-08-11 Created. 10.82 - */ 10.83 -public class AVIOutputStream { 10.84 - 10.85 - /** 10.86 - * Underlying output stream. 10.87 - */ 10.88 - private ImageOutputStream out; 10.89 - /** The offset of the QuickTime stream in the underlying ImageOutputStream. 10.90 - * Normally this is 0 unless the underlying stream already contained data 10.91 - * when it was passed to the constructor. 10.92 - */ 10.93 - private long streamOffset; 10.94 - /** Previous frame for delta compression. */ 10.95 - private Object previousData; 10.96 - 10.97 - /** 10.98 - * Supported video encodings. 10.99 - */ 10.100 - public static enum VideoFormat { 10.101 - 10.102 - RAW, RLE, JPG, PNG; 10.103 - } 10.104 - /** 10.105 - * Current video formats. 10.106 - */ 10.107 - private VideoFormat videoFormat; 10.108 - /** 10.109 - * Quality of JPEG encoded video frames. 10.110 - */ 10.111 - private float quality = 0.9f; 10.112 - /** 10.113 - * Creation time of the movie output stream. 10.114 - */ 10.115 - private Date creationTime; 10.116 - /** 10.117 - * Width of the video frames. All frames must have the same width. 10.118 - * The value -1 is used to mark unspecified width. 10.119 - */ 10.120 - private int imgWidth = -1; 10.121 - /** 10.122 - * Height of the video frames. All frames must have the same height. 10.123 - * The value -1 is used to mark unspecified height. 10.124 - */ 10.125 - private int imgHeight = -1; 10.126 - /** Number of bits per pixel. */ 10.127 - private int imgDepth = 24; 10.128 - /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */ 10.129 - private IndexColorModel palette; 10.130 - private IndexColorModel previousPalette; 10.131 - /** Video encoder. */ 10.132 - 10.133 - /** 10.134 - * The timeScale of the movie. 10.135 - * <p> 10.136 - * Used with frameRate to specify the time scale that this stream will use. 10.137 - * Dividing frameRate by timeScale gives the number of samples per second. 10.138 - * For video streams, this is the frame rate. For audio streams, this rate 10.139 - * corresponds to the time needed to play nBlockAlign bytes of audio, which 10.140 - * for PCM audio is the just the sample rate. 10.141 - */ 10.142 - private int timeScale = 1; 10.143 - /** 10.144 - * The frameRate of the movie in timeScale units. 10.145 - * <p> 10.146 - * @see timeScale 10.147 - */ 10.148 - private int frameRate = 30; 10.149 - /** Interval between keyframes. */ 10.150 - private int syncInterval = 30; 10.151 - 10.152 - /** 10.153 - * The states of the movie output stream. 10.154 - */ 10.155 - private static enum States { 10.156 - 10.157 - STARTED, FINISHED, CLOSED; 10.158 - } 10.159 - /** 10.160 - * The current state of the movie output stream. 10.161 - */ 10.162 - private States state = States.FINISHED; 10.163 - 10.164 - /** 10.165 - * AVI stores media data in samples. 10.166 - * A sample is a single element in a sequence of time-ordered data. 10.167 - */ 10.168 - private static class Sample { 10.169 - 10.170 - String chunkType; 10.171 - /** Offset of the sample relative to the start of the AVI file. 10.172 - */ 10.173 - long offset; 10.174 - /** Data length of the sample. */ 10.175 - long length; 10.176 - /** 10.177 - * The duration of the sample in time scale units. 10.178 - */ 10.179 - int duration; 10.180 - /** Whether the sample is a sync-sample. 
*/ 10.181 - boolean isSync; 10.182 - 10.183 - /** 10.184 - * Creates a new sample. 10.185 - * @param duration 10.186 - * @param offset 10.187 - * @param length 10.188 - */ 10.189 - public Sample(String chunkId, int duration, long offset, long length, boolean isSync) { 10.190 - this.chunkType = chunkId; 10.191 - this.duration = duration; 10.192 - this.offset = offset; 10.193 - this.length = length; 10.194 - this.isSync = isSync; 10.195 - } 10.196 - } 10.197 - /** 10.198 - * List of video frames. 10.199 - */ 10.200 - private LinkedList<Sample> videoFrames; 10.201 - /** 10.202 - * This chunk holds the whole AVI content. 10.203 - */ 10.204 - private CompositeChunk aviChunk; 10.205 - /** 10.206 - * This chunk holds the movie frames. 10.207 - */ 10.208 - private CompositeChunk moviChunk; 10.209 - /** 10.210 - * This chunk holds the AVI Main Header. 10.211 - */ 10.212 - FixedSizeDataChunk avihChunk; 10.213 - /** 10.214 - * This chunk holds the AVI Stream Header. 10.215 - */ 10.216 - FixedSizeDataChunk strhChunk; 10.217 - /** 10.218 - * This chunk holds the AVI Stream Format Header. 10.219 - */ 10.220 - FixedSizeDataChunk strfChunk; 10.221 - 10.222 - /** 10.223 - * Chunk base class. 10.224 - */ 10.225 - private abstract class Chunk { 10.226 - 10.227 - /** 10.228 - * The chunkType of the chunk. A String with the length of 4 characters. 10.229 - */ 10.230 - protected String chunkType; 10.231 - /** 10.232 - * The offset of the chunk relative to the start of the 10.233 - * ImageOutputStream. 10.234 - */ 10.235 - protected long offset; 10.236 - 10.237 - /** 10.238 - * Creates a new Chunk at the current position of the ImageOutputStream. 10.239 - * @param chunkType The chunkType of the chunk. A string with a length of 4 characters. 10.240 - */ 10.241 - public Chunk(String chunkType) throws IOException { 10.242 - this.chunkType = chunkType; 10.243 - offset = getRelativeStreamPosition(); 10.244 - } 10.245 - 10.246 - /** 10.247 - * Writes the chunk to the ImageOutputStream and disposes it. 10.248 - */ 10.249 - public abstract void finish() throws IOException; 10.250 - 10.251 - /** 10.252 - * Returns the size of the chunk including the size of the chunk header. 10.253 - * @return The size of the chunk. 10.254 - */ 10.255 - public abstract long size(); 10.256 - } 10.257 - 10.258 - /** 10.259 - * A CompositeChunk contains an ordered list of Chunks. 10.260 - */ 10.261 - private class CompositeChunk extends Chunk { 10.262 - 10.263 - /** 10.264 - * The type of the composite. A String with the length of 4 characters. 10.265 - */ 10.266 - protected String compositeType; 10.267 - private LinkedList<Chunk> children; 10.268 - private boolean finished; 10.269 - 10.270 - /** 10.271 - * Creates a new CompositeChunk at the current position of the 10.272 - * ImageOutputStream. 10.273 - * @param compositeType The type of the composite. 10.274 - * @param chunkType The type of the chunk. 
10.275 - */ 10.276 - public CompositeChunk(String compositeType, String chunkType) throws IOException { 10.277 - super(chunkType); 10.278 - this.compositeType = compositeType; 10.279 - //out.write 10.280 - out.writeLong(0); // make room for the chunk header 10.281 - out.writeInt(0); // make room for the chunk header 10.282 - children = new LinkedList<Chunk>(); 10.283 - } 10.284 - 10.285 - public void add(Chunk child) throws IOException { 10.286 - if (children.size() > 0) { 10.287 - children.getLast().finish(); 10.288 - } 10.289 - children.add(child); 10.290 - } 10.291 - 10.292 - /** 10.293 - * Writes the chunk and all its children to the ImageOutputStream 10.294 - * and disposes of all resources held by the chunk. 10.295 - * @throws java.io.IOException 10.296 - */ 10.297 - @Override 10.298 - public void finish() throws IOException { 10.299 - if (!finished) { 10.300 - if (size() > 0xffffffffL) { 10.301 - throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size()); 10.302 - } 10.303 - 10.304 - long pointer = getRelativeStreamPosition(); 10.305 - seekRelative(offset); 10.306 - 10.307 - DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); 10.308 - headerData.writeType(compositeType); 10.309 - headerData.writeUInt(size() - 8); 10.310 - headerData.writeType(chunkType); 10.311 - for (Chunk child : children) { 10.312 - child.finish(); 10.313 - } 10.314 - seekRelative(pointer); 10.315 - if (size() % 2 == 1) { 10.316 - out.writeByte(0); // write pad byte 10.317 - } 10.318 - finished = true; 10.319 - } 10.320 - } 10.321 - 10.322 - @Override 10.323 - public long size() { 10.324 - long length = 12; 10.325 - for (Chunk child : children) { 10.326 - length += child.size() + child.size() % 2; 10.327 - } 10.328 - return length; 10.329 - } 10.330 - } 10.331 - 10.332 - /** 10.333 - * Data Chunk. 10.334 - */ 10.335 - private class DataChunk extends Chunk { 10.336 - 10.337 - private DataChunkOutputStream data; 10.338 - private boolean finished; 10.339 - 10.340 - /** 10.341 - * Creates a new DataChunk at the current position of the 10.342 - * ImageOutputStream. 10.343 - * @param chunkType The chunkType of the chunk. 
10.344 - */ 10.345 - public DataChunk(String name) throws IOException { 10.346 - super(name); 10.347 - out.writeLong(0); // make room for the chunk header 10.348 - data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false); 10.349 - } 10.350 - 10.351 - public DataChunkOutputStream getOutputStream() { 10.352 - if (finished) { 10.353 - throw new IllegalStateException("DataChunk is finished"); 10.354 - } 10.355 - return data; 10.356 - } 10.357 - 10.358 - /** 10.359 - * Returns the offset of this chunk to the beginning of the random access file 10.360 - * @return 10.361 - */ 10.362 - public long getOffset() { 10.363 - return offset; 10.364 - } 10.365 - 10.366 - @Override 10.367 - public void finish() throws IOException { 10.368 - if (!finished) { 10.369 - long sizeBefore = size(); 10.370 - 10.371 - if (size() > 0xffffffffL) { 10.372 - throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size()); 10.373 - } 10.374 - 10.375 - long pointer = getRelativeStreamPosition(); 10.376 - seekRelative(offset); 10.377 - 10.378 - DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); 10.379 - headerData.writeType(chunkType); 10.380 - headerData.writeUInt(size() - 8); 10.381 - seekRelative(pointer); 10.382 - if (size() % 2 == 1) { 10.383 - out.writeByte(0); // write pad byte 10.384 - } 10.385 - finished = true; 10.386 - long sizeAfter = size(); 10.387 - if (sizeBefore != sizeAfter) { 10.388 - System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter); 10.389 - } 10.390 - } 10.391 - } 10.392 - 10.393 - @Override 10.394 - public long size() { 10.395 - return 8 + data.size(); 10.396 - } 10.397 - } 10.398 - 10.399 - /** 10.400 - * A DataChunk with a fixed size. 10.401 - */ 10.402 - private class FixedSizeDataChunk extends Chunk { 10.403 - 10.404 - private DataChunkOutputStream data; 10.405 - private boolean finished; 10.406 - private long fixedSize; 10.407 - 10.408 - /** 10.409 - * Creates a new DataChunk at the current position of the 10.410 - * ImageOutputStream. 10.411 - * @param chunkType The chunkType of the chunk. 
10.412 - */ 10.413 - public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException { 10.414 - super(chunkType); 10.415 - this.fixedSize = fixedSize; 10.416 - data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); 10.417 - data.writeType(chunkType); 10.418 - data.writeUInt(fixedSize); 10.419 - data.clearCount(); 10.420 - 10.421 - // Fill fixed size with nulls 10.422 - byte[] buf = new byte[(int) Math.min(512, fixedSize)]; 10.423 - long written = 0; 10.424 - while (written < fixedSize) { 10.425 - data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written)); 10.426 - written += Math.min(buf.length, fixedSize - written); 10.427 - } 10.428 - if (fixedSize % 2 == 1) { 10.429 - out.writeByte(0); // write pad byte 10.430 - } 10.431 - seekToStartOfData(); 10.432 - } 10.433 - 10.434 - public DataChunkOutputStream getOutputStream() { 10.435 - /*if (finished) { 10.436 - throw new IllegalStateException("DataChunk is finished"); 10.437 - }*/ 10.438 - return data; 10.439 - } 10.440 - 10.441 - /** 10.442 - * Returns the offset of this chunk to the beginning of the random access file 10.443 - * @return 10.444 - */ 10.445 - public long getOffset() { 10.446 - return offset; 10.447 - } 10.448 - 10.449 - public void seekToStartOfData() throws IOException { 10.450 - seekRelative(offset + 8); 10.451 - data.clearCount(); 10.452 - } 10.453 - 10.454 - public void seekToEndOfChunk() throws IOException { 10.455 - seekRelative(offset + 8 + fixedSize + fixedSize % 2); 10.456 - } 10.457 - 10.458 - @Override 10.459 - public void finish() throws IOException { 10.460 - if (!finished) { 10.461 - finished = true; 10.462 - } 10.463 - } 10.464 - 10.465 - @Override 10.466 - public long size() { 10.467 - return 8 + fixedSize; 10.468 - } 10.469 - } 10.470 - 10.471 - /** 10.472 - * Creates a new AVI file with the specified video format and 10.473 - * frame rate. The video has 24 bits per pixel. 10.474 - * 10.475 - * @param file the output file 10.476 - * @param format Selects an encoder for the video format. 10.477 - * @param bitsPerPixel the number of bits per pixel. 10.478 - * @exception IllegalArgumentException if videoFormat is null or if 10.479 - * frame rate is <= 0 10.480 - */ 10.481 - public AVIOutputStream(File file, VideoFormat format) throws IOException { 10.482 - this(file,format,24); 10.483 - } 10.484 - /** 10.485 - * Creates a new AVI file with the specified video format and 10.486 - * frame rate. 10.487 - * 10.488 - * @param file the output file 10.489 - * @param format Selects an encoder for the video format. 10.490 - * @param bitsPerPixel the number of bits per pixel. 
10.491 - * @exception IllegalArgumentException if videoFormat is null or if 10.492 - * frame rate is <= 0 10.493 - */ 10.494 - public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException { 10.495 - if (format == null) { 10.496 - throw new IllegalArgumentException("format must not be null"); 10.497 - } 10.498 - 10.499 - if (file.exists()) { 10.500 - file.delete(); 10.501 - } 10.502 - this.out = new FileImageOutputStream(file); 10.503 - this.streamOffset = 0; 10.504 - this.videoFormat = format; 10.505 - this.videoFrames = new LinkedList<Sample>(); 10.506 - this.imgDepth = bitsPerPixel; 10.507 - if (imgDepth == 4) { 10.508 - byte[] gray = new byte[16]; 10.509 - for (int i = 0; i < gray.length; i++) { 10.510 - gray[i] = (byte) ((i << 4) | i); 10.511 - } 10.512 - palette = new IndexColorModel(4, 16, gray, gray, gray); 10.513 - } else if (imgDepth == 8) { 10.514 - byte[] gray = new byte[256]; 10.515 - for (int i = 0; i < gray.length; i++) { 10.516 - gray[i] = (byte) i; 10.517 - } 10.518 - palette = new IndexColorModel(8, 256, gray, gray, gray); 10.519 - } 10.520 - 10.521 - } 10.522 - 10.523 - /** 10.524 - * Creates a new AVI output stream with the specified video format and 10.525 - * framerate. 10.526 - * 10.527 - * @param out the underlying output stream 10.528 - * @param format Selects an encoder for the video format. 10.529 - * @exception IllegalArgumentException if videoFormat is null or if 10.530 - * framerate is <= 0 10.531 - */ 10.532 - public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException { 10.533 - if (format == null) { 10.534 - throw new IllegalArgumentException("format must not be null"); 10.535 - } 10.536 - this.out = out; 10.537 - this.streamOffset = out.getStreamPosition(); 10.538 - this.videoFormat = format; 10.539 - this.videoFrames = new LinkedList<Sample>(); 10.540 - } 10.541 - 10.542 - /** 10.543 - * Used with frameRate to specify the time scale that this stream will use. 10.544 - * Dividing frameRate by timeScale gives the number of samples per second. 10.545 - * For video streams, this is the frame rate. For audio streams, this rate 10.546 - * corresponds to the time needed to play nBlockAlign bytes of audio, which 10.547 - * for PCM audio is the just the sample rate. 10.548 - * <p> 10.549 - * The default value is 1. 10.550 - * 10.551 - * @param newValue 10.552 - */ 10.553 - public void setTimeScale(int newValue) { 10.554 - if (newValue <= 0) { 10.555 - throw new IllegalArgumentException("timeScale must be greater 0"); 10.556 - } 10.557 - this.timeScale = newValue; 10.558 - } 10.559 - 10.560 - /** 10.561 - * Returns the time scale of this media. 10.562 - * 10.563 - * @return time scale 10.564 - */ 10.565 - public int getTimeScale() { 10.566 - return timeScale; 10.567 - } 10.568 - 10.569 - /** 10.570 - * Sets the rate of video frames in time scale units. 10.571 - * <p> 10.572 - * The default value is 30. Together with the default value 1 of timeScale 10.573 - * this results in 30 frames pers second. 10.574 - * 10.575 - * @param newValue 10.576 - */ 10.577 - public void setFrameRate(int newValue) { 10.578 - if (newValue <= 0) { 10.579 - throw new IllegalArgumentException("frameDuration must be greater 0"); 10.580 - } 10.581 - if (state == States.STARTED) { 10.582 - throw new IllegalStateException("frameDuration must be set before the first frame is written"); 10.583 - } 10.584 - this.frameRate = newValue; 10.585 - } 10.586 - 10.587 - /** 10.588 - * Returns the frame rate of this media. 
10.589 - * 10.590 - * @return frame rate 10.591 - */ 10.592 - public int getFrameRate() { 10.593 - return frameRate; 10.594 - } 10.595 - 10.596 - /** Sets the global color palette. */ 10.597 - public void setPalette(IndexColorModel palette) { 10.598 - this.palette = palette; 10.599 - } 10.600 - 10.601 - /** 10.602 - * Sets the compression quality of the video track. 10.603 - * A value of 0 stands for "high compression is important" a value of 10.604 - * 1 for "high image quality is important". 10.605 - * <p> 10.606 - * Changing this value affects frames which are subsequently written 10.607 - * to the AVIOutputStream. Frames which have already been written 10.608 - * are not changed. 10.609 - * <p> 10.610 - * This value has only effect on videos encoded with JPG format. 10.611 - * <p> 10.612 - * The default value is 0.9. 10.613 - * 10.614 - * @param newValue 10.615 - */ 10.616 - public void setVideoCompressionQuality(float newValue) { 10.617 - this.quality = newValue; 10.618 - } 10.619 - 10.620 - /** 10.621 - * Returns the video compression quality. 10.622 - * 10.623 - * @return video compression quality 10.624 - */ 10.625 - public float getVideoCompressionQuality() { 10.626 - return quality; 10.627 - } 10.628 - 10.629 - /** 10.630 - * Sets the dimension of the video track. 10.631 - * <p> 10.632 - * You need to explicitly set the dimension, if you add all frames from 10.633 - * files or input streams. 10.634 - * <p> 10.635 - * If you add frames from buffered images, then AVIOutputStream 10.636 - * can determine the video dimension from the image width and height. 10.637 - * 10.638 - * @param width Must be greater than 0. 10.639 - * @param height Must be greater than 0. 10.640 - */ 10.641 - public void setVideoDimension(int width, int height) { 10.642 - if (width < 1 || height < 1) { 10.643 - throw new IllegalArgumentException("width and height must be greater zero."); 10.644 - } 10.645 - this.imgWidth = width; 10.646 - this.imgHeight = height; 10.647 - } 10.648 - 10.649 - /** 10.650 - * Gets the dimension of the video track. 10.651 - * <p> 10.652 - * Returns null if the dimension is not known. 10.653 - */ 10.654 - public Dimension getVideoDimension() { 10.655 - if (imgWidth < 1 || imgHeight < 1) { 10.656 - return null; 10.657 - } 10.658 - return new Dimension(imgWidth, imgHeight); 10.659 - } 10.660 - 10.661 - /** 10.662 - * Sets the state of the QuickTimeOutpuStream to started. 10.663 - * <p> 10.664 - * If the state is changed by this method, the prolog is 10.665 - * written. 10.666 - */ 10.667 - private void ensureStarted() throws IOException { 10.668 - if (state != States.STARTED) { 10.669 - creationTime = new Date(); 10.670 - writeProlog(); 10.671 - state = States.STARTED; 10.672 - } 10.673 - } 10.674 - 10.675 - /** 10.676 - * Writes a frame to the video track. 10.677 - * <p> 10.678 - * If the dimension of the video track has not been specified yet, it 10.679 - * is derived from the first buffered image added to the AVIOutputStream. 10.680 - * 10.681 - * @param image The frame image. 10.682 - * 10.683 - * @throws IllegalArgumentException if the duration is less than 1, or 10.684 - * if the dimension of the frame does not match the dimension of the video 10.685 - * track. 10.686 - * @throws IOException if writing the image failed. 
10.687 - */ 10.688 - public void writeFrame(BufferedImage image) throws IOException { 10.689 - ensureOpen(); 10.690 - ensureStarted(); 10.691 - 10.692 - // Get the dimensions of the first image 10.693 - if (imgWidth == -1) { 10.694 - imgWidth = image.getWidth(); 10.695 - imgHeight = image.getHeight(); 10.696 - } else { 10.697 - // The dimension of the image must match the dimension of the video track 10.698 - if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) { 10.699 - throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size() 10.700 - + "] (width=" + image.getWidth() + ", height=" + image.getHeight() 10.701 - + ") differs from image[0] (width=" 10.702 - + imgWidth + ", height=" + imgHeight); 10.703 - } 10.704 - } 10.705 - 10.706 - DataChunk videoFrameChunk; 10.707 - long offset = getRelativeStreamPosition(); 10.708 - boolean isSync = true; 10.709 - switch (videoFormat) { 10.710 - case RAW: { 10.711 - switch (imgDepth) { 10.712 - case 4: { 10.713 - IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); 10.714 - int[] imgRGBs = new int[16]; 10.715 - imgPalette.getRGBs(imgRGBs); 10.716 - int[] previousRGBs = new int[16]; 10.717 - if (previousPalette == null) { 10.718 - previousPalette = palette; 10.719 - } 10.720 - previousPalette.getRGBs(previousRGBs); 10.721 - if (!Arrays.equals(imgRGBs, previousRGBs)) { 10.722 - previousPalette = imgPalette; 10.723 - DataChunk paletteChangeChunk = new DataChunk("00pc"); 10.724 - /* 10.725 - int first = imgPalette.getMapSize(); 10.726 - int last = -1; 10.727 - for (int i = 0; i < 16; i++) { 10.728 - if (previousRGBs[i] != imgRGBs[i] && i < first) { 10.729 - first = i; 10.730 - } 10.731 - if (previousRGBs[i] != imgRGBs[i] && i > last) { 10.732 - last = i; 10.733 - } 10.734 - }*/ 10.735 - int first = 0; 10.736 - int last = imgPalette.getMapSize() - 1; 10.737 - /* 10.738 - * typedef struct { 10.739 - BYTE bFirstEntry; 10.740 - BYTE bNumEntries; 10.741 - WORD wFlags; 10.742 - PALETTEENTRY peNew[]; 10.743 - } AVIPALCHANGE; 10.744 - * 10.745 - * typedef struct tagPALETTEENTRY { 10.746 - BYTE peRed; 10.747 - BYTE peGreen; 10.748 - BYTE peBlue; 10.749 - BYTE peFlags; 10.750 - } PALETTEENTRY; 10.751 - */ 10.752 - DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); 10.753 - pOut.writeByte(first);//bFirstEntry 10.754 - pOut.writeByte(last - first + 1);//bNumEntries 10.755 - pOut.writeShort(0);//wFlags 10.756 - 10.757 - for (int i = first; i <= last; i++) { 10.758 - pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red 10.759 - pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green 10.760 - pOut.writeByte(imgRGBs[i] & 0xff); // blue 10.761 - pOut.writeByte(0); // reserved*/ 10.762 - } 10.763 - 10.764 - moviChunk.add(paletteChangeChunk); 10.765 - paletteChangeChunk.finish(); 10.766 - long length = getRelativeStreamPosition() - offset; 10.767 - videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); 10.768 - offset = getRelativeStreamPosition(); 10.769 - } 10.770 - 10.771 - videoFrameChunk = new DataChunk("00db"); 10.772 - byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); 10.773 - byte[] rgb4 = new byte[imgWidth / 2]; 10.774 - for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down 10.775 - for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) { 10.776 - rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf)); 10.777 - } 10.778 - videoFrameChunk.getOutputStream().write(rgb4); 10.779 - } 10.780 - break; 
10.781 - } 10.782 - case 8: { 10.783 - IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); 10.784 - int[] imgRGBs = new int[256]; 10.785 - imgPalette.getRGBs(imgRGBs); 10.786 - int[] previousRGBs = new int[256]; 10.787 - if (previousPalette == null) { 10.788 - previousPalette = palette; 10.789 - } 10.790 - previousPalette.getRGBs(previousRGBs); 10.791 - if (!Arrays.equals(imgRGBs, previousRGBs)) { 10.792 - previousPalette = imgPalette; 10.793 - DataChunk paletteChangeChunk = new DataChunk("00pc"); 10.794 - /* 10.795 - int first = imgPalette.getMapSize(); 10.796 - int last = -1; 10.797 - for (int i = 0; i < 16; i++) { 10.798 - if (previousRGBs[i] != imgRGBs[i] && i < first) { 10.799 - first = i; 10.800 - } 10.801 - if (previousRGBs[i] != imgRGBs[i] && i > last) { 10.802 - last = i; 10.803 - } 10.804 - }*/ 10.805 - int first = 0; 10.806 - int last = imgPalette.getMapSize() - 1; 10.807 - /* 10.808 - * typedef struct { 10.809 - BYTE bFirstEntry; 10.810 - BYTE bNumEntries; 10.811 - WORD wFlags; 10.812 - PALETTEENTRY peNew[]; 10.813 - } AVIPALCHANGE; 10.814 - * 10.815 - * typedef struct tagPALETTEENTRY { 10.816 - BYTE peRed; 10.817 - BYTE peGreen; 10.818 - BYTE peBlue; 10.819 - BYTE peFlags; 10.820 - } PALETTEENTRY; 10.821 - */ 10.822 - DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); 10.823 - pOut.writeByte(first);//bFirstEntry 10.824 - pOut.writeByte(last - first + 1);//bNumEntries 10.825 - pOut.writeShort(0);//wFlags 10.826 - 10.827 - for (int i = first; i <= last; i++) { 10.828 - pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red 10.829 - pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green 10.830 - pOut.writeByte(imgRGBs[i] & 0xff); // blue 10.831 - pOut.writeByte(0); // reserved*/ 10.832 - } 10.833 - 10.834 - moviChunk.add(paletteChangeChunk); 10.835 - paletteChangeChunk.finish(); 10.836 - long length = getRelativeStreamPosition() - offset; 10.837 - videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); 10.838 - offset = getRelativeStreamPosition(); 10.839 - } 10.840 - 10.841 - videoFrameChunk = new DataChunk("00db"); 10.842 - byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); 10.843 - for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down 10.844 - videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth); 10.845 - } 10.846 - break; 10.847 - } 10.848 - default: { 10.849 - videoFrameChunk = new DataChunk("00db"); 10.850 - WritableRaster raster = image.getRaster(); 10.851 - int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data 10.852 - byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data 10.853 - for (int y = imgHeight - 1; y >= 0; --y) { // Upside down 10.854 - raster.getPixels(0, y, imgWidth, 1, raw); 10.855 - for (int x = 0, n = imgWidth * 3; x < n; x += 3) { 10.856 - bytes[x + 2] = (byte) raw[x]; // Blue 10.857 - bytes[x + 1] = (byte) raw[x + 1]; // Green 10.858 - bytes[x] = (byte) raw[x + 2]; // Red 10.859 - } 10.860 - videoFrameChunk.getOutputStream().write(bytes); 10.861 - } 10.862 - break; 10.863 - } 10.864 - } 10.865 - break; 10.866 - } 10.867 - 10.868 - case JPG: { 10.869 - videoFrameChunk = new DataChunk("00dc"); 10.870 - ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next(); 10.871 - ImageWriteParam iwParam = iw.getDefaultWriteParam(); 10.872 - iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); 10.873 - 
iwParam.setCompressionQuality(quality); 10.874 - MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); 10.875 - iw.setOutput(imgOut); 10.876 - IIOImage img = new IIOImage(image, null, null); 10.877 - iw.write(null, img, iwParam); 10.878 - iw.dispose(); 10.879 - break; 10.880 - } 10.881 - case PNG: 10.882 - default: { 10.883 - videoFrameChunk = new DataChunk("00dc"); 10.884 - ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next(); 10.885 - ImageWriteParam iwParam = iw.getDefaultWriteParam(); 10.886 - MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); 10.887 - iw.setOutput(imgOut); 10.888 - IIOImage img = new IIOImage(image, null, null); 10.889 - iw.write(null, img, iwParam); 10.890 - iw.dispose(); 10.891 - break; 10.892 - } 10.893 - } 10.894 - long length = getRelativeStreamPosition() - offset; 10.895 - moviChunk.add(videoFrameChunk); 10.896 - videoFrameChunk.finish(); 10.897 - 10.898 - videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync)); 10.899 - if (getRelativeStreamPosition() > 1L << 32) { 10.900 - throw new IOException("AVI file is larger than 4 GB"); 10.901 - } 10.902 - } 10.903 - 10.904 - /** 10.905 - * Writes a frame from a file to the video track. 10.906 - * <p> 10.907 - * This method does not inspect the contents of the file. 10.908 - * For example, Its your responsibility to only add JPG files if you have 10.909 - * chosen the JPEG video format. 10.910 - * <p> 10.911 - * If you add all frames from files or from input streams, then you 10.912 - * have to explicitly set the dimension of the video track before you 10.913 - * call finish() or close(). 10.914 - * 10.915 - * @param file The file which holds the image data. 10.916 - * 10.917 - * @throws IllegalStateException if the duration is less than 1. 10.918 - * @throws IOException if writing the image failed. 10.919 - */ 10.920 - public void writeFrame(File file) throws IOException { 10.921 - FileInputStream in = null; 10.922 - try { 10.923 - in = new FileInputStream(file); 10.924 - writeFrame(in); 10.925 - } finally { 10.926 - if (in != null) { 10.927 - in.close(); 10.928 - } 10.929 - } 10.930 - } 10.931 - 10.932 - /** 10.933 - * Writes a frame to the video track. 10.934 - * <p> 10.935 - * This method does not inspect the contents of the file. 10.936 - * For example, its your responsibility to only add JPG files if you have 10.937 - * chosen the JPEG video format. 10.938 - * <p> 10.939 - * If you add all frames from files or from input streams, then you 10.940 - * have to explicitly set the dimension of the video track before you 10.941 - * call finish() or close(). 10.942 - * 10.943 - * @param in The input stream which holds the image data. 10.944 - * 10.945 - * @throws IllegalArgumentException if the duration is less than 1. 10.946 - * @throws IOException if writing the image failed. 10.947 - */ 10.948 - public void writeFrame(InputStream in) throws IOException { 10.949 - ensureOpen(); 10.950 - ensureStarted(); 10.951 - 10.952 - DataChunk videoFrameChunk = new DataChunk( 10.953 - videoFormat == VideoFormat.RAW ? 
"00db" : "00dc"); 10.954 - moviChunk.add(videoFrameChunk); 10.955 - OutputStream mdatOut = videoFrameChunk.getOutputStream(); 10.956 - long offset = getRelativeStreamPosition(); 10.957 - byte[] buf = new byte[512]; 10.958 - int len; 10.959 - while ((len = in.read(buf)) != -1) { 10.960 - mdatOut.write(buf, 0, len); 10.961 - } 10.962 - long length = getRelativeStreamPosition() - offset; 10.963 - videoFrameChunk.finish(); 10.964 - videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true)); 10.965 - if (getRelativeStreamPosition() > 1L << 32) { 10.966 - throw new IOException("AVI file is larger than 4 GB"); 10.967 - } 10.968 - } 10.969 - 10.970 - /** 10.971 - * Closes the movie file as well as the stream being filtered. 10.972 - * 10.973 - * @exception IOException if an I/O error has occurred 10.974 - */ 10.975 - public void close() throws IOException { 10.976 - if (state == States.STARTED) { 10.977 - finish(); 10.978 - } 10.979 - if (state != States.CLOSED) { 10.980 - out.close(); 10.981 - state = States.CLOSED; 10.982 - } 10.983 - } 10.984 - 10.985 - /** 10.986 - * Finishes writing the contents of the AVI output stream without closing 10.987 - * the underlying stream. Use this method when applying multiple filters 10.988 - * in succession to the same output stream. 10.989 - * 10.990 - * @exception IllegalStateException if the dimension of the video track 10.991 - * has not been specified or determined yet. 10.992 - * @exception IOException if an I/O exception has occurred 10.993 - */ 10.994 - public void finish() throws IOException { 10.995 - ensureOpen(); 10.996 - if (state != States.FINISHED) { 10.997 - if (imgWidth == -1 || imgHeight == -1) { 10.998 - throw new IllegalStateException("image width and height must be specified"); 10.999 - } 10.1000 - 10.1001 - moviChunk.finish(); 10.1002 - writeEpilog(); 10.1003 - state = States.FINISHED; 10.1004 - imgWidth = imgHeight = -1; 10.1005 - } 10.1006 - } 10.1007 - 10.1008 - /** 10.1009 - * Check to make sure that this stream has not been closed 10.1010 - */ 10.1011 - private void ensureOpen() throws IOException { 10.1012 - if (state == States.CLOSED) { 10.1013 - throw new IOException("Stream closed"); 10.1014 - } 10.1015 - } 10.1016 - 10.1017 - /** Gets the position relative to the beginning of the QuickTime stream. 10.1018 - * <p> 10.1019 - * Usually this value is equal to the stream position of the underlying 10.1020 - * ImageOutputStream, but can be larger if the underlying stream already 10.1021 - * contained data. 10.1022 - * 10.1023 - * @return The relative stream position. 10.1024 - * @throws IOException 10.1025 - */ 10.1026 - private long getRelativeStreamPosition() throws IOException { 10.1027 - return out.getStreamPosition() - streamOffset; 10.1028 - } 10.1029 - 10.1030 - /** Seeks relative to the beginning of the QuickTime stream. 10.1031 - * <p> 10.1032 - * Usually this equal to seeking in the underlying ImageOutputStream, but 10.1033 - * can be different if the underlying stream already contained data. 
10.1034 - * 10.1035 - */ 10.1036 - private void seekRelative(long newPosition) throws IOException { 10.1037 - out.seek(newPosition + streamOffset); 10.1038 - } 10.1039 - 10.1040 - private void writeProlog() throws IOException { 10.1041 - // The file has the following structure: 10.1042 - // 10.1043 - // .RIFF AVI 10.1044 - // ..avih (AVI Header Chunk) 10.1045 - // ..LIST strl 10.1046 - // ...strh (Stream Header Chunk) 10.1047 - // ...strf (Stream Format Chunk) 10.1048 - // ..LIST movi 10.1049 - // ...00dc (Compressed video data chunk in Track 00, repeated for each frame) 10.1050 - // ..idx1 (List of video data chunks and their location in the file) 10.1051 - 10.1052 - // The RIFF AVI Chunk holds the complete movie 10.1053 - aviChunk = new CompositeChunk("RIFF", "AVI "); 10.1054 - CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl"); 10.1055 - 10.1056 - // Write empty AVI Main Header Chunk - we fill the data in later 10.1057 - aviChunk.add(hdrlChunk); 10.1058 - avihChunk = new FixedSizeDataChunk("avih", 56); 10.1059 - avihChunk.seekToEndOfChunk(); 10.1060 - hdrlChunk.add(avihChunk); 10.1061 - 10.1062 - CompositeChunk strlChunk = new CompositeChunk("LIST", "strl"); 10.1063 - hdrlChunk.add(strlChunk); 10.1064 - 10.1065 - // Write empty AVI Stream Header Chunk - we fill the data in later 10.1066 - strhChunk = new FixedSizeDataChunk("strh", 56); 10.1067 - strhChunk.seekToEndOfChunk(); 10.1068 - strlChunk.add(strhChunk); 10.1069 - strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4); 10.1070 - strfChunk.seekToEndOfChunk(); 10.1071 - strlChunk.add(strfChunk); 10.1072 - 10.1073 - moviChunk = new CompositeChunk("LIST", "movi"); 10.1074 - aviChunk.add(moviChunk); 10.1075 - 10.1076 - 10.1077 - } 10.1078 - 10.1079 - private void writeEpilog() throws IOException { 10.1080 - // Compute values 10.1081 - int duration = 0; 10.1082 - for (Sample s : videoFrames) { 10.1083 - duration += s.duration; 10.1084 - } 10.1085 - long bufferSize = 0; 10.1086 - for (Sample s : videoFrames) { 10.1087 - if (s.length > bufferSize) { 10.1088 - bufferSize = s.length; 10.1089 - } 10.1090 - } 10.1091 - 10.1092 - 10.1093 - DataChunkOutputStream d; 10.1094 - 10.1095 - /* Create Idx1 Chunk and write data 10.1096 - * ------------- 10.1097 - typedef struct _avioldindex { 10.1098 - FOURCC fcc; 10.1099 - DWORD cb; 10.1100 - struct _avioldindex_entry { 10.1101 - DWORD dwChunkId; 10.1102 - DWORD dwFlags; 10.1103 - DWORD dwOffset; 10.1104 - DWORD dwSize; 10.1105 - } aIndex[]; 10.1106 - } AVIOLDINDEX; 10.1107 - */ 10.1108 - DataChunk idx1Chunk = new DataChunk("idx1"); 10.1109 - aviChunk.add(idx1Chunk); 10.1110 - d = idx1Chunk.getOutputStream(); 10.1111 - long moviListOffset = moviChunk.offset + 8; 10.1112 - //moviListOffset = 0; 10.1113 - for (Sample f : videoFrames) { 10.1114 - 10.1115 - d.writeType(f.chunkType); // dwChunkId 10.1116 - // Specifies a FOURCC that identifies a stream in the AVI file. The 10.1117 - // FOURCC must have the form 'xxyy' where xx is the stream number and yy 10.1118 - // is a two-character code that identifies the contents of the stream: 10.1119 - // 10.1120 - // Two-character code Description 10.1121 - // db Uncompressed video frame 10.1122 - // dc Compressed video frame 10.1123 - // pc Palette change 10.1124 - // wb Audio data 10.1125 - 10.1126 - d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)// 10.1127 - | (f.isSync ? 
0x10 : 0x0)); // dwFlags 10.1128 - // Specifies a bitwise combination of zero or more of the following 10.1129 - // flags: 10.1130 - // 10.1131 - // Value Name Description 10.1132 - // 0x10 AVIIF_KEYFRAME The data chunk is a key frame. 10.1133 - // 0x1 AVIIF_LIST The data chunk is a 'rec ' list. 10.1134 - // 0x100 AVIIF_NO_TIME The data chunk does not affect the timing of the 10.1135 - // stream. For example, this flag should be set for 10.1136 - // palette changes. 10.1137 - 10.1138 - d.writeUInt(f.offset - moviListOffset); // dwOffset 10.1139 - // Specifies the location of the data chunk in the file. The value 10.1140 - // should be specified as an offset, in bytes, from the start of the 10.1141 - // 'movi' list; however, in some AVI files it is given as an offset from 10.1142 - // the start of the file. 10.1143 - 10.1144 - d.writeUInt(f.length); // dwSize 10.1145 - // Specifies the size of the data chunk, in bytes. 10.1146 - } 10.1147 - idx1Chunk.finish(); 10.1148 - 10.1149 - /* Write Data into AVI Main Header Chunk 10.1150 - * ------------- 10.1151 - * The AVIMAINHEADER structure defines global information in an AVI file. 10.1152 - * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx 10.1153 - typedef struct _avimainheader { 10.1154 - FOURCC fcc; 10.1155 - DWORD cb; 10.1156 - DWORD dwMicroSecPerFrame; 10.1157 - DWORD dwMaxBytesPerSec; 10.1158 - DWORD dwPaddingGranularity; 10.1159 - DWORD dwFlags; 10.1160 - DWORD dwTotalFrames; 10.1161 - DWORD dwInitialFrames; 10.1162 - DWORD dwStreams; 10.1163 - DWORD dwSuggestedBufferSize; 10.1164 - DWORD dwWidth; 10.1165 - DWORD dwHeight; 10.1166 - DWORD dwReserved[4]; 10.1167 - } AVIMAINHEADER; */ 10.1168 - avihChunk.seekToStartOfData(); 10.1169 - d = avihChunk.getOutputStream(); 10.1170 - 10.1171 - d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame 10.1172 - // Specifies the number of microseconds between frames. 10.1173 - // This value indicates the overall timing for the file. 10.1174 - 10.1175 - d.writeUInt(0); // dwMaxBytesPerSec 10.1176 - // Specifies the approximate maximum data rate of the file. 10.1177 - // This value indicates the number of bytes per second the system 10.1178 - // must handle to present an AVI sequence as specified by the other 10.1179 - // parameters contained in the main header and stream header chunks. 10.1180 - 10.1181 - d.writeUInt(0); // dwPaddingGranularity 10.1182 - // Specifies the alignment for data, in bytes. Pad the data to multiples 10.1183 - // of this value. 10.1184 - 10.1185 - d.writeUInt(0x10); // dwFlags (0x10 == hasIndex) 10.1186 - // Contains a bitwise combination of zero or more of the following 10.1187 - // flags: 10.1188 - // 10.1189 - // Value Name Description 10.1190 - // 0x10 AVIF_HASINDEX Indicates the AVI file has an index. 10.1191 - // 0x20 AVIF_MUSTUSEINDEX Indicates that application should use the 10.1192 - // index, rather than the physical ordering of the 10.1193 - // chunks in the file, to determine the order of 10.1194 - // presentation of the data. For example, this flag 10.1195 - // could be used to create a list of frames for 10.1196 - // editing. 10.1197 - // 0x100 AVIF_ISINTERLEAVED Indicates the AVI file is interleaved. 10.1198 - // 0x1000 AVIF_WASCAPTUREFILE Indicates the AVI file is a specially 10.1199 - // allocated file used for capturing real-time 10.1200 - // video. Applications should warn the user before 10.1201 - // writing over a file with this flag set because 10.1202 - // the user probably defragmented this file. 
10.1203 - // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted 10.1204 - // data and software. When this flag is used, 10.1205 - // software should not permit the data to be 10.1206 - // duplicated. 10.1207 - 10.1208 - d.writeUInt(videoFrames.size()); // dwTotalFrames 10.1209 - // Specifies the total number of frames of data in the file. 10.1210 - 10.1211 - d.writeUInt(0); // dwInitialFrames 10.1212 - // Specifies the initial frame for interleaved files. Noninterleaved 10.1213 - // files should specify zero. If you are creating interleaved files, 10.1214 - // specify the number of frames in the file prior to the initial frame 10.1215 - // of the AVI sequence in this member. 10.1216 - // To give the audio driver enough audio to work with, the audio data in 10.1217 - // an interleaved file must be skewed from the video data. Typically, 10.1218 - // the audio data should be moved forward enough frames to allow 10.1219 - // approximately 0.75 seconds of audio data to be preloaded. The 10.1220 - // dwInitialRecords member should be set to the number of frames the 10.1221 - // audio is skewed. Also set the same value for the dwInitialFrames 10.1222 - // member of the AVISTREAMHEADER structure in the audio stream header 10.1223 - 10.1224 - d.writeUInt(1); // dwStreams 10.1225 - // Specifies the number of streams in the file. For example, a file with 10.1226 - // audio and video has two streams. 10.1227 - 10.1228 - d.writeUInt(bufferSize); // dwSuggestedBufferSize 10.1229 - // Specifies the suggested buffer size for reading the file. Generally, 10.1230 - // this size should be large enough to contain the largest chunk in the 10.1231 - // file. If set to zero, or if it is too small, the playback software 10.1232 - // will have to reallocate memory during playback, which will reduce 10.1233 - // performance. For an interleaved file, the buffer size should be large 10.1234 - // enough to read an entire record, and not just a chunk. 10.1235 - 10.1236 - 10.1237 - d.writeUInt(imgWidth); // dwWidth 10.1238 - // Specifies the width of the AVI file in pixels. 10.1239 - 10.1240 - d.writeUInt(imgHeight); // dwHeight 10.1241 - // Specifies the height of the AVI file in pixels. 10.1242 - 10.1243 - d.writeUInt(0); // dwReserved[0] 10.1244 - d.writeUInt(0); // dwReserved[1] 10.1245 - d.writeUInt(0); // dwReserved[2] 10.1246 - d.writeUInt(0); // dwReserved[3] 10.1247 - // Reserved. Set this array to zero. 10.1248 - 10.1249 - /* Write Data into AVI Stream Header Chunk 10.1250 - * ------------- 10.1251 - * The AVISTREAMHEADER structure contains information about one stream 10.1252 - * in an AVI file. 
10.1253 - * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx 10.1254 - typedef struct _avistreamheader { 10.1255 - FOURCC fcc; 10.1256 - DWORD cb; 10.1257 - FOURCC fccType; 10.1258 - FOURCC fccHandler; 10.1259 - DWORD dwFlags; 10.1260 - WORD wPriority; 10.1261 - WORD wLanguage; 10.1262 - DWORD dwInitialFrames; 10.1263 - DWORD dwScale; 10.1264 - DWORD dwRate; 10.1265 - DWORD dwStart; 10.1266 - DWORD dwLength; 10.1267 - DWORD dwSuggestedBufferSize; 10.1268 - DWORD dwQuality; 10.1269 - DWORD dwSampleSize; 10.1270 - struct { 10.1271 - short int left; 10.1272 - short int top; 10.1273 - short int right; 10.1274 - short int bottom; 10.1275 - } rcFrame; 10.1276 - } AVISTREAMHEADER; 10.1277 - */ 10.1278 - strhChunk.seekToStartOfData(); 10.1279 - d = strhChunk.getOutputStream(); 10.1280 - d.writeType("vids"); // fccType - vids for video stream 10.1281 - // Contains a FOURCC that specifies the type of the data contained in 10.1282 - // the stream. The following standard AVI values for video and audio are 10.1283 - // defined: 10.1284 - // 10.1285 - // FOURCC Description 10.1286 - // 'auds' Audio stream 10.1287 - // 'mids' MIDI stream 10.1288 - // 'txts' Text stream 10.1289 - // 'vids' Video stream 10.1290 - 10.1291 - switch (videoFormat) { 10.1292 - case RAW: 10.1293 - d.writeType("DIB "); // fccHandler - DIB for Raw RGB 10.1294 - break; 10.1295 - case RLE: 10.1296 - d.writeType("RLE "); // fccHandler - Microsoft RLE 10.1297 - break; 10.1298 - case JPG: 10.1299 - d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG 10.1300 - break; 10.1301 - case PNG: 10.1302 - default: 10.1303 - d.writeType("png "); // fccHandler - png for PNG 10.1304 - break; 10.1305 - } 10.1306 - // Optionally, contains a FOURCC that identifies a specific data 10.1307 - // handler. The data handler is the preferred handler for the stream. 10.1308 - // For audio and video streams, this specifies the codec for decoding 10.1309 - // the stream. 10.1310 - 10.1311 - if (imgDepth <= 8) { 10.1312 - d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES 10.1313 - } else { 10.1314 - d.writeUInt(0); // dwFlags 10.1315 - } 10.1316 - 10.1317 - // Contains any flags for the data stream. The bits in the high-order 10.1318 - // word of these flags are specific to the type of data contained in the 10.1319 - // stream. The following standard flags are defined: 10.1320 - // 10.1321 - // Value Name Description 10.1322 - // AVISF_DISABLED 0x00000001 Indicates this stream should not 10.1323 - // be enabled by default. 10.1324 - // AVISF_VIDEO_PALCHANGES 0x00010000 10.1325 - // Indicates this video stream contains 10.1326 - // palette changes. This flag warns the playback 10.1327 - // software that it will need to animate the 10.1328 - // palette. 10.1329 - 10.1330 - d.writeUShort(0); // wPriority 10.1331 - // Specifies priority of a stream type. For example, in a file with 10.1332 - // multiple audio streams, the one with the highest priority might be 10.1333 - // the default stream. 10.1334 - 10.1335 - d.writeUShort(0); // wLanguage 10.1336 - // Language tag. 10.1337 - 10.1338 - d.writeUInt(0); // dwInitialFrames 10.1339 - // Specifies how far audio data is skewed ahead of the video frames in 10.1340 - // interleaved files. Typically, this is about 0.75 seconds. If you are 10.1341 - // creating interleaved files, specify the number of frames in the file 10.1342 - // prior to the initial frame of the AVI sequence in this member. 
For 10.1343 - // more information, see the remarks for the dwInitialFrames member of 10.1344 - // the AVIMAINHEADER structure. 10.1345 - 10.1346 - d.writeUInt(timeScale); // dwScale 10.1347 - // Used with dwRate to specify the time scale that this stream will use. 10.1348 - // Dividing dwRate by dwScale gives the number of samples per second. 10.1349 - // For video streams, this is the frame rate. For audio streams, this 10.1350 - // rate corresponds to the time needed to play nBlockAlign bytes of 10.1351 - // audio, which for PCM audio is the just the sample rate. 10.1352 - 10.1353 - d.writeUInt(frameRate); // dwRate 10.1354 - // See dwScale. 10.1355 - 10.1356 - d.writeUInt(0); // dwStart 10.1357 - // Specifies the starting time for this stream. The units are defined by 10.1358 - // the dwRate and dwScale members in the main file header. Usually, this 10.1359 - // is zero, but it can specify a delay time for a stream that does not 10.1360 - // start concurrently with the file. 10.1361 - 10.1362 - d.writeUInt(videoFrames.size()); // dwLength 10.1363 - // Specifies the length of this stream. The units are defined by the 10.1364 - // dwRate and dwScale members of the stream's header. 10.1365 - 10.1366 - d.writeUInt(bufferSize); // dwSuggestedBufferSize 10.1367 - // Specifies how large a buffer should be used to read this stream. 10.1368 - // Typically, this contains a value corresponding to the largest chunk 10.1369 - // present in the stream. Using the correct buffer size makes playback 10.1370 - // more efficient. Use zero if you do not know the correct buffer size. 10.1371 - 10.1372 - d.writeInt(-1); // dwQuality 10.1373 - // Specifies an indicator of the quality of the data in the stream. 10.1374 - // Quality is represented as a number between 0 and 10,000. 10.1375 - // For compressed data, this typically represents the value of the 10.1376 - // quality parameter passed to the compression software. If set to –1, 10.1377 - // drivers use the default quality value. 10.1378 - 10.1379 - d.writeUInt(0); // dwSampleSize 10.1380 - // Specifies the size of a single sample of data. This is set to zero 10.1381 - // if the samples can vary in size. If this number is nonzero, then 10.1382 - // multiple samples of data can be grouped into a single chunk within 10.1383 - // the file. If it is zero, each sample of data (such as a video frame) 10.1384 - // must be in a separate chunk. For video streams, this number is 10.1385 - // typically zero, although it can be nonzero if all video frames are 10.1386 - // the same size. For audio streams, this number should be the same as 10.1387 - // the nBlockAlign member of the WAVEFORMATEX structure describing the 10.1388 - // audio. 10.1389 - 10.1390 - d.writeUShort(0); // rcFrame.left 10.1391 - d.writeUShort(0); // rcFrame.top 10.1392 - d.writeUShort(imgWidth); // rcFrame.right 10.1393 - d.writeUShort(imgHeight); // rcFrame.bottom 10.1394 - // Specifies the destination rectangle for a text or video stream within 10.1395 - // the movie rectangle specified by the dwWidth and dwHeight members of 10.1396 - // the AVI main header structure. The rcFrame member is typically used 10.1397 - // in support of multiple video streams. Set this rectangle to the 10.1398 - // coordinates corresponding to the movie rectangle to update the whole 10.1399 - // movie rectangle. Units for this member are pixels. The upper-left 10.1400 - // corner of the destination rectangle is relative to the upper-left 10.1401 - // corner of the movie rectangle. 
10.1402 - 10.1403 - /* Write BITMAPINFOHEADR Data into AVI Stream Format Chunk 10.1404 - /* ------------- 10.1405 - * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx 10.1406 - typedef struct tagBITMAPINFOHEADER { 10.1407 - DWORD biSize; 10.1408 - LONG biWidth; 10.1409 - LONG biHeight; 10.1410 - WORD biPlanes; 10.1411 - WORD biBitCount; 10.1412 - DWORD biCompression; 10.1413 - DWORD biSizeImage; 10.1414 - LONG biXPelsPerMeter; 10.1415 - LONG biYPelsPerMeter; 10.1416 - DWORD biClrUsed; 10.1417 - DWORD biClrImportant; 10.1418 - } BITMAPINFOHEADER; 10.1419 - */ 10.1420 - strfChunk.seekToStartOfData(); 10.1421 - d = strfChunk.getOutputStream(); 10.1422 - d.writeUInt(40); // biSize 10.1423 - // Specifies the number of bytes required by the structure. This value 10.1424 - // does not include the size of the color table or the size of the color 10.1425 - // masks, if they are appended to the end of structure. 10.1426 - 10.1427 - d.writeInt(imgWidth); // biWidth 10.1428 - // Specifies the width of the bitmap, in pixels. 10.1429 - 10.1430 - d.writeInt(imgHeight); // biHeight 10.1431 - // Specifies the height of the bitmap, in pixels. 10.1432 - // 10.1433 - // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is 10.1434 - // a bottom-up DIB with the origin at the lower left corner. If biHeight 10.1435 - // is negative, the bitmap is a top-down DIB with the origin at the 10.1436 - // upper left corner. 10.1437 - // For YUV bitmaps, the bitmap is always top-down, regardless of the 10.1438 - // sign of biHeight. Decoders should offer YUV formats with postive 10.1439 - // biHeight, but for backward compatibility they should accept YUV 10.1440 - // formats with either positive or negative biHeight. 10.1441 - // For compressed formats, biHeight must be positive, regardless of 10.1442 - // image orientation. 10.1443 - 10.1444 - d.writeShort(1); // biPlanes 10.1445 - // Specifies the number of planes for the target device. This value must 10.1446 - // be set to 1. 10.1447 - 10.1448 - d.writeShort(imgDepth); // biBitCount 10.1449 - // Specifies the number of bits per pixel (bpp). For uncompressed 10.1450 - // formats, this value is the average number of bits per pixel. For 10.1451 - // compressed formats, this value is the implied bit depth of the 10.1452 - // uncompressed image, after the image has been decoded. 10.1453 - 10.1454 - switch (videoFormat) { 10.1455 - case RAW: 10.1456 - default: 10.1457 - d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB 10.1458 - break; 10.1459 - case RLE: 10.1460 - if (imgDepth == 8) { 10.1461 - d.writeInt(1); // biCompression - BI_RLE8 10.1462 - } else if (imgDepth == 4) { 10.1463 - d.writeInt(2); // biCompression - BI_RLE4 10.1464 - } else { 10.1465 - throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images"); 10.1466 - } 10.1467 - break; 10.1468 - case JPG: 10.1469 - d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG 10.1470 - break; 10.1471 - case PNG: 10.1472 - d.writeType("png "); // biCompression - png for PNG 10.1473 - break; 10.1474 - } 10.1475 - // For compressed video and YUV formats, this member is a FOURCC code, 10.1476 - // specified as a DWORD in little-endian order. For example, YUYV video 10.1477 - // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC 10.1478 - // Codes. 10.1479 - // 10.1480 - // For uncompressed RGB formats, the following values are possible: 10.1481 - // 10.1482 - // Value Description 10.1483 - // BI_RGB 0x00000000 Uncompressed RGB. 
10.1484 - // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks. 10.1485 - // Valid for 16-bpp and 32-bpp bitmaps. 10.1486 - // 10.1487 - // Note that BI_JPG and BI_PNG are not valid video formats. 10.1488 - // 10.1489 - // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is 10.1490 - // always RGB 555. If biCompression equals BI_BITFIELDS, the format is 10.1491 - // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE 10.1492 - // structure to determine the specific RGB type. 10.1493 - 10.1494 - switch (videoFormat) { 10.1495 - case RAW: 10.1496 - d.writeInt(0); // biSizeImage 10.1497 - break; 10.1498 - case RLE: 10.1499 - case JPG: 10.1500 - case PNG: 10.1501 - default: 10.1502 - if (imgDepth == 4) { 10.1503 - d.writeInt(imgWidth * imgHeight / 2); // biSizeImage 10.1504 - } else { 10.1505 - int bytesPerPixel = Math.max(1, imgDepth / 8); 10.1506 - d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage 10.1507 - } 10.1508 - break; 10.1509 - } 10.1510 - // Specifies the size, in bytes, of the image. This can be set to 0 for 10.1511 - // uncompressed RGB bitmaps. 10.1512 - 10.1513 - d.writeInt(0); // biXPelsPerMeter 10.1514 - // Specifies the horizontal resolution, in pixels per meter, of the 10.1515 - // target device for the bitmap. 10.1516 - 10.1517 - d.writeInt(0); // biYPelsPerMeter 10.1518 - // Specifies the vertical resolution, in pixels per meter, of the target 10.1519 - // device for the bitmap. 10.1520 - 10.1521 - d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed 10.1522 - // Specifies the number of color indices in the color table that are 10.1523 - // actually used by the bitmap. 10.1524 - 10.1525 - d.writeInt(0); // biClrImportant 10.1526 - // Specifies the number of color indices that are considered important 10.1527 - // for displaying the bitmap. If this value is zero, all colors are 10.1528 - // important. 10.1529 - 10.1530 - if (palette != null) { 10.1531 - for (int i = 0, n = palette.getMapSize(); i < n; ++i) { 10.1532 - /* 10.1533 - * typedef struct tagRGBQUAD { 10.1534 - BYTE rgbBlue; 10.1535 - BYTE rgbGreen; 10.1536 - BYTE rgbRed; 10.1537 - BYTE rgbReserved; // This member is reserved and must be zero. 10.1538 - } RGBQUAD; 10.1539 - */ 10.1540 - d.write(palette.getBlue(i)); 10.1541 - d.write(palette.getGreen(i)); 10.1542 - d.write(palette.getRed(i)); 10.1543 - d.write(0); 10.1544 - } 10.1545 - } 10.1546 - 10.1547 - 10.1548 - // ----------------- 10.1549 - aviChunk.finish(); 10.1550 - } 10.1551 -}
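[Editor's note] The hunk above deletes the old copy of AVIOutputStream from com.aurellem.capture.video; the class now lives under ca.randelshofer (added earlier in this changeset). A minimal usage sketch of the relocated class follows, assuming the File-plus-format constructor and the frame-rate/time-scale setters that the class documents (their exact signatures are not part of this hunk):

    import java.awt.image.BufferedImage;
    import java.io.File;

    import ca.randelshofer.AVIOutputStream;

    public class AviWriteExample {
        public static void main(String[] args) throws Exception {
            // Hypothetical usage; constructor and setter names assumed, not shown in this hunk.
            AVIOutputStream out = new AVIOutputStream(
                    new File("capture.avi"), AVIOutputStream.VideoFormat.JPG);
            out.setTimeScale(1);      // dwScale
            out.setFrameRate(30);     // dwRate => 30 frames per second
            for (int i = 0; i < 90; i++) {
                BufferedImage frame = new BufferedImage(640, 480, BufferedImage.TYPE_INT_RGB);
                // ... draw into frame ...
                out.writeFrame(frame);  // every frame must match the track dimensions
            }
            out.close();                // finish() writes the avih/strh/strf headers and the idx1 index
        }
    }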
11.1 --- a/src/com/aurellem/capture/video/AVIVideoRecorder.java Wed Oct 26 08:54:12 2011 -0700 11.2 +++ b/src/com/aurellem/capture/video/AVIVideoRecorder.java Wed Oct 26 09:38:27 2011 -0700 11.3 @@ -4,6 +4,8 @@ 11.4 import java.io.File; 11.5 import java.io.IOException; 11.6 11.7 +import ca.randelshofer.AVIOutputStream; 11.8 + 11.9 11.10 public class AVIVideoRecorder extends AbstractVideoRecorder{ 11.11
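[Editor's note] The only change to AVIVideoRecorder is the import of the relocated AVIOutputStream. As an illustrative sketch only (the actual AVIVideoRecorder body and its AbstractVideoRecorder hooks are outside this hunk, and the class name below is hypothetical), a recorder can delegate to that class like this:

    import java.awt.image.BufferedImage;
    import java.io.File;
    import java.io.IOException;

    import ca.randelshofer.AVIOutputStream;

    public class SimpleAviRecorder {
        private final AVIOutputStream out;

        public SimpleAviRecorder(File target) throws IOException {
            out = new AVIOutputStream(target, AVIOutputStream.VideoFormat.RAW);
        }

        public void record(BufferedImage frame) throws IOException {
            out.writeFrame(frame);   // one "00db"/"00dc" chunk per rendered frame
        }

        public void finish() throws IOException {
            out.close();             // writes the index and fills in the headers
        }
    }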
12.1 --- a/src/com/aurellem/capture/video/DataChunkOutputStream.java Wed Oct 26 08:54:12 2011 -0700 12.2 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 12.3 @@ -1,217 +0,0 @@ 12.4 -/** 12.5 - * @(#)DataChunkOutputStream.java 1.1 2011-01-17 12.6 - * 12.7 - * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland. 12.8 - * All rights reserved. 12.9 - * 12.10 - * You may not use, copy or modify this file, except in compliance with the 12.11 - * license agreement you entered into with Werner Randelshofer. 12.12 - * For details see accompanying license terms. 12.13 - */ 12.14 -package com.aurellem.capture.video; 12.15 - 12.16 -import java.io.*; 12.17 - 12.18 -/** 12.19 - * This output stream filter supports common data types used inside 12.20 - * of AVI RIFF Data Chunks. 12.21 - * 12.22 - * @author Werner Randelshofer 12.23 - * @version 1.1 2011-01-17 Adds functionality for blocking flush and close. 12.24 - * <br>1.0.1 2010-04-05 Removed unused constants. 12.25 - * <br>1.0 2008-08-11 Created. 12.26 - */ 12.27 -public class DataChunkOutputStream extends FilterOutputStream { 12.28 - 12.29 - /** 12.30 - * The number of bytes written to the data output stream so far. 12.31 - * If this counter overflows, it will be wrapped to Integer.MAX_VALUE. 12.32 - */ 12.33 - protected long written; 12.34 - 12.35 - /** Whether flush and close request shall be forwarded to underlying stream.*/ 12.36 - private boolean forwardFlushAndClose; 12.37 - 12.38 - public DataChunkOutputStream(OutputStream out) { 12.39 - this(out,true); 12.40 - } 12.41 - public DataChunkOutputStream(OutputStream out, boolean forwardFlushAndClose) { 12.42 - super(out); 12.43 - this.forwardFlushAndClose=forwardFlushAndClose; 12.44 - } 12.45 - 12.46 - /** 12.47 - * Writes an chunk type identifier (4 bytes). 12.48 - * @param s A string with a length of 4 characters. 12.49 - */ 12.50 - public void writeType(String s) throws IOException { 12.51 - if (s.length() != 4) { 12.52 - throw new IllegalArgumentException("type string must have 4 characters"); 12.53 - } 12.54 - 12.55 - try { 12.56 - out.write(s.getBytes("ASCII"), 0, 4); 12.57 - incCount(4); 12.58 - } catch (UnsupportedEncodingException e) { 12.59 - throw new InternalError(e.toString()); 12.60 - } 12.61 - } 12.62 - 12.63 - /** 12.64 - * Writes out a <code>byte</code> to the underlying output stream as 12.65 - * a 1-byte value. If no exception is thrown, the counter 12.66 - * <code>written</code> is incremented by <code>1</code>. 12.67 - * 12.68 - * @param v a <code>byte</code> value to be written. 12.69 - * @exception IOException if an I/O error occurs. 12.70 - * @see java.io.FilterOutputStream#out 12.71 - */ 12.72 - public final void writeByte(int v) throws IOException { 12.73 - out.write(v); 12.74 - incCount(1); 12.75 - } 12.76 - 12.77 - /** 12.78 - * Writes <code>len</code> bytes from the specified byte array 12.79 - * starting at offset <code>off</code> to the underlying output stream. 12.80 - * If no exception is thrown, the counter <code>written</code> is 12.81 - * incremented by <code>len</code>. 12.82 - * 12.83 - * @param b the data. 12.84 - * @param off the start offset in the data. 12.85 - * @param len the number of bytes to write. 12.86 - * @exception IOException if an I/O error occurs. 
12.87 - * @see java.io.FilterOutputStream#out 12.88 - */ 12.89 - @Override 12.90 - public synchronized void write(byte b[], int off, int len) 12.91 - throws IOException { 12.92 - out.write(b, off, len); 12.93 - incCount(len); 12.94 - } 12.95 - 12.96 - /** 12.97 - * Writes the specified byte (the low eight bits of the argument 12.98 - * <code>b</code>) to the underlying output stream. If no exception 12.99 - * is thrown, the counter <code>written</code> is incremented by 12.100 - * <code>1</code>. 12.101 - * <p> 12.102 - * Implements the <code>write</code> method of <code>OutputStream</code>. 12.103 - * 12.104 - * @param b the <code>byte</code> to be written. 12.105 - * @exception IOException if an I/O error occurs. 12.106 - * @see java.io.FilterOutputStream#out 12.107 - */ 12.108 - @Override 12.109 - public synchronized void write(int b) throws IOException { 12.110 - out.write(b); 12.111 - incCount(1); 12.112 - } 12.113 - 12.114 - /** 12.115 - * Writes an <code>int</code> to the underlying output stream as four 12.116 - * bytes, high byte first. If no exception is thrown, the counter 12.117 - * <code>written</code> is incremented by <code>4</code>. 12.118 - * 12.119 - * @param v an <code>int</code> to be written. 12.120 - * @exception IOException if an I/O error occurs. 12.121 - * @see java.io.FilterOutputStream#out 12.122 - */ 12.123 - public void writeInt(int v) throws IOException { 12.124 - out.write((v >>> 0) & 0xff); 12.125 - out.write((v >>> 8) & 0xff); 12.126 - out.write((v >>> 16) & 0xff); 12.127 - out.write((v >>> 24) & 0xff); 12.128 - incCount(4); 12.129 - } 12.130 - 12.131 - /** 12.132 - * Writes an unsigned 32 bit integer value. 12.133 - * 12.134 - * @param v The value 12.135 - * @throws java.io.IOException 12.136 - */ 12.137 - public void writeUInt(long v) throws IOException { 12.138 - out.write((int) ((v >>> 0) & 0xff)); 12.139 - out.write((int) ((v >>> 8) & 0xff)); 12.140 - out.write((int) ((v >>> 16) & 0xff)); 12.141 - out.write((int) ((v >>> 24) & 0xff)); 12.142 - incCount(4); 12.143 - } 12.144 - 12.145 - /** 12.146 - * Writes a signed 16 bit integer value. 12.147 - * 12.148 - * @param v The value 12.149 - * @throws java.io.IOException 12.150 - */ 12.151 - public void writeShort(int v) throws IOException { 12.152 - out.write((int) ((v >>> 0) & 0xff)); 12.153 - out.write((int) ((v >> 8) & 0xff)); 12.154 - incCount(2); 12.155 - } 12.156 - 12.157 - public void writeLong(long v) throws IOException { 12.158 - out.write((int) (v >>> 0) & 0xff); 12.159 - out.write((int) (v >>> 8) & 0xff); 12.160 - out.write((int) (v >>> 16) & 0xff); 12.161 - out.write((int) (v >>> 24) & 0xff); 12.162 - out.write((int) (v >>> 32) & 0xff); 12.163 - out.write((int) (v >>> 40) & 0xff); 12.164 - out.write((int) (v >>> 48) & 0xff); 12.165 - out.write((int) (v >>> 56) & 0xff); 12.166 - incCount(8); 12.167 - } 12.168 - 12.169 - public void writeUShort(int v) throws IOException { 12.170 - out.write((int) ((v >>> 0) & 0xff)); 12.171 - out.write((int) ((v >> 8) & 0xff)); 12.172 - incCount(2); 12.173 - } 12.174 - 12.175 - /** 12.176 - * Increases the written counter by the specified value 12.177 - * until it reaches Long.MAX_VALUE. 12.178 - */ 12.179 - protected void incCount(int value) { 12.180 - long temp = written + value; 12.181 - if (temp < 0) { 12.182 - temp = Long.MAX_VALUE; 12.183 - } 12.184 - written = temp; 12.185 - } 12.186 - 12.187 - /** 12.188 - * Returns the current value of the counter <code>written</code>, 12.189 - * the number of bytes written to this data output stream so far. 
12.190 - * If the counter overflows, it will be wrapped to Integer.MAX_VALUE. 12.191 - * 12.192 - * @return the value of the <code>written</code> field. 12.193 - * @see java.io.DataOutputStream#written 12.194 - */ 12.195 - public final long size() { 12.196 - return written; 12.197 - } 12.198 - 12.199 - /** 12.200 - * Sets the value of the counter <code>written</code> to 0. 12.201 - */ 12.202 - public void clearCount() { 12.203 - written = 0; 12.204 - } 12.205 - 12.206 - @Override 12.207 - public void close() throws IOException { 12.208 - if (forwardFlushAndClose) { 12.209 - super.close(); 12.210 - } 12.211 - } 12.212 - 12.213 - @Override 12.214 - public void flush() throws IOException { 12.215 - if (forwardFlushAndClose) { 12.216 - super.flush(); 12.217 - } 12.218 - } 12.219 - 12.220 -}
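[Editor's note] The deleted DataChunkOutputStream (still present under ca.randelshofer according to the file list of this changeset) writes RIFF primitives in little-endian byte order and tracks a byte count that is later used to size chunks. A small sketch of those primitives, assuming the relocated class keeps the same API as the code removed above:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import ca.randelshofer.DataChunkOutputStream;

    public class RiffPrimitiveDemo {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            DataChunkOutputStream d = new DataChunkOutputStream(buf);
            d.writeType("avih");   // FOURCC, written as four ASCII bytes
            d.writeUInt(56);       // chunk size, low byte first: 0x38 0x00 0x00 0x00
            System.out.println("bytes written: " + d.size());   // prints 8
            d.close();
        }
    }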
13.1 --- a/src/com/aurellem/capture/video/ImageOutputStreamAdapter.java Wed Oct 26 08:54:12 2011 -0700 13.2 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 13.3 @@ -1,144 +0,0 @@ 13.4 -/* 13.5 - * @(#)ImageOutputStreamAdapter.java 1.1 2011-01-07 13.6 - * 13.7 - * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland. 13.8 - * All rights reserved. 13.9 - * 13.10 - * You may not use, copy or modify this file, except in compliance with the 13.11 - * license agreement you entered into with Werner Randelshofer. 13.12 - * For details see accompanying license terms. 13.13 - */ 13.14 -package com.aurellem.capture.video; 13.15 - 13.16 -import java.io.FilterOutputStream; 13.17 -import java.io.IOException; 13.18 -import java.io.OutputStream; 13.19 -import javax.imageio.stream.ImageOutputStream; 13.20 - 13.21 -/** 13.22 - * Adapts an {@code ImageOutputStream} for classes requiring an 13.23 - * {@code OutputStream}. 13.24 - * 13.25 - * @author Werner Randelshofer 13.26 - * @version 1.1 2011-01-07 Fixes performance. 13.27 - * <br>1.0 2010-12-26 Created. 13.28 - */ 13.29 -public class ImageOutputStreamAdapter extends OutputStream { 13.30 - 13.31 - /** 13.32 - * The underlying output stream to be filtered. 13.33 - */ 13.34 - protected ImageOutputStream out; 13.35 - 13.36 - /** 13.37 - * Creates an output stream filter built on top of the specified 13.38 - * underlying output stream. 13.39 - * 13.40 - * @param out the underlying output stream to be assigned to 13.41 - * the field <tt>this.out</tt> for later use, or 13.42 - * <code>null</code> if this instance is to be 13.43 - * created without an underlying stream. 13.44 - */ 13.45 - public ImageOutputStreamAdapter(ImageOutputStream out) { 13.46 - this.out = out; 13.47 - } 13.48 - 13.49 - /** 13.50 - * Writes the specified <code>byte</code> to this output stream. 13.51 - * <p> 13.52 - * The <code>write</code> method of <code>FilterOutputStream</code> 13.53 - * calls the <code>write</code> method of its underlying output stream, 13.54 - * that is, it performs <tt>out.write(b)</tt>. 13.55 - * <p> 13.56 - * Implements the abstract <tt>write</tt> method of <tt>OutputStream</tt>. 13.57 - * 13.58 - * @param b the <code>byte</code>. 13.59 - * @exception IOException if an I/O error occurs. 13.60 - */ 13.61 - @Override 13.62 - public void write(int b) throws IOException { 13.63 - out.write(b); 13.64 - } 13.65 - 13.66 - /** 13.67 - * Writes <code>b.length</code> bytes to this output stream. 13.68 - * <p> 13.69 - * The <code>write</code> method of <code>FilterOutputStream</code> 13.70 - * calls its <code>write</code> method of three arguments with the 13.71 - * arguments <code>b</code>, <code>0</code>, and 13.72 - * <code>b.length</code>. 13.73 - * <p> 13.74 - * Note that this method does not call the one-argument 13.75 - * <code>write</code> method of its underlying stream with the single 13.76 - * argument <code>b</code>. 13.77 - * 13.78 - * @param b the data to be written. 13.79 - * @exception IOException if an I/O error occurs. 13.80 - * @see java.io.FilterOutputStream#write(byte[], int, int) 13.81 - */ 13.82 - @Override 13.83 - public void write(byte b[]) throws IOException { 13.84 - write(b, 0, b.length); 13.85 - } 13.86 - 13.87 - /** 13.88 - * Writes <code>len</code> bytes from the specified 13.89 - * <code>byte</code> array starting at offset <code>off</code> to 13.90 - * this output stream. 
13.91 - * <p> 13.92 - * The <code>write</code> method of <code>FilterOutputStream</code> 13.93 - * calls the <code>write</code> method of one argument on each 13.94 - * <code>byte</code> to output. 13.95 - * <p> 13.96 - * Note that this method does not call the <code>write</code> method 13.97 - * of its underlying input stream with the same arguments. Subclasses 13.98 - * of <code>FilterOutputStream</code> should provide a more efficient 13.99 - * implementation of this method. 13.100 - * 13.101 - * @param b the data. 13.102 - * @param off the start offset in the data. 13.103 - * @param len the number of bytes to write. 13.104 - * @exception IOException if an I/O error occurs. 13.105 - * @see java.io.FilterOutputStream#write(int) 13.106 - */ 13.107 - @Override 13.108 - public void write(byte b[], int off, int len) throws IOException { 13.109 - out.write(b,off,len); 13.110 - } 13.111 - 13.112 - /** 13.113 - * Flushes this output stream and forces any buffered output bytes 13.114 - * to be written out to the stream. 13.115 - * <p> 13.116 - * The <code>flush</code> method of <code>FilterOutputStream</code> 13.117 - * calls the <code>flush</code> method of its underlying output stream. 13.118 - * 13.119 - * @exception IOException if an I/O error occurs. 13.120 - * @see java.io.FilterOutputStream#out 13.121 - */ 13.122 - @Override 13.123 - public void flush() throws IOException { 13.124 - out.flush(); 13.125 - } 13.126 - 13.127 - /** 13.128 - * Closes this output stream and releases any system resources 13.129 - * associated with the stream. 13.130 - * <p> 13.131 - * The <code>close</code> method of <code>FilterOutputStream</code> 13.132 - * calls its <code>flush</code> method, and then calls the 13.133 - * <code>close</code> method of its underlying output stream. 13.134 - * 13.135 - * @exception IOException if an I/O error occurs. 13.136 - * @see java.io.FilterOutputStream#flush() 13.137 - * @see java.io.FilterOutputStream#out 13.138 - */ 13.139 - @Override 13.140 - public void close() throws IOException { 13.141 - try { 13.142 - flush(); 13.143 - } finally { 13.144 - out.close(); 13.145 - } 13.146 - } 13.147 -}
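[Editor's note] ImageOutputStreamAdapter (likewise relocated to ca.randelshofer) simply forwards OutputStream calls to a javax.imageio ImageOutputStream, so ImageIO-backed targets can be handed to code that only knows java.io.OutputStream. A minimal sketch of the adapter in use, assuming the relocated class keeps the constructor shown above:

    import java.io.File;
    import java.io.IOException;
    import java.io.OutputStream;

    import javax.imageio.stream.FileImageOutputStream;

    import ca.randelshofer.ImageOutputStreamAdapter;

    public class AdapterDemo {
        public static void main(String[] args) throws IOException {
            FileImageOutputStream imgOut = new FileImageOutputStream(new File("frame.bin"));
            OutputStream out = new ImageOutputStreamAdapter(imgOut);
            out.write(new byte[] {0x52, 0x49, 0x46, 0x46});  // forwarded to the ImageOutputStream
            out.close();                                     // flushes, then closes the wrapped stream
        }
    }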
14.1 --- a/src/com/aurellem/capture/video/MicrosoftRLEEncoder.java Wed Oct 26 08:54:12 2011 -0700 14.2 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 14.3 @@ -1,402 +0,0 @@ 14.4 -/* 14.5 - * @(#)AppleRLEEncoder.java 1.1.1 2011-01-17 14.6 - * 14.7 - * Copyright © 2011 Werner Randelshofer, Immensee, Switzerland. 14.8 - * All rights reserved. 14.9 - * 14.10 - * You may not use, copy or modify this file, except in compliance with the 14.11 - * license agreement you entered into with Werner Randelshofer. 14.12 - * For details see accompanying license terms. 14.13 - */ 14.14 -package com.aurellem.capture.video; 14.15 - 14.16 -import java.io.ByteArrayOutputStream; 14.17 -import java.io.IOException; 14.18 -import java.io.OutputStream; 14.19 -import java.util.Arrays; 14.20 - 14.21 -import com.aurellem.capture.audio.SeekableByteArrayOutputStream; 14.22 - 14.23 -/** 14.24 - * Implements the run length encoding of the Microsoft RLE format. 14.25 - * <p> 14.26 - * Each line of a frame is compressed individually. A line consists of two-byte 14.27 - * op-codes optionally followed by data. The end of the line is marked with 14.28 - * the EOL op-code. 14.29 - * <p> 14.30 - * The following op-codes are supported: 14.31 - * <ul> 14.32 - * <li>{@code 0x00 0x00} 14.33 - * <br>Marks the end of a line.</li> 14.34 - * 14.35 - * <li>{@code 0x00 0x01} 14.36 - * <br>Marks the end of the bitmap.</li> 14.37 - * 14.38 - * <li>{@code 0x00 0x02 x y} 14.39 - * <br> Marks a delta (skip). {@code x} and {@code y} 14.40 - * indicate the horizontal and vertical offset from the current position. 14.41 - * {@code x} and {@code y} are unsigned 8-bit values.</li> 14.42 - * 14.43 - * <li>{@code 0x00 n data{n} 0x00?} 14.44 - * <br> Marks a literal run. {@code n} 14.45 - * gives the number of data bytes that follow. {@code n} must be between 3 and 14.46 - * 255. If n is odd, a pad byte with the value 0x00 must be added. 14.47 - * </li> 14.48 - * <li>{@code n data} 14.49 - * <br> Marks a repetition. {@code n} 14.50 - * gives the number of times the data byte is repeated. {@code n} must be 14.51 - * between 1 and 255. 14.52 - * </li> 14.53 - * </ul> 14.54 - * Example: 14.55 - * <pre> 14.56 - * Compressed data Expanded data 14.57 - * 14.58 - * 03 04 04 04 04 14.59 - * 05 06 06 06 06 06 06 14.60 - * 00 03 45 56 67 00 45 56 67 14.61 - * 02 78 78 78 14.62 - * 00 02 05 01 Move 5 right and 1 down 14.63 - * 02 78 78 78 14.64 - * 00 00 End of line 14.65 - * 09 1E 1E 1E 1E 1E 1E 1E 1E 1E 1E 14.66 - * 00 01 End of RLE bitmap 14.67 - * </pre> 14.68 - * 14.69 - * References:<br/> 14.70 - * <a href="http://wiki.multimedia.cx/index.php?title=Microsoft_RLE">http://wiki.multimedia.cx/index.php?title=Microsoft_RLE</a><br> 14.71 - * 14.72 - * @author Werner Randelshofer 14.73 - * @version 1.1.1 2011-01-17 Removes unused imports. 14.74 - * <br>1.1 2011-01-07 Improves performance. 14.75 - * <br>1.0 2011-01-05 Created. 14.76 - */ 14.77 -public class MicrosoftRLEEncoder { 14.78 - 14.79 - private SeekableByteArrayOutputStream tempSeek=new SeekableByteArrayOutputStream(); 14.80 - private DataChunkOutputStream temp=new DataChunkOutputStream(tempSeek); 14.81 - 14.82 - /** Encodes a 8-bit key frame. 14.83 - * 14.84 - * @param temp The output stream. Must be set to Big-Endian. 14.85 - * @param data The image data. 14.86 - * @param offset The offset to the first pixel in the data array. 14.87 - * @param length The width of the image in data elements. 14.88 - * @param step The number to add to offset to get to the next scanline. 
14.89 - */ 14.90 - public void writeKey8(OutputStream out, byte[] data, int offset, int length, int step, int height) 14.91 - throws IOException { 14.92 - tempSeek.reset(); 14.93 - int ymax = offset + height * step; 14.94 - int upsideDown = ymax-step+offset; 14.95 - 14.96 - // Encode each scanline separately 14.97 - for (int y = offset; y < ymax; y += step) { 14.98 - int xy = upsideDown-y; 14.99 - int xymax = xy + length; 14.100 - 14.101 - int literalCount = 0; 14.102 - int repeatCount = 0; 14.103 - for (; xy < xymax; ++xy) { 14.104 - // determine repeat count 14.105 - byte v = data[xy]; 14.106 - for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) { 14.107 - if (data[xy] != v) { 14.108 - break; 14.109 - } 14.110 - } 14.111 - xy -= repeatCount; 14.112 - if (repeatCount < 3) { 14.113 - literalCount++; 14.114 - if (literalCount == 254) { 14.115 - temp.write(0);temp.write(literalCount); // Literal OP-code 14.116 - temp.write(data, xy - literalCount + 1, literalCount); 14.117 - literalCount = 0; 14.118 - } 14.119 - } else { 14.120 - if (literalCount > 0) { 14.121 - if (literalCount < 3) { 14.122 - for (; literalCount > 0; --literalCount) { 14.123 - temp.write(1); // Repeat OP-code 14.124 - temp.write(data[xy - literalCount]); 14.125 - } 14.126 - } else { 14.127 - temp.write(0);temp.write(literalCount); // Literal OP-code 14.128 - temp.write(data, xy - literalCount, literalCount); 14.129 - if (literalCount % 2 == 1) { 14.130 - temp.write(0); // pad byte 14.131 - } 14.132 - literalCount = 0; 14.133 - } 14.134 - } 14.135 - temp.write(repeatCount); // Repeat OP-code 14.136 - temp.write(v); 14.137 - xy += repeatCount - 1; 14.138 - } 14.139 - } 14.140 - 14.141 - // flush literal run 14.142 - if (literalCount > 0) { 14.143 - if (literalCount < 3) { 14.144 - for (; literalCount > 0; --literalCount) { 14.145 - temp.write(1); // Repeat OP-code 14.146 - temp.write(data[xy - literalCount]); 14.147 - } 14.148 - } else { 14.149 - temp.write(0);temp.write(literalCount); 14.150 - temp.write(data, xy - literalCount, literalCount); 14.151 - if (literalCount % 2 == 1) { 14.152 - temp.write(0); // pad byte 14.153 - } 14.154 - } 14.155 - literalCount = 0; 14.156 - } 14.157 - 14.158 - temp.write(0);temp.write(0x0000);// End of line 14.159 - } 14.160 - temp.write(0);temp.write(0x0001);// End of bitmap 14.161 - tempSeek.toOutputStream(out); 14.162 - } 14.163 - 14.164 - /** Encodes a 8-bit delta frame. 14.165 - * 14.166 - * @param temp The output stream. Must be set to Big-Endian. 14.167 - * @param data The image data. 14.168 - * @param prev The image data of the previous frame. 14.169 - * @param offset The offset to the first pixel in the data array. 14.170 - * @param length The width of the image in data elements. 14.171 - * @param step The number to add to offset to get to the next scanline. 
14.172 - */ 14.173 - public void writeDelta8(OutputStream out, byte[] data, byte[] prev, int offset, int length, int step, int height) 14.174 - throws IOException { 14.175 - 14.176 -tempSeek.reset(); 14.177 - // Determine whether we can skip lines at the beginning 14.178 - int ymin; 14.179 - int ymax = offset + height * step; 14.180 - int upsideDown = ymax-step+offset; 14.181 - scanline: 14.182 - for (ymin = offset; ymin < ymax; ymin += step) { 14.183 - int xy = upsideDown-ymin; 14.184 - int xymax = xy + length; 14.185 - for (; xy < xymax; ++xy) { 14.186 - if (data[xy] != prev[xy]) { 14.187 - break scanline; 14.188 - } 14.189 - } 14.190 - } 14.191 - 14.192 - if (ymin == ymax) { 14.193 - // => Frame is identical to previous one 14.194 - temp.write(0);temp.write(0x0001); // end of bitmap 14.195 - return; 14.196 - } 14.197 - 14.198 - if (ymin > offset) { 14.199 - int verticalOffset = ymin / step; 14.200 - while (verticalOffset > 255) { 14.201 - temp.write(0);temp.write(0x0002); // Skip OP-code 14.202 - temp.write(0); // horizontal offset 14.203 - temp.write(255); // vertical offset 14.204 - verticalOffset -= 255; 14.205 - } 14.206 - if (verticalOffset == 1) { 14.207 - temp.write(0);temp.write(0x0000); // End of line OP-code 14.208 - } else { 14.209 - temp.write(0);temp.write(0x0002); // Skip OP-code 14.210 - temp.write(0); // horizontal offset 14.211 - temp.write(verticalOffset); // vertical offset 14.212 - } 14.213 - } 14.214 - 14.215 - 14.216 - // Determine whether we can skip lines at the end 14.217 - scanline: 14.218 - for (; ymax > ymin; ymax -= step) { 14.219 - int xy = upsideDown-ymax+step; 14.220 - int xymax = xy + length; 14.221 - for (; xy < xymax; ++xy) { 14.222 - if (data[xy] != prev[xy]) { 14.223 - break scanline; 14.224 - } 14.225 - } 14.226 - } 14.227 - //System.out.println("MicrosoftRLEEncoder ymin:" + ymin / step + " ymax" + ymax / step); 14.228 - 14.229 - 14.230 - // Encode each scanline 14.231 - int verticalOffset = 0; 14.232 - for (int y = ymin; y < ymax; y += step) { 14.233 - int xy = upsideDown-y; 14.234 - int xymax = xy + length; 14.235 - 14.236 - // determine skip count 14.237 - int skipCount = 0; 14.238 - for (; xy < xymax; ++xy, ++skipCount) { 14.239 - if (data[xy] != prev[xy]) { 14.240 - break; 14.241 - } 14.242 - } 14.243 - if (skipCount == length) { 14.244 - // => the entire line can be skipped 14.245 - ++verticalOffset; 14.246 - if (verticalOffset == 255) { 14.247 - temp.write(0);temp.write(0x0002); // Skip OP-code 14.248 - temp.write(0); // horizontal offset 14.249 - temp.write(255); // vertical offset 14.250 - verticalOffset = 0; 14.251 - } 14.252 - continue; 14.253 - } 14.254 - 14.255 - if (verticalOffset > 0 || skipCount > 0) { 14.256 - if (verticalOffset == 1 && skipCount == 0) { 14.257 - temp.write(0);temp.write(0x0000); // End of line OP-code 14.258 - } else { 14.259 - temp.write(0);temp.write(0x0002); // Skip OP-code 14.260 - temp.write(Math.min(255, skipCount)); // horizontal offset 14.261 - skipCount -= 255; 14.262 - temp.write(verticalOffset); // vertical offset 14.263 - } 14.264 - verticalOffset = 0; 14.265 - } 14.266 - while (skipCount > 0) { 14.267 - temp.write(0);temp.write(0x0002); // Skip OP-code 14.268 - temp.write(Math.min(255, skipCount)); // horizontal offset 14.269 - temp.write(0); // vertical offset 14.270 - skipCount -= 255; 14.271 - } 14.272 - 14.273 - int literalCount = 0; 14.274 - int repeatCount = 0; 14.275 - for (; xy < xymax; ++xy) { 14.276 - // determine skip count 14.277 - for (skipCount = 0; xy < xymax; ++xy, ++skipCount) { 14.278 
- if (data[xy] != prev[xy]) { 14.279 - break; 14.280 - } 14.281 - } 14.282 - xy -= skipCount; 14.283 - 14.284 - // determine repeat count 14.285 - byte v = data[xy]; 14.286 - for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) { 14.287 - if (data[xy] != v) { 14.288 - break; 14.289 - } 14.290 - } 14.291 - xy -= repeatCount; 14.292 - 14.293 - if (skipCount < 4 && xy + skipCount < xymax && repeatCount < 3) { 14.294 - literalCount++; 14.295 - if (literalCount == 254) { 14.296 - temp.write(0);temp.write(literalCount); // Literal OP-code 14.297 - temp.write(data, xy - literalCount + 1, literalCount); 14.298 - literalCount = 0; 14.299 - } 14.300 - } else { 14.301 - if (literalCount > 0) { 14.302 - if (literalCount < 3) { 14.303 - for (; literalCount > 0; --literalCount) { 14.304 - temp.write(1); // Repeat OP-code 14.305 - temp.write(data[xy - literalCount]); 14.306 - } 14.307 - } else { 14.308 - temp.write(0);temp.write(literalCount); 14.309 - temp.write(data, xy - literalCount, literalCount); 14.310 - if (literalCount % 2 == 1) { 14.311 - temp.write(0); // pad byte 14.312 - } 14.313 - } 14.314 - literalCount = 0; 14.315 - } 14.316 - if (xy + skipCount == xymax) { 14.317 - // => we can skip until the end of the line without 14.318 - // having to write an op-code 14.319 - xy += skipCount - 1; 14.320 - } else if (skipCount >= repeatCount) { 14.321 - while (skipCount > 255) { 14.322 - temp.write(0);temp.write(0x0002); // Skip OP-code 14.323 - temp.write(255); 14.324 - temp.write(0); 14.325 - xy += 255; 14.326 - skipCount -= 255; 14.327 - } 14.328 - temp.write(0);temp.write(0x0002); // Skip OP-code 14.329 - temp.write(skipCount); 14.330 - temp.write(0); 14.331 - xy += skipCount - 1; 14.332 - } else { 14.333 - temp.write(repeatCount); // Repeat OP-code 14.334 - temp.write(v); 14.335 - xy += repeatCount - 1; 14.336 - } 14.337 - } 14.338 - } 14.339 - 14.340 - // flush literal run 14.341 - if (literalCount > 0) { 14.342 - if (literalCount < 3) { 14.343 - for (; literalCount > 0; --literalCount) { 14.344 - temp.write(1); // Repeat OP-code 14.345 - temp.write(data[xy - literalCount]); 14.346 - } 14.347 - } else { 14.348 - temp.write(0);temp.write(literalCount); 14.349 - temp.write(data, xy - literalCount, literalCount); 14.350 - if (literalCount % 2 == 1) { 14.351 - temp.write(0); // pad byte 14.352 - } 14.353 - } 14.354 - } 14.355 - 14.356 - temp.write(0);temp.write(0x0000); // End of line OP-code 14.357 - } 14.358 - 14.359 - temp.write(0);temp.write(0x0001);// End of bitmap 14.360 - tempSeek.toOutputStream(out); 14.361 - } 14.362 - 14.363 - public static void main(String[] args) { 14.364 - byte[] data = {// 14.365 - 8, 2, 3, 4, 4, 3,7,7,7, 8,// 14.366 - 8, 1, 1, 1, 1, 2,7,7,7, 8,// 14.367 - 8, 0, 2, 0, 0, 0,7,7,7, 8,// 14.368 - 8, 2, 2, 3, 4, 4,7,7,7, 8,// 14.369 - 8, 1, 4, 4, 4, 5,7,7,7, 8}; 14.370 - 14.371 - 14.372 - byte[] prev = {// 14.373 - 8, 3, 3, 3, 3, 3,7,7,7, 8,// 14.374 - 8, 1, 1, 1, 1, 1,7,7,7, 8, // 14.375 - 8, 5, 5, 5, 5, 0,7,7,7, 8,// 14.376 - 8, 2, 2, 0, 0, 0,7,7,7, 8,// 14.377 - 8, 2, 0, 0, 0, 5,7,7,7, 8}; 14.378 - ByteArrayOutputStream buf = new ByteArrayOutputStream(); 14.379 - DataChunkOutputStream out = new DataChunkOutputStream(buf); 14.380 - MicrosoftRLEEncoder enc = new MicrosoftRLEEncoder(); 14.381 - 14.382 - try { 14.383 - enc.writeDelta8(out, data, prev, 1, 8, 10, 5); 14.384 - //enc.writeKey8(out, data, 1, 8, 10,5); 14.385 - out.close(); 14.386 - 14.387 - byte[] result = buf.toByteArray(); 14.388 - System.out.println("size:" + result.length); 14.389 - 
System.out.println(Arrays.toString(result)); 14.390 - System.out.print("0x ["); 14.391 - 14.392 - for (int i = 0; i < result.length; i++) { 14.393 - if (i != 0) { 14.394 - System.out.print(','); 14.395 - } 14.396 - String hex = "00" + Integer.toHexString(result[i]); 14.397 - System.out.print(hex.substring(hex.length() - 2)); 14.398 - } 14.399 - System.out.println(']'); 14.400 - 14.401 - } catch (IOException ex) { 14.402 - ex.printStackTrace(); 14.403 - } 14.404 - } 14.405 -}
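[Editor's note] The op-code table in the MicrosoftRLEEncoder javadoc is easiest to check with a decoder. The following sketch is illustration only (not part of the changeset) and expands a single RLE8 key-frame scanline using exactly the documented op-codes:

    public class Rle8LineDecoder {
        /** Expands one RLE8 scanline into 'line'; returns the index just past the terminating op-code. */
        static int decodeLine(byte[] rle, int pos, byte[] line) {
            int x = 0;
            while (true) {
                int n = rle[pos++] & 0xff;
                if (n == 0) {
                    int op = rle[pos++] & 0xff;
                    if (op == 0) return pos;        // 00 00: end of line
                    if (op == 1) return pos;        // 00 01: end of bitmap
                    if (op == 2) {                  // 00 02 x y: skip (delta)
                        x += rle[pos++] & 0xff;
                        pos++;                      // vertical offset ignored in this 1-line sketch
                        continue;
                    }
                    for (int i = 0; i < op; i++) {  // 00 n data{n}: literal run
                        line[x++] = rle[pos++];
                    }
                    if ((op & 1) == 1) pos++;       // odd-length literals carry a 0x00 pad byte
                } else {
                    byte v = rle[pos++];            // n data: repeat run
                    for (int i = 0; i < n; i++) {
                        line[x++] = v;
                    }
                }
            }
        }

        public static void main(String[] args) {
            byte[] rle = {0x03, 0x04, 0x05, 0x06, 0x00, 0x03, 0x45, 0x56, 0x67, 0x00, 0x00, 0x00};
            byte[] line = new byte[12];
            decodeLine(rle, 0, line);
            // line now begins with 04 04 04 06 06 06 06 06 45 56 67
        }
    }

Running it on the compressed bytes from the javadoc example reproduces the expanded bytes listed there.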
15.1 --- a/src/com/aurellem/capture/video/XuggleVideoRecorder.java Wed Oct 26 08:54:12 2011 -0700 15.2 +++ b/src/com/aurellem/capture/video/XuggleVideoRecorder.java Wed Oct 26 09:38:27 2011 -0700 15.3 @@ -1,5 +1,14 @@ 15.4 package com.aurellem.capture.video; 15.5 15.6 +import java.awt.image.BufferedImage; 15.7 +import java.io.File; 15.8 +import java.io.IOException; 15.9 +import java.util.concurrent.TimeUnit; 15.10 + 15.11 +import com.xuggle.mediatool.IMediaWriter; 15.12 +import com.xuggle.mediatool.ToolFactory; 15.13 +import com.xuggle.xuggler.IRational; 15.14 + 15.15 15.16 /** 15.17 * Handles writing video files using Xuggle. 15.18 @@ -8,10 +17,9 @@ 15.19 * @author Robert McIntyre 15.20 * 15.21 */ 15.22 -/* 15.23 + 15.24 public class XuggleVideoRecorder extends AbstractVideoRecorder{ 15.25 15.26 - 15.27 IMediaWriter writer; 15.28 BufferedImage frame; 15.29 int videoChannel = 0; 15.30 @@ -50,4 +58,4 @@ 15.31 15.32 } 15.33 15.34 -*/ 15.35 +
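[Editor's note] The final hunk removes the comment markers that had disabled XuggleVideoRecorder and adds the Xuggle imports it needs. A sketch of the IMediaWriter calls those imports point to follows; the file name, codec and frame size are placeholders, and the recorder's real field wiring is outside this hunk:

    import java.awt.image.BufferedImage;
    import java.util.concurrent.TimeUnit;

    import com.xuggle.mediatool.IMediaWriter;
    import com.xuggle.mediatool.ToolFactory;
    import com.xuggle.xuggler.ICodec;

    public class XuggleSketch {
        public static void main(String[] args) {
            IMediaWriter writer = ToolFactory.makeWriter("capture.mp4");
            writer.addVideoStream(0, 0, ICodec.ID.CODEC_ID_MPEG4, 640, 480);

            long frameTimeNanos = 0;
            for (int i = 0; i < 90; i++) {
                // Xuggle's BufferedImage converter expects TYPE_3BYTE_BGR images.
                BufferedImage frame = new BufferedImage(640, 480, BufferedImage.TYPE_3BYTE_BGR);
                // ... draw into frame ...
                writer.encodeVideo(0, frame, frameTimeNanos, TimeUnit.NANOSECONDS);
                frameTimeNanos += 1000000000L / 30;   // advance the timestamp at 30 fps
            }
            writer.close();
        }
    }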