# HG changeset patch # User Robert McIntyre # Date 1319644452 25200 # Node ID 5dfc9e76881666ad90f51c13f5c9968bd318689c # Parent dde12be0202905e5b0b4a3c8a7499e53b08ec3af moved files diff -r dde12be02029 -r 5dfc9e768816 generate_headers.sh --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/generate_headers.sh Wed Oct 26 08:54:12 2011 -0700 @@ -0,0 +1,1 @@ +javah -classpath bin -d /home/r/proj/audio-send/OpenAL32/Include/ com.aurellem.capture.AudioSend diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/AVIOutputStream.java --- a/src/com/aurellem/capture/AVIOutputStream.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1548 +0,0 @@ -/** - * @(#)AVIOutputStream.java 1.5.1 2011-01-17 - * - * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland. - * All rights reserved. - * - * You may not use, copy or modify this file, except in compliance with the - * license agreement you entered into with Werner Randelshofer. - * For details see accompanying license terms. - */ -package com.aurellem.capture; - -import java.awt.Dimension; -import java.awt.image.BufferedImage; -import java.awt.image.DataBufferByte; -import java.awt.image.IndexColorModel; -import java.awt.image.WritableRaster; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Arrays; -import java.util.Date; -import java.util.LinkedList; - -import javax.imageio.IIOImage; -import javax.imageio.ImageIO; -import javax.imageio.ImageWriteParam; -import javax.imageio.ImageWriter; -import javax.imageio.stream.FileImageOutputStream; -import javax.imageio.stream.ImageOutputStream; -import javax.imageio.stream.MemoryCacheImageOutputStream; - -/** - * This class supports writing of images into an AVI 1.0 video file. - *
- * The images are written as video frames. - *
- * Video frames can be encoded with one of the following formats: - *
- * RAW, RLE, JPG, or PNG (see {@link VideoFormat}).
- * All frames must have the same format.
- * When JPG is used, each frame can have an individual encoding quality.
- *
- * All frames in an AVI file must have the same duration. The duration is
- * set by choosing an appropriate pair of values with the methods
- * {@link #setFrameRate} and {@link #setTimeScale}.
- *
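// A minimal usage sketch (hypothetical file name): the frame duration is
// timeScale/frameRate seconds, so a fractional rate such as NTSC's 29.97
// frames per second needs a rational pair.
AVIOutputStream out =
    new AVIOutputStream(new File("clip.avi"), AVIOutputStream.VideoFormat.JPG);
out.setTimeScale(1001);  // written to the stream header as dwScale
out.setFrameRate(30000); // written as dwRate; 30000/1001 is roughly 29.97 fps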
- * The length of an AVI 1.0 file is limited to 1 GB. - * This class supports lengths of up to 4 GB, but such files may not work on - * all players. - *
- * For detailed information about the AVI RIFF file format see:
- * msdn.microsoft.com AVI RIFF
- * www.microsoft.com FOURCC for Video Compression
- * www.saettler.com RIFF
- *
- * @author Werner Randelshofer
- * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream.
- * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format.
- * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets
- * in "idx1" chunk.
- * <br>1.3.2 2010-12-27 File size limit is 1 GB.
- * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets.
- * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream.
- * Added method getVideoDimension().
- * <br>1.2 2009-08-29 Adds support for RAW video format.
- * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih
- * chunk. Changed the API to reflect that AVI works with frame rates instead of
- * with frame durations.
- * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG
- * encoded video.
- *
1.0 2008-08-11 Created. - */ -public class AVIOutputStream { - - /** - * Underlying output stream. - */ - private ImageOutputStream out; - /** The offset of the QuickTime stream in the underlying ImageOutputStream. - * Normally this is 0 unless the underlying stream already contained data - * when it was passed to the constructor. - */ - private long streamOffset; - /** Previous frame for delta compression. */ - private Object previousData; - - /** - * Supported video encodings. - */ - public static enum VideoFormat { - - RAW, RLE, JPG, PNG; - } - /** - * Current video formats. - */ - private VideoFormat videoFormat; - /** - * Quality of JPEG encoded video frames. - */ - private float quality = 0.9f; - /** - * Creation time of the movie output stream. - */ - private Date creationTime; - /** - * Width of the video frames. All frames must have the same width. - * The value -1 is used to mark unspecified width. - */ - private int imgWidth = -1; - /** - * Height of the video frames. All frames must have the same height. - * The value -1 is used to mark unspecified height. - */ - private int imgHeight = -1; - /** Number of bits per pixel. */ - private int imgDepth = 24; - /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */ - private IndexColorModel palette; - private IndexColorModel previousPalette; - /** Video encoder. */ - - /** - * The timeScale of the movie. - *
- * Used with frameRate to specify the time scale that this stream will use.
- * Dividing frameRate by timeScale gives the number of samples per second.
- * For video streams, this is the frame rate. For audio streams, this rate
- * corresponds to the time needed to play nBlockAlign bytes of audio, which
- * for PCM audio is just the sample rate.
- */
- private int timeScale = 1;
- /**
- * The frameRate of the movie in timeScale units.
- *
- * @see timeScale - */ - private int frameRate = 30; - /** Interval between keyframes. */ - private int syncInterval = 30; - - /** - * The states of the movie output stream. - */ - private static enum States { - - STARTED, FINISHED, CLOSED; - } - /** - * The current state of the movie output stream. - */ - private States state = States.FINISHED; - - /** - * AVI stores media data in samples. - * A sample is a single element in a sequence of time-ordered data. - */ - private static class Sample { - - String chunkType; - /** Offset of the sample relative to the start of the AVI file. - */ - long offset; - /** Data length of the sample. */ - long length; - /** - * The duration of the sample in time scale units. - */ - int duration; - /** Whether the sample is a sync-sample. */ - boolean isSync; - - /** - * Creates a new sample. - * @param duration - * @param offset - * @param length - */ - public Sample(String chunkId, int duration, long offset, long length, boolean isSync) { - this.chunkType = chunkId; - this.duration = duration; - this.offset = offset; - this.length = length; - this.isSync = isSync; - } - } - /** - * List of video frames. - */ - private LinkedList videoFrames; - /** - * This chunk holds the whole AVI content. - */ - private CompositeChunk aviChunk; - /** - * This chunk holds the movie frames. - */ - private CompositeChunk moviChunk; - /** - * This chunk holds the AVI Main Header. - */ - FixedSizeDataChunk avihChunk; - /** - * This chunk holds the AVI Stream Header. - */ - FixedSizeDataChunk strhChunk; - /** - * This chunk holds the AVI Stream Format Header. - */ - FixedSizeDataChunk strfChunk; - - /** - * Chunk base class. - */ - private abstract class Chunk { - - /** - * The chunkType of the chunk. A String with the length of 4 characters. - */ - protected String chunkType; - /** - * The offset of the chunk relative to the start of the - * ImageOutputStream. - */ - protected long offset; - - /** - * Creates a new Chunk at the current position of the ImageOutputStream. - * @param chunkType The chunkType of the chunk. A string with a length of 4 characters. - */ - public Chunk(String chunkType) throws IOException { - this.chunkType = chunkType; - offset = getRelativeStreamPosition(); - } - - /** - * Writes the chunk to the ImageOutputStream and disposes it. - */ - public abstract void finish() throws IOException; - - /** - * Returns the size of the chunk including the size of the chunk header. - * @return The size of the chunk. - */ - public abstract long size(); - } - - /** - * A CompositeChunk contains an ordered list of Chunks. - */ - private class CompositeChunk extends Chunk { - - /** - * The type of the composite. A String with the length of 4 characters. - */ - protected String compositeType; - private LinkedList children; - private boolean finished; - - /** - * Creates a new CompositeChunk at the current position of the - * ImageOutputStream. - * @param compositeType The type of the composite. - * @param chunkType The type of the chunk. 
- */ - public CompositeChunk(String compositeType, String chunkType) throws IOException { - super(chunkType); - this.compositeType = compositeType; - //out.write - out.writeLong(0); // make room for the chunk header - out.writeInt(0); // make room for the chunk header - children = new LinkedList(); - } - - public void add(Chunk child) throws IOException { - if (children.size() > 0) { - children.getLast().finish(); - } - children.add(child); - } - - /** - * Writes the chunk and all its children to the ImageOutputStream - * and disposes of all resources held by the chunk. - * @throws java.io.IOException - */ - @Override - public void finish() throws IOException { - if (!finished) { - if (size() > 0xffffffffL) { - throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size()); - } - - long pointer = getRelativeStreamPosition(); - seekRelative(offset); - - DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); - headerData.writeType(compositeType); - headerData.writeUInt(size() - 8); - headerData.writeType(chunkType); - for (Chunk child : children) { - child.finish(); - } - seekRelative(pointer); - if (size() % 2 == 1) { - out.writeByte(0); // write pad byte - } - finished = true; - } - } - - @Override - public long size() { - long length = 12; - for (Chunk child : children) { - length += child.size() + child.size() % 2; - } - return length; - } - } - - /** - * Data Chunk. - */ - private class DataChunk extends Chunk { - - private DataChunkOutputStream data; - private boolean finished; - - /** - * Creates a new DataChunk at the current position of the - * ImageOutputStream. - * @param chunkType The chunkType of the chunk. - */ - public DataChunk(String name) throws IOException { - super(name); - out.writeLong(0); // make room for the chunk header - data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false); - } - - public DataChunkOutputStream getOutputStream() { - if (finished) { - throw new IllegalStateException("DataChunk is finished"); - } - return data; - } - - /** - * Returns the offset of this chunk to the beginning of the random access file - * @return - */ - public long getOffset() { - return offset; - } - - @Override - public void finish() throws IOException { - if (!finished) { - long sizeBefore = size(); - - if (size() > 0xffffffffL) { - throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size()); - } - - long pointer = getRelativeStreamPosition(); - seekRelative(offset); - - DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); - headerData.writeType(chunkType); - headerData.writeUInt(size() - 8); - seekRelative(pointer); - if (size() % 2 == 1) { - out.writeByte(0); // write pad byte - } - finished = true; - long sizeAfter = size(); - if (sizeBefore != sizeAfter) { - System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter); - } - } - } - - @Override - public long size() { - return 8 + data.size(); - } - } - - /** - * A DataChunk with a fixed size. - */ - private class FixedSizeDataChunk extends Chunk { - - private DataChunkOutputStream data; - private boolean finished; - private long fixedSize; - - /** - * Creates a new DataChunk at the current position of the - * ImageOutputStream. - * @param chunkType The chunkType of the chunk. 
- */ - public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException { - super(chunkType); - this.fixedSize = fixedSize; - data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); - data.writeType(chunkType); - data.writeUInt(fixedSize); - data.clearCount(); - - // Fill fixed size with nulls - byte[] buf = new byte[(int) Math.min(512, fixedSize)]; - long written = 0; - while (written < fixedSize) { - data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written)); - written += Math.min(buf.length, fixedSize - written); - } - if (fixedSize % 2 == 1) { - out.writeByte(0); // write pad byte - } - seekToStartOfData(); - } - - public DataChunkOutputStream getOutputStream() { - /*if (finished) { - throw new IllegalStateException("DataChunk is finished"); - }*/ - return data; - } - - /** - * Returns the offset of this chunk to the beginning of the random access file - * @return - */ - public long getOffset() { - return offset; - } - - public void seekToStartOfData() throws IOException { - seekRelative(offset + 8); - data.clearCount(); - } - - public void seekToEndOfChunk() throws IOException { - seekRelative(offset + 8 + fixedSize + fixedSize % 2); - } - - @Override - public void finish() throws IOException { - if (!finished) { - finished = true; - } - } - - @Override - public long size() { - return 8 + fixedSize; - } - } - - /** - * Creates a new AVI file with the specified video format and - * frame rate. The video has 24 bits per pixel. - * - * @param file the output file - * @param format Selects an encoder for the video format. - * @param bitsPerPixel the number of bits per pixel. - * @exception IllegalArgumentException if videoFormat is null or if - * frame rate is <= 0 - */ - public AVIOutputStream(File file, VideoFormat format) throws IOException { - this(file,format,24); - } - /** - * Creates a new AVI file with the specified video format and - * frame rate. - * - * @param file the output file - * @param format Selects an encoder for the video format. - * @param bitsPerPixel the number of bits per pixel. - * @exception IllegalArgumentException if videoFormat is null or if - * frame rate is <= 0 - */ - public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException { - if (format == null) { - throw new IllegalArgumentException("format must not be null"); - } - - if (file.exists()) { - file.delete(); - } - this.out = new FileImageOutputStream(file); - this.streamOffset = 0; - this.videoFormat = format; - this.videoFrames = new LinkedList(); - this.imgDepth = bitsPerPixel; - if (imgDepth == 4) { - byte[] gray = new byte[16]; - for (int i = 0; i < gray.length; i++) { - gray[i] = (byte) ((i << 4) | i); - } - palette = new IndexColorModel(4, 16, gray, gray, gray); - } else if (imgDepth == 8) { - byte[] gray = new byte[256]; - for (int i = 0; i < gray.length; i++) { - gray[i] = (byte) i; - } - palette = new IndexColorModel(8, 256, gray, gray, gray); - } - - } - - /** - * Creates a new AVI output stream with the specified video format and - * framerate. - * - * @param out the underlying output stream - * @param format Selects an encoder for the video format. 
- * @exception IllegalArgumentException if videoFormat is null or if
- * framerate is <= 0
- */
- public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {
- if (format == null) {
- throw new IllegalArgumentException("format must not be null");
- }
- this.out = out;
- this.streamOffset = out.getStreamPosition();
- this.videoFormat = format;
- this.videoFrames = new LinkedList();
- }
-
- /**
- * Used with frameRate to specify the time scale that this stream will use.
- * Dividing frameRate by timeScale gives the number of samples per second.
- * For video streams, this is the frame rate. For audio streams, this rate
- * corresponds to the time needed to play nBlockAlign bytes of audio, which
- * for PCM audio is just the sample rate.
- *
- * The default value is 1.
- *
- * @param newValue
- */
- public void setTimeScale(int newValue) {
- if (newValue <= 0) {
- throw new IllegalArgumentException("timeScale must be greater than 0");
- }
- this.timeScale = newValue;
- }
-
- /**
- * Returns the time scale of this media.
- *
- * @return time scale
- */
- public int getTimeScale() {
- return timeScale;
- }
-
- /**
- * Sets the rate of video frames in time scale units.
- *
- * The default value is 30. Together with the default value 1 of timeScale
- * this results in 30 frames per second.
- *
- * @param newValue
- */
- public void setFrameRate(int newValue) {
- if (newValue <= 0) {
- throw new IllegalArgumentException("frameRate must be greater than 0");
- }
- if (state == States.STARTED) {
- throw new IllegalStateException("frameRate must be set before the first frame is written");
- }
- this.frameRate = newValue;
- }
-
- /**
- * Returns the frame rate of this media.
- *
- * @return frame rate
- */
- public int getFrameRate() {
- return frameRate;
- }
-
- /** Sets the global color palette. */
- public void setPalette(IndexColorModel palette) {
- this.palette = palette;
- }
-
- /**
- * Sets the compression quality of the video track.
- * A value of 0 stands for "high compression is important"; a value of
- * 1 stands for "high image quality is important".
- *
- * Changing this value affects frames which are subsequently written - * to the AVIOutputStream. Frames which have already been written - * are not changed. - *
- * This value only has an effect on videos encoded with the JPG format.
- *
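// A short sketch of per-frame JPEG quality (the image variables are
// hypothetical); as noted above, a change only affects frames written
// after the call.
out.setVideoCompressionQuality(0.9f);
out.writeFrame(titleCard);  // encoded at JPEG quality 0.9
out.setVideoCompressionQuality(0.5f);
out.writeFrame(nextFrame);  // this and later frames at quality 0.5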
- * The default value is 0.9. - * - * @param newValue - */ - public void setVideoCompressionQuality(float newValue) { - this.quality = newValue; - } - - /** - * Returns the video compression quality. - * - * @return video compression quality - */ - public float getVideoCompressionQuality() { - return quality; - } - - /** - * Sets the dimension of the video track. - *
- * You need to set the dimension explicitly if you add all frames from
- * files or input streams.
- *
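// A minimal sketch of the file-based path (file names hypothetical); the
// frames must already be JPEG-encoded, since writeFrame(File) copies the
// bytes without inspecting them.
AVIOutputStream out =
    new AVIOutputStream(new File("slides.avi"), AVIOutputStream.VideoFormat.JPG);
out.setVideoDimension(640, 480);          // required before finish()/close()
out.writeFrame(new File("slide-01.jpg"));
out.writeFrame(new File("slide-02.jpg"));
out.close();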
- * If you add frames from buffered images, then AVIOutputStream
- * can determine the video dimension from the image width and height.
- *
- * @param width Must be greater than 0.
- * @param height Must be greater than 0.
- */
- public void setVideoDimension(int width, int height) {
- if (width < 1 || height < 1) {
- throw new IllegalArgumentException("width and height must be greater than zero.");
- }
- this.imgWidth = width;
- this.imgHeight = height;
- }
-
- /**
- * Gets the dimension of the video track.
- *
- * Returns null if the dimension is not known.
- */
- public Dimension getVideoDimension() {
- if (imgWidth < 1 || imgHeight < 1) {
- return null;
- }
- return new Dimension(imgWidth, imgHeight);
- }
-
- /**
- * Sets the state of the AVIOutputStream to started.
- *
- * If the state is changed by this method, the prolog is - * written. - */ - private void ensureStarted() throws IOException { - if (state != States.STARTED) { - creationTime = new Date(); - writeProlog(); - state = States.STARTED; - } - } - - /** - * Writes a frame to the video track. - *
- * If the dimension of the video track has not been specified yet, it - * is derived from the first buffered image added to the AVIOutputStream. - * - * @param image The frame image. - * - * @throws IllegalArgumentException if the duration is less than 1, or - * if the dimension of the frame does not match the dimension of the video - * track. - * @throws IOException if writing the image failed. - */ - public void writeFrame(BufferedImage image) throws IOException { - ensureOpen(); - ensureStarted(); - - // Get the dimensions of the first image - if (imgWidth == -1) { - imgWidth = image.getWidth(); - imgHeight = image.getHeight(); - } else { - // The dimension of the image must match the dimension of the video track - if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) { - throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size() - + "] (width=" + image.getWidth() + ", height=" + image.getHeight() - + ") differs from image[0] (width=" - + imgWidth + ", height=" + imgHeight); - } - } - - DataChunk videoFrameChunk; - long offset = getRelativeStreamPosition(); - boolean isSync = true; - switch (videoFormat) { - case RAW: { - switch (imgDepth) { - case 4: { - IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); - int[] imgRGBs = new int[16]; - imgPalette.getRGBs(imgRGBs); - int[] previousRGBs = new int[16]; - if (previousPalette == null) { - previousPalette = palette; - } - previousPalette.getRGBs(previousRGBs); - if (!Arrays.equals(imgRGBs, previousRGBs)) { - previousPalette = imgPalette; - DataChunk paletteChangeChunk = new DataChunk("00pc"); - /* - int first = imgPalette.getMapSize(); - int last = -1; - for (int i = 0; i < 16; i++) { - if (previousRGBs[i] != imgRGBs[i] && i < first) { - first = i; - } - if (previousRGBs[i] != imgRGBs[i] && i > last) { - last = i; - } - }*/ - int first = 0; - int last = imgPalette.getMapSize() - 1; - /* - * typedef struct { - BYTE bFirstEntry; - BYTE bNumEntries; - WORD wFlags; - PALETTEENTRY peNew[]; - } AVIPALCHANGE; - * - * typedef struct tagPALETTEENTRY { - BYTE peRed; - BYTE peGreen; - BYTE peBlue; - BYTE peFlags; - } PALETTEENTRY; - */ - DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); - pOut.writeByte(first);//bFirstEntry - pOut.writeByte(last - first + 1);//bNumEntries - pOut.writeShort(0);//wFlags - - for (int i = first; i <= last; i++) { - pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red - pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green - pOut.writeByte(imgRGBs[i] & 0xff); // blue - pOut.writeByte(0); // reserved*/ - } - - moviChunk.add(paletteChangeChunk); - paletteChangeChunk.finish(); - long length = getRelativeStreamPosition() - offset; - videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); - offset = getRelativeStreamPosition(); - } - - videoFrameChunk = new DataChunk("00db"); - byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); - byte[] rgb4 = new byte[imgWidth / 2]; - for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down - for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) { - rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf)); - } - videoFrameChunk.getOutputStream().write(rgb4); - } - break; - } - case 8: { - IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); - int[] imgRGBs = new int[256]; - imgPalette.getRGBs(imgRGBs); - int[] previousRGBs = new int[256]; - if (previousPalette == null) { - previousPalette = palette; - } - 
previousPalette.getRGBs(previousRGBs); - if (!Arrays.equals(imgRGBs, previousRGBs)) { - previousPalette = imgPalette; - DataChunk paletteChangeChunk = new DataChunk("00pc"); - /* - int first = imgPalette.getMapSize(); - int last = -1; - for (int i = 0; i < 16; i++) { - if (previousRGBs[i] != imgRGBs[i] && i < first) { - first = i; - } - if (previousRGBs[i] != imgRGBs[i] && i > last) { - last = i; - } - }*/ - int first = 0; - int last = imgPalette.getMapSize() - 1; - /* - * typedef struct { - BYTE bFirstEntry; - BYTE bNumEntries; - WORD wFlags; - PALETTEENTRY peNew[]; - } AVIPALCHANGE; - * - * typedef struct tagPALETTEENTRY { - BYTE peRed; - BYTE peGreen; - BYTE peBlue; - BYTE peFlags; - } PALETTEENTRY; - */ - DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); - pOut.writeByte(first);//bFirstEntry - pOut.writeByte(last - first + 1);//bNumEntries - pOut.writeShort(0);//wFlags - - for (int i = first; i <= last; i++) { - pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red - pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green - pOut.writeByte(imgRGBs[i] & 0xff); // blue - pOut.writeByte(0); // reserved*/ - } - - moviChunk.add(paletteChangeChunk); - paletteChangeChunk.finish(); - long length = getRelativeStreamPosition() - offset; - videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); - offset = getRelativeStreamPosition(); - } - - videoFrameChunk = new DataChunk("00db"); - byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); - for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down - videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth); - } - break; - } - default: { - videoFrameChunk = new DataChunk("00db"); - WritableRaster raster = image.getRaster(); - int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data - byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data - for (int y = imgHeight - 1; y >= 0; --y) { // Upside down - raster.getPixels(0, y, imgWidth, 1, raw); - for (int x = 0, n = imgWidth * 3; x < n; x += 3) { - bytes[x + 2] = (byte) raw[x]; // Blue - bytes[x + 1] = (byte) raw[x + 1]; // Green - bytes[x] = (byte) raw[x + 2]; // Red - } - videoFrameChunk.getOutputStream().write(bytes); - } - break; - } - } - break; - } - - case JPG: { - videoFrameChunk = new DataChunk("00dc"); - ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next(); - ImageWriteParam iwParam = iw.getDefaultWriteParam(); - iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); - iwParam.setCompressionQuality(quality); - MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); - iw.setOutput(imgOut); - IIOImage img = new IIOImage(image, null, null); - iw.write(null, img, iwParam); - iw.dispose(); - break; - } - case PNG: - default: { - videoFrameChunk = new DataChunk("00dc"); - ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next(); - ImageWriteParam iwParam = iw.getDefaultWriteParam(); - MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); - iw.setOutput(imgOut); - IIOImage img = new IIOImage(image, null, null); - iw.write(null, img, iwParam); - iw.dispose(); - break; - } - } - long length = getRelativeStreamPosition() - offset; - moviChunk.add(videoFrameChunk); - videoFrameChunk.finish(); - - videoFrames.add(new Sample(videoFrameChunk.chunkType, 
- frameRate, offset, length - 8, isSync));
- if (getRelativeStreamPosition() > 1L << 32) {
- throw new IOException("AVI file is larger than 4 GB");
- }
- }
-
- /**
- * Writes a frame from a file to the video track.
- *
- * This method does not inspect the contents of the file.
- * For example, it is your responsibility to only add JPG files if you have
- * chosen the JPEG video format.
- *
- * If you add all frames from files or from input streams, then you - * have to explicitly set the dimension of the video track before you - * call finish() or close(). - * - * @param file The file which holds the image data. - * - * @throws IllegalStateException if the duration is less than 1. - * @throws IOException if writing the image failed. - */ - public void writeFrame(File file) throws IOException { - FileInputStream in = null; - try { - in = new FileInputStream(file); - writeFrame(in); - } finally { - if (in != null) { - in.close(); - } - } - } - - /** - * Writes a frame to the video track. - *
- * This method does not inspect the contents of the file.
- * For example, it is your responsibility to only add JPG files if you have
- * chosen the JPEG video format.
- *
- * If you add all frames from files or from input streams, then you - * have to explicitly set the dimension of the video track before you - * call finish() or close(). - * - * @param in The input stream which holds the image data. - * - * @throws IllegalArgumentException if the duration is less than 1. - * @throws IOException if writing the image failed. - */ - public void writeFrame(InputStream in) throws IOException { - ensureOpen(); - ensureStarted(); - - DataChunk videoFrameChunk = new DataChunk( - videoFormat == VideoFormat.RAW ? "00db" : "00dc"); - moviChunk.add(videoFrameChunk); - OutputStream mdatOut = videoFrameChunk.getOutputStream(); - long offset = getRelativeStreamPosition(); - byte[] buf = new byte[512]; - int len; - while ((len = in.read(buf)) != -1) { - mdatOut.write(buf, 0, len); - } - long length = getRelativeStreamPosition() - offset; - videoFrameChunk.finish(); - videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true)); - if (getRelativeStreamPosition() > 1L << 32) { - throw new IOException("AVI file is larger than 4 GB"); - } - } - - /** - * Closes the movie file as well as the stream being filtered. - * - * @exception IOException if an I/O error has occurred - */ - public void close() throws IOException { - if (state == States.STARTED) { - finish(); - } - if (state != States.CLOSED) { - out.close(); - state = States.CLOSED; - } - } - - /** - * Finishes writing the contents of the AVI output stream without closing - * the underlying stream. Use this method when applying multiple filters - * in succession to the same output stream. - * - * @exception IllegalStateException if the dimension of the video track - * has not been specified or determined yet. - * @exception IOException if an I/O exception has occurred - */ - public void finish() throws IOException { - ensureOpen(); - if (state != States.FINISHED) { - if (imgWidth == -1 || imgHeight == -1) { - throw new IllegalStateException("image width and height must be specified"); - } - - moviChunk.finish(); - writeEpilog(); - state = States.FINISHED; - imgWidth = imgHeight = -1; - } - } - - /** - * Check to make sure that this stream has not been closed - */ - private void ensureOpen() throws IOException { - if (state == States.CLOSED) { - throw new IOException("Stream closed"); - } - } - - /** Gets the position relative to the beginning of the QuickTime stream. - *
- * Usually this value is equal to the stream position of the underlying - * ImageOutputStream, but can be larger if the underlying stream already - * contained data. - * - * @return The relative stream position. - * @throws IOException - */ - private long getRelativeStreamPosition() throws IOException { - return out.getStreamPosition() - streamOffset; - } - - /** Seeks relative to the beginning of the QuickTime stream. - *
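// A sketch of the offset bookkeeping (stream contents hypothetical): the
// ImageOutputStream constructor records the current position as streamOffset,
// so AVI data can follow content that is already in the stream.
FileImageOutputStream s = new FileImageOutputStream(new File("mixed.bin"));
s.write(existingHeader); // pre-existing, non-AVI bytes
AVIOutputStream avi = new AVIOutputStream(s, AVIOutputStream.VideoFormat.PNG);
// From here on, getRelativeStreamPosition() is the position minus streamOffset.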
- * Usually this equal to seeking in the underlying ImageOutputStream, but - * can be different if the underlying stream already contained data. - * - */ - private void seekRelative(long newPosition) throws IOException { - out.seek(newPosition + streamOffset); - } - - private void writeProlog() throws IOException { - // The file has the following structure: - // - // .RIFF AVI - // ..avih (AVI Header Chunk) - // ..LIST strl - // ...strh (Stream Header Chunk) - // ...strf (Stream Format Chunk) - // ..LIST movi - // ...00dc (Compressed video data chunk in Track 00, repeated for each frame) - // ..idx1 (List of video data chunks and their location in the file) - - // The RIFF AVI Chunk holds the complete movie - aviChunk = new CompositeChunk("RIFF", "AVI "); - CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl"); - - // Write empty AVI Main Header Chunk - we fill the data in later - aviChunk.add(hdrlChunk); - avihChunk = new FixedSizeDataChunk("avih", 56); - avihChunk.seekToEndOfChunk(); - hdrlChunk.add(avihChunk); - - CompositeChunk strlChunk = new CompositeChunk("LIST", "strl"); - hdrlChunk.add(strlChunk); - - // Write empty AVI Stream Header Chunk - we fill the data in later - strhChunk = new FixedSizeDataChunk("strh", 56); - strhChunk.seekToEndOfChunk(); - strlChunk.add(strhChunk); - strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4); - strfChunk.seekToEndOfChunk(); - strlChunk.add(strfChunk); - - moviChunk = new CompositeChunk("LIST", "movi"); - aviChunk.add(moviChunk); - - - } - - private void writeEpilog() throws IOException { - // Compute values - int duration = 0; - for (Sample s : videoFrames) { - duration += s.duration; - } - long bufferSize = 0; - for (Sample s : videoFrames) { - if (s.length > bufferSize) { - bufferSize = s.length; - } - } - - - DataChunkOutputStream d; - - /* Create Idx1 Chunk and write data - * ------------- - typedef struct _avioldindex { - FOURCC fcc; - DWORD cb; - struct _avioldindex_entry { - DWORD dwChunkId; - DWORD dwFlags; - DWORD dwOffset; - DWORD dwSize; - } aIndex[]; - } AVIOLDINDEX; - */ - DataChunk idx1Chunk = new DataChunk("idx1"); - aviChunk.add(idx1Chunk); - d = idx1Chunk.getOutputStream(); - long moviListOffset = moviChunk.offset + 8; - //moviListOffset = 0; - for (Sample f : videoFrames) { - - d.writeType(f.chunkType); // dwChunkId - // Specifies a FOURCC that identifies a stream in the AVI file. The - // FOURCC must have the form 'xxyy' where xx is the stream number and yy - // is a two-character code that identifies the contents of the stream: - // - // Two-character code Description - // db Uncompressed video frame - // dc Compressed video frame - // pc Palette change - // wb Audio data - - d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)// - | (f.isSync ? 0x10 : 0x0)); // dwFlags - // Specifies a bitwise combination of zero or more of the following - // flags: - // - // Value Name Description - // 0x10 AVIIF_KEYFRAME The data chunk is a key frame. - // 0x1 AVIIF_LIST The data chunk is a 'rec ' list. - // 0x100 AVIIF_NO_TIME The data chunk does not affect the timing of the - // stream. For example, this flag should be set for - // palette changes. - - d.writeUInt(f.offset - moviListOffset); // dwOffset - // Specifies the location of the data chunk in the file. The value - // should be specified as an offset, in bytes, from the start of the - // 'movi' list; however, in some AVI files it is given as an offset from - // the start of the file. 
- - d.writeUInt(f.length); // dwSize - // Specifies the size of the data chunk, in bytes. - } - idx1Chunk.finish(); - - /* Write Data into AVI Main Header Chunk - * ------------- - * The AVIMAINHEADER structure defines global information in an AVI file. - * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx - typedef struct _avimainheader { - FOURCC fcc; - DWORD cb; - DWORD dwMicroSecPerFrame; - DWORD dwMaxBytesPerSec; - DWORD dwPaddingGranularity; - DWORD dwFlags; - DWORD dwTotalFrames; - DWORD dwInitialFrames; - DWORD dwStreams; - DWORD dwSuggestedBufferSize; - DWORD dwWidth; - DWORD dwHeight; - DWORD dwReserved[4]; - } AVIMAINHEADER; */ - avihChunk.seekToStartOfData(); - d = avihChunk.getOutputStream(); - - d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame - // Specifies the number of microseconds between frames. - // This value indicates the overall timing for the file. - - d.writeUInt(0); // dwMaxBytesPerSec - // Specifies the approximate maximum data rate of the file. - // This value indicates the number of bytes per second the system - // must handle to present an AVI sequence as specified by the other - // parameters contained in the main header and stream header chunks. - - d.writeUInt(0); // dwPaddingGranularity - // Specifies the alignment for data, in bytes. Pad the data to multiples - // of this value. - - d.writeUInt(0x10); // dwFlags (0x10 == hasIndex) - // Contains a bitwise combination of zero or more of the following - // flags: - // - // Value Name Description - // 0x10 AVIF_HASINDEX Indicates the AVI file has an index. - // 0x20 AVIF_MUSTUSEINDEX Indicates that application should use the - // index, rather than the physical ordering of the - // chunks in the file, to determine the order of - // presentation of the data. For example, this flag - // could be used to create a list of frames for - // editing. - // 0x100 AVIF_ISINTERLEAVED Indicates the AVI file is interleaved. - // 0x1000 AVIF_WASCAPTUREFILE Indicates the AVI file is a specially - // allocated file used for capturing real-time - // video. Applications should warn the user before - // writing over a file with this flag set because - // the user probably defragmented this file. - // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted - // data and software. When this flag is used, - // software should not permit the data to be - // duplicated. - - d.writeUInt(videoFrames.size()); // dwTotalFrames - // Specifies the total number of frames of data in the file. - - d.writeUInt(0); // dwInitialFrames - // Specifies the initial frame for interleaved files. Noninterleaved - // files should specify zero. If you are creating interleaved files, - // specify the number of frames in the file prior to the initial frame - // of the AVI sequence in this member. - // To give the audio driver enough audio to work with, the audio data in - // an interleaved file must be skewed from the video data. Typically, - // the audio data should be moved forward enough frames to allow - // approximately 0.75 seconds of audio data to be preloaded. The - // dwInitialRecords member should be set to the number of frames the - // audio is skewed. Also set the same value for the dwInitialFrames - // member of the AVISTREAMHEADER structure in the audio stream header - - d.writeUInt(1); // dwStreams - // Specifies the number of streams in the file. For example, a file with - // audio and video has two streams. 
- - d.writeUInt(bufferSize); // dwSuggestedBufferSize - // Specifies the suggested buffer size for reading the file. Generally, - // this size should be large enough to contain the largest chunk in the - // file. If set to zero, or if it is too small, the playback software - // will have to reallocate memory during playback, which will reduce - // performance. For an interleaved file, the buffer size should be large - // enough to read an entire record, and not just a chunk. - - - d.writeUInt(imgWidth); // dwWidth - // Specifies the width of the AVI file in pixels. - - d.writeUInt(imgHeight); // dwHeight - // Specifies the height of the AVI file in pixels. - - d.writeUInt(0); // dwReserved[0] - d.writeUInt(0); // dwReserved[1] - d.writeUInt(0); // dwReserved[2] - d.writeUInt(0); // dwReserved[3] - // Reserved. Set this array to zero. - - /* Write Data into AVI Stream Header Chunk - * ------------- - * The AVISTREAMHEADER structure contains information about one stream - * in an AVI file. - * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx - typedef struct _avistreamheader { - FOURCC fcc; - DWORD cb; - FOURCC fccType; - FOURCC fccHandler; - DWORD dwFlags; - WORD wPriority; - WORD wLanguage; - DWORD dwInitialFrames; - DWORD dwScale; - DWORD dwRate; - DWORD dwStart; - DWORD dwLength; - DWORD dwSuggestedBufferSize; - DWORD dwQuality; - DWORD dwSampleSize; - struct { - short int left; - short int top; - short int right; - short int bottom; - } rcFrame; - } AVISTREAMHEADER; - */ - strhChunk.seekToStartOfData(); - d = strhChunk.getOutputStream(); - d.writeType("vids"); // fccType - vids for video stream - // Contains a FOURCC that specifies the type of the data contained in - // the stream. The following standard AVI values for video and audio are - // defined: - // - // FOURCC Description - // 'auds' Audio stream - // 'mids' MIDI stream - // 'txts' Text stream - // 'vids' Video stream - - switch (videoFormat) { - case RAW: - d.writeType("DIB "); // fccHandler - DIB for Raw RGB - break; - case RLE: - d.writeType("RLE "); // fccHandler - Microsoft RLE - break; - case JPG: - d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG - break; - case PNG: - default: - d.writeType("png "); // fccHandler - png for PNG - break; - } - // Optionally, contains a FOURCC that identifies a specific data - // handler. The data handler is the preferred handler for the stream. - // For audio and video streams, this specifies the codec for decoding - // the stream. - - if (imgDepth <= 8) { - d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES - } else { - d.writeUInt(0); // dwFlags - } - - // Contains any flags for the data stream. The bits in the high-order - // word of these flags are specific to the type of data contained in the - // stream. The following standard flags are defined: - // - // Value Name Description - // AVISF_DISABLED 0x00000001 Indicates this stream should not - // be enabled by default. - // AVISF_VIDEO_PALCHANGES 0x00010000 - // Indicates this video stream contains - // palette changes. This flag warns the playback - // software that it will need to animate the - // palette. - - d.writeUShort(0); // wPriority - // Specifies priority of a stream type. For example, in a file with - // multiple audio streams, the one with the highest priority might be - // the default stream. - - d.writeUShort(0); // wLanguage - // Language tag. - - d.writeUInt(0); // dwInitialFrames - // Specifies how far audio data is skewed ahead of the video frames in - // interleaved files. 
Typically, this is about 0.75 seconds. If you are - // creating interleaved files, specify the number of frames in the file - // prior to the initial frame of the AVI sequence in this member. For - // more information, see the remarks for the dwInitialFrames member of - // the AVIMAINHEADER structure. - - d.writeUInt(timeScale); // dwScale - // Used with dwRate to specify the time scale that this stream will use. - // Dividing dwRate by dwScale gives the number of samples per second. - // For video streams, this is the frame rate. For audio streams, this - // rate corresponds to the time needed to play nBlockAlign bytes of - // audio, which for PCM audio is the just the sample rate. - - d.writeUInt(frameRate); // dwRate - // See dwScale. - - d.writeUInt(0); // dwStart - // Specifies the starting time for this stream. The units are defined by - // the dwRate and dwScale members in the main file header. Usually, this - // is zero, but it can specify a delay time for a stream that does not - // start concurrently with the file. - - d.writeUInt(videoFrames.size()); // dwLength - // Specifies the length of this stream. The units are defined by the - // dwRate and dwScale members of the stream's header. - - d.writeUInt(bufferSize); // dwSuggestedBufferSize - // Specifies how large a buffer should be used to read this stream. - // Typically, this contains a value corresponding to the largest chunk - // present in the stream. Using the correct buffer size makes playback - // more efficient. Use zero if you do not know the correct buffer size. - - d.writeInt(-1); // dwQuality - // Specifies an indicator of the quality of the data in the stream. - // Quality is represented as a number between 0 and 10,000. - // For compressed data, this typically represents the value of the - // quality parameter passed to the compression software. If set to –1, - // drivers use the default quality value. - - d.writeUInt(0); // dwSampleSize - // Specifies the size of a single sample of data. This is set to zero - // if the samples can vary in size. If this number is nonzero, then - // multiple samples of data can be grouped into a single chunk within - // the file. If it is zero, each sample of data (such as a video frame) - // must be in a separate chunk. For video streams, this number is - // typically zero, although it can be nonzero if all video frames are - // the same size. For audio streams, this number should be the same as - // the nBlockAlign member of the WAVEFORMATEX structure describing the - // audio. - - d.writeUShort(0); // rcFrame.left - d.writeUShort(0); // rcFrame.top - d.writeUShort(imgWidth); // rcFrame.right - d.writeUShort(imgHeight); // rcFrame.bottom - // Specifies the destination rectangle for a text or video stream within - // the movie rectangle specified by the dwWidth and dwHeight members of - // the AVI main header structure. The rcFrame member is typically used - // in support of multiple video streams. Set this rectangle to the - // coordinates corresponding to the movie rectangle to update the whole - // movie rectangle. Units for this member are pixels. The upper-left - // corner of the destination rectangle is relative to the upper-left - // corner of the movie rectangle. 
- - /* Write BITMAPINFOHEADR Data into AVI Stream Format Chunk - /* ------------- - * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx - typedef struct tagBITMAPINFOHEADER { - DWORD biSize; - LONG biWidth; - LONG biHeight; - WORD biPlanes; - WORD biBitCount; - DWORD biCompression; - DWORD biSizeImage; - LONG biXPelsPerMeter; - LONG biYPelsPerMeter; - DWORD biClrUsed; - DWORD biClrImportant; - } BITMAPINFOHEADER; - */ - strfChunk.seekToStartOfData(); - d = strfChunk.getOutputStream(); - d.writeUInt(40); // biSize - // Specifies the number of bytes required by the structure. This value - // does not include the size of the color table or the size of the color - // masks, if they are appended to the end of structure. - - d.writeInt(imgWidth); // biWidth - // Specifies the width of the bitmap, in pixels. - - d.writeInt(imgHeight); // biHeight - // Specifies the height of the bitmap, in pixels. - // - // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is - // a bottom-up DIB with the origin at the lower left corner. If biHeight - // is negative, the bitmap is a top-down DIB with the origin at the - // upper left corner. - // For YUV bitmaps, the bitmap is always top-down, regardless of the - // sign of biHeight. Decoders should offer YUV formats with postive - // biHeight, but for backward compatibility they should accept YUV - // formats with either positive or negative biHeight. - // For compressed formats, biHeight must be positive, regardless of - // image orientation. - - d.writeShort(1); // biPlanes - // Specifies the number of planes for the target device. This value must - // be set to 1. - - d.writeShort(imgDepth); // biBitCount - // Specifies the number of bits per pixel (bpp). For uncompressed - // formats, this value is the average number of bits per pixel. For - // compressed formats, this value is the implied bit depth of the - // uncompressed image, after the image has been decoded. - - switch (videoFormat) { - case RAW: - default: - d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB - break; - case RLE: - if (imgDepth == 8) { - d.writeInt(1); // biCompression - BI_RLE8 - } else if (imgDepth == 4) { - d.writeInt(2); // biCompression - BI_RLE4 - } else { - throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images"); - } - break; - case JPG: - d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG - break; - case PNG: - d.writeType("png "); // biCompression - png for PNG - break; - } - // For compressed video and YUV formats, this member is a FOURCC code, - // specified as a DWORD in little-endian order. For example, YUYV video - // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC - // Codes. - // - // For uncompressed RGB formats, the following values are possible: - // - // Value Description - // BI_RGB 0x00000000 Uncompressed RGB. - // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks. - // Valid for 16-bpp and 32-bpp bitmaps. - // - // Note that BI_JPG and BI_PNG are not valid video formats. - // - // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is - // always RGB 555. If biCompression equals BI_BITFIELDS, the format is - // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE - // structure to determine the specific RGB type. 
- - switch (videoFormat) { - case RAW: - d.writeInt(0); // biSizeImage - break; - case RLE: - case JPG: - case PNG: - default: - if (imgDepth == 4) { - d.writeInt(imgWidth * imgHeight / 2); // biSizeImage - } else { - int bytesPerPixel = Math.max(1, imgDepth / 8); - d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage - } - break; - } - // Specifies the size, in bytes, of the image. This can be set to 0 for - // uncompressed RGB bitmaps. - - d.writeInt(0); // biXPelsPerMeter - // Specifies the horizontal resolution, in pixels per meter, of the - // target device for the bitmap. - - d.writeInt(0); // biYPelsPerMeter - // Specifies the vertical resolution, in pixels per meter, of the target - // device for the bitmap. - - d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed - // Specifies the number of color indices in the color table that are - // actually used by the bitmap. - - d.writeInt(0); // biClrImportant - // Specifies the number of color indices that are considered important - // for displaying the bitmap. If this value is zero, all colors are - // important. - - if (palette != null) { - for (int i = 0, n = palette.getMapSize(); i < n; ++i) { - /* - * typedef struct tagRGBQUAD { - BYTE rgbBlue; - BYTE rgbGreen; - BYTE rgbRed; - BYTE rgbReserved; // This member is reserved and must be zero. - } RGBQUAD; - */ - d.write(palette.getBlue(i)); - d.write(palette.getGreen(i)); - d.write(palette.getRed(i)); - d.write(0); - } - } - - - // ----------------- - aviChunk.finish(); - } -} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/AVIVideoRecorder.java --- a/src/com/aurellem/capture/AVIVideoRecorder.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,46 +0,0 @@ -package com.aurellem.capture; - -import java.awt.image.BufferedImage; -import java.io.File; -import java.io.IOException; - - -public class AVIVideoRecorder extends AbstractVideoRecorder{ - - AVIOutputStream out = null; - boolean videoReady = false; - BufferedImage frame; - - public AVIVideoRecorder(File output) throws IOException { - super(output); - this.out = new AVIOutputStream(output, AVIOutputStream.VideoFormat.PNG, 24); - this.out.setVideoCompressionQuality(1.0f); - } - - - public void initVideo (){ - frame = new BufferedImage( - width, height, - BufferedImage.TYPE_INT_RGB); - out.setFrameRate((int) Math.round(this.fps)); - out.setTimeScale(1); - out.setVideoDimension(width, height); - this.videoReady = true; - } - - public void record(BufferedImage rawFrame) { - if (!videoReady){initVideo();} - this.frame.getGraphics().drawImage(rawFrame, 0, 0, null); - try {out.writeFrame(frame);} - catch (IOException e){e.printStackTrace();} - } - - public void finish() { - System.out.println("I'm finished! 
<3"); - try {out.close();} - catch (IOException e) {e.printStackTrace();} - } - - - -} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/AbstractVideoRecorder.java --- a/src/com/aurellem/capture/AbstractVideoRecorder.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,143 +0,0 @@ -package com.aurellem.capture; - -import java.awt.image.BufferedImage; -import java.io.File; -import java.io.IOException; -import java.nio.ByteBuffer; - -import com.jme3.app.Application; -import com.jme3.app.state.AppState; -import com.jme3.app.state.AppStateManager; -import com.jme3.post.SceneProcessor; -import com.jme3.renderer.Camera; -import com.jme3.renderer.RenderManager; -import com.jme3.renderer.ViewPort; -import com.jme3.renderer.queue.RenderQueue; -import com.jme3.texture.FrameBuffer; -import com.jme3.util.BufferUtils; -import com.jme3.util.Screenshots; - -/** - * VideoProcessor copies the frames it receives to video. - * To ensure smooth video at a constant framerate, you should set your - * application's timer to a new {@link IsoTimer}. This class will - * auto-determine the framerate of the video based on the time difference - * between the first two frames it receives, although you can manually set - * the framerate by calling setFps(newFramerate). Be sure to - * place this processor *after* any other processors whose effects you want - * to be included in the output video. You can attach multiple - * VideoProcessors to the same ViewPort. - * - * For example, - * - * someViewPort.addProcessor(new VideoProcessor(file1)); - * someViewPort.addProcessor(someShadowRenderer); - * someViewPort.addProcessor(new VideoProcessor(file2)); - * - * - * will output a video without shadows to file1 and a video - * with shadows to file2 - * - * @author Robert McIntyre - * - */ - -public abstract class AbstractVideoRecorder - implements SceneProcessor, IVideoRecorder, AppState{ - - final File output; - Camera camera; - int width; - int height; - String targetFileName; - FrameBuffer frameBuffer; - Double fps = null; - RenderManager renderManager; - ByteBuffer byteBuffer; - BufferedImage rawFrame; - boolean isInitilized = false; - boolean paused = false; - - public AbstractVideoRecorder(File output) throws IOException { - this.output = output; - this.targetFileName = this.output.getCanonicalPath(); - } - - - public double getFps() {return this.fps;} - - public AbstractVideoRecorder setFps(double fps) { - this.fps = fps; - return this; - } - - public void initialize(RenderManager rm, ViewPort viewPort) { - Camera camera = viewPort.getCamera(); - this.width = camera.getWidth(); - this.height = camera.getHeight(); - - rawFrame = new BufferedImage(width, height, - BufferedImage.TYPE_4BYTE_ABGR); - byteBuffer = BufferUtils.createByteBuffer(width * height * 4 ); - this.renderManager = rm; - this.isInitilized = true; - } - - public void reshape(ViewPort vp, int w, int h) {} - - public boolean isInitialized() {return this.isInitilized;} - - public void preFrame(float tpf) { - if (null == this.fps){ - this.setFps(1.0 / tpf);} - } - - public void postQueue(RenderQueue rq) {} - - public void postFrame(FrameBuffer out) { - if (!this.paused){ - byteBuffer.clear(); - renderManager.getRenderer().readFrameBuffer(out, byteBuffer); - Screenshots.convertScreenShot(byteBuffer, rawFrame); - record(rawFrame); - } - } - - public void cleanup(){ - this.pause(); - this.finish(); - }; - - public void pause(){ - this.paused = true; - } - - public void start(){ - this.paused = false; - } - - // 
methods from AppState - public void initialize(AppStateManager stateManager, Application app) {} - - public void setEnabled(boolean active) { - if (active) {this.start();} - else {this.pause();} - } - - public boolean isEnabled() { - return this.paused; - } - - public void stateAttached(AppStateManager stateManager) {} - - - public void stateDetached(AppStateManager stateManager) { - this.pause(); - this.finish(); - } - - public void update(float tpf) {} - public void render(RenderManager rm) {} - public void postRender() {} - -} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/AudioSend.java --- a/src/com/aurellem/capture/AudioSend.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,260 +0,0 @@ -package com.aurellem.capture; - -import java.lang.reflect.Field; -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Vector; -import java.util.concurrent.CountDownLatch; -import java.util.logging.Level; -import java.util.logging.Logger; - -import org.lwjgl.LWJGLException; -import org.lwjgl.openal.AL; -import org.lwjgl.openal.AL10; -import org.lwjgl.openal.ALCdevice; -import org.lwjgl.openal.OpenALException; - -import com.jme3.audio.Listener; -import com.jme3.audio.lwjgl.LwjglAudioRenderer; -import com.jme3.math.Vector3f; -import com.jme3.util.BufferUtils; - -public class AudioSend - extends LwjglAudioRenderer implements MultiListener { - - /** - * Keeps track of all the listeners which have been registered so far. - * The first element is null, which represents the zeroth - * LWJGL listener which is created automatically. - */ - public Vector listeners = new Vector(); - - public void initialize(){ - super.initialize(); - listeners.add(null); - } - - /** - * This is to call the native methods which require the OpenAL device ID. - * currently it is obtained through reflection. - */ - private long deviceID; - - /** - * To ensure that deviceID and listeners are - * properly initialized before any additional listeners are added. - */ - private CountDownLatch latch = new CountDownLatch(1); - - private void waitForInit(){ - try {latch.await();} - catch (InterruptedException e) {e.printStackTrace();} - } - - /** - * Each listener (including the main LWJGL listener) can be registered - * with a SoundProcessor, which this Renderer will call whenever - * there is new audio data to be processed. - */ - public HashMap soundProcessorMap = - new HashMap(); - - - /** - * Create a new slave context on the recorder device which will render all the - * sounds in the main LWJGL context with respect to this listener. - */ - public void addListener(Listener l) { - try {this.latch.await();} - catch (InterruptedException e) {e.printStackTrace();} - this.addListener(); - this.listeners.add(l); - } - - /** - * Whenever new data is rendered in the perspective of this listener, - * this Renderer will send that data to the SoundProcessor of your choosing. - */ - public void registerSoundProcessor(Listener l, SoundProcessor sp) { - this.soundProcessorMap.put(l, sp); - } - - /** - * Registers a SoundProcessor for the main LWJGL context. IF all you want to - * do is record the sound you would normally hear in your application, then - * this is the only method you have to worry about. - */ - public void registerSoundProcessor(SoundProcessor sp){ - // register a sound processor for the default listener. 
- this.soundProcessorMap.put(null, sp); - } - - private static final Logger logger = - Logger.getLogger(AudioSend.class.getName()); - - - //////////// Native Methods - - /** This establishes the LWJGL context as the context which will be copies to all - * other contexts. It must be called before any calls to addListener(); - */ - public void initDevice(){ - ninitDevice(this.deviceID);} - public static native void ninitDevice(long device); - - /** - * The send device does not automatically process sound. This step function will cause - * the desired number of samples to be processed for each listener. The results will then - * be available via calls to getSamples() for each listener. - * @param samples - */ - public void step(int samples){ - nstep(this.deviceID, samples);} - public static native void nstep(long device, int samples); - - /** - * Retrieve the final rendered sound for a particular listener. contextNum == 0 - * is the main LWJGL context. - * @param buffer - * @param samples - * @param contextNum - */ - public void getSamples(ByteBuffer buffer, int samples, int contextNum){ - ngetSamples(this.deviceID, buffer, buffer.position(), samples, contextNum);} - public static native void ngetSamples( - long device, ByteBuffer buffer, int position, int samples, int contextNum); - - /** - * Create an additional listener on the recorder device. The device itself will manage - * this listener and synchronize it with the main LWJGL context. Processed sound samples - * for this listener will be available via a call to getSamples() with - * contextNum equal to the number of times this method has been called. - */ - public void addListener(){naddListener(this.deviceID);} - public static native void naddListener(long device); - - /** - * This will internally call alListener3f in the appropriate slave context and update - * that context's listener's parameters. Calling this for a number greater than the current - * number of slave contexts will have no effect. - * @param pname - * @param v1 - * @param v2 - * @param v3 - * @param contextNum - */ - public void setNthListener3f(int pname, float v1, float v2, float v3, int contextNum){ - nsetNthListener3f(pname, v1, v2, v3, this.deviceID, contextNum);} - public static native void - nsetNthListener3f(int pname, float v1, float v2, float v3, long device, int contextNum); - - /** - * This will internally call alListenerf in the appropriate slave context and update - * that context's listener's parameters. Calling this for a number greater than the current - * number of slave contexts will have no effect. - * @param pname - * @param v1 - * @param contextNum - */ - public void setNthListenerf(int pname, float v1, int contextNum){ - nsetNthListenerf(pname, v1, this.deviceID, contextNum);} - public static native void nsetNthListenerf(int pname, float v1, long device, int contextNum); - - /** - * Instead of taking whatever device is available on the system, this call - * creates the "Multiple Audio Send" device, which supports multiple listeners in a limited - * capacity. For each listener, the device renders it not to the sound device, but - * instead to buffers which it makes available via JNI. 
- */ - public void initInThread(){ - try{ - if (!AL.isCreated()){ - AL.create("Multiple Audio Send", 44100, 60, false); - } - }catch (OpenALException ex){ - logger.log(Level.SEVERE, "Failed to load audio library", ex); - System.exit(1); - return; - }catch (LWJGLException ex){ - logger.log(Level.SEVERE, "Failed to load audio library", ex); - System.exit(1); - return; - } - super.initInThread(); - - ALCdevice device = AL.getDevice(); - - // RLM: use reflection to grab the ID of our device for use later. - try { - Field deviceIDField; - deviceIDField = ALCdevice.class.getDeclaredField("device"); - deviceIDField.setAccessible(true); - try {deviceID = (Long)deviceIDField.get(device);} - catch (IllegalArgumentException e) {e.printStackTrace();} - catch (IllegalAccessException e) {e.printStackTrace();} - deviceIDField.setAccessible(false);} - catch (SecurityException e) {e.printStackTrace();} - catch (NoSuchFieldException e) {e.printStackTrace();} - - // the LWJGL context must be established as the master context before - // any other listeners can be created on this device. - initDevice(); - // Now, everything is initialized, and it is safe to add more listeners. - latch.countDown(); - } - - - public void cleanup(){ - for(SoundProcessor sp : this.soundProcessorMap.values()){ - sp.cleanup(); - } - super.cleanup(); - } - - public void updateAllListeners(){ - for (int i = 0; i < this.listeners.size(); i++){ - Listener lis = this.listeners.get(i); - if (null != lis){ - Vector3f location = lis.getLocation(); - Vector3f velocity = lis.getVelocity(); - Vector3f orientation = lis.getUp(); - float gain = lis.getVolume(); - setNthListener3f(AL10.AL_POSITION, - location.x, location.y, location.z, i); - setNthListener3f(AL10.AL_VELOCITY, - velocity.x, velocity.y, velocity.z, i); - setNthListener3f(AL10.AL_ORIENTATION, - orientation.x, orientation.y, orientation.z, i); - setNthListenerf(AL10.AL_GAIN, gain, i); - } - } - } - - - public final static int BYTES_PER_SAMPLE = 4; - private ByteBuffer buffer = BufferUtils.createByteBuffer(4096); - - public void dispatchAudio(float tpf){ - int samplesToGet = (int) (tpf * 44100); - try {latch.await();} - catch (InterruptedException e) {e.printStackTrace();} - step(samplesToGet); - updateAllListeners(); - - for (int i = 0; i < this.listeners.size(); i++){ - buffer.clear(); - this.getSamples(buffer, samplesToGet, i); - SoundProcessor sp = - this.soundProcessorMap.get(this.listeners.get(i)); - if (null != sp){sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);} - } - - } - - public void update(float tpf){ - super.update(tpf); - dispatchAudio(tpf); - } - -} - diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/Capture.java --- a/src/com/aurellem/capture/Capture.java Tue Oct 25 12:29:40 2011 -0700 +++ b/src/com/aurellem/capture/Capture.java Wed Oct 26 08:54:12 2011 -0700 @@ -3,6 +3,7 @@ import java.io.File; import java.io.IOException; +import com.aurellem.capture.video.AVIVideoRecorder; import com.jme3.app.Application; import com.jme3.math.ColorRGBA; diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/DataChunkOutputStream.java --- a/src/com/aurellem/capture/DataChunkOutputStream.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,217 +0,0 @@ -/** - * @(#)DataChunkOutputStream.java 1.1 2011-01-17 - * - * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland. - * All rights reserved. 
- * - * You may not use, copy or modify this file, except in compliance with the - * license agreement you entered into with Werner Randelshofer. - * For details see accompanying license terms. - */ -package com.aurellem.capture; - -import java.io.*; - -/** - * This output stream filter supports common data types used inside - * of AVI RIFF Data Chunks. - * - * @author Werner Randelshofer - * @version 1.1 2011-01-17 Adds functionality for blocking flush and close. - *
1.0.1 2010-04-05 Removed unused constants. - *
1.0 2008-08-11 Created. - */ -public class DataChunkOutputStream extends FilterOutputStream { - - /** - * The number of bytes written to the data output stream so far. - * If this counter overflows, it will be wrapped to Integer.MAX_VALUE. - */ - protected long written; - - /** Whether flush and close request shall be forwarded to underlying stream.*/ - private boolean forwardFlushAndClose; - - public DataChunkOutputStream(OutputStream out) { - this(out,true); - } - public DataChunkOutputStream(OutputStream out, boolean forwardFlushAndClose) { - super(out); - this.forwardFlushAndClose=forwardFlushAndClose; - } - - /** - * Writes an chunk type identifier (4 bytes). - * @param s A string with a length of 4 characters. - */ - public void writeType(String s) throws IOException { - if (s.length() != 4) { - throw new IllegalArgumentException("type string must have 4 characters"); - } - - try { - out.write(s.getBytes("ASCII"), 0, 4); - incCount(4); - } catch (UnsupportedEncodingException e) { - throw new InternalError(e.toString()); - } - } - - /** - * Writes out a byte to the underlying output stream as - * a 1-byte value. If no exception is thrown, the counter - * written is incremented by 1. - * - * @param v a byte value to be written. - * @exception IOException if an I/O error occurs. - * @see java.io.FilterOutputStream#out - */ - public final void writeByte(int v) throws IOException { - out.write(v); - incCount(1); - } - - /** - * Writes len bytes from the specified byte array - * starting at offset off to the underlying output stream. - * If no exception is thrown, the counter written is - * incremented by len. - * - * @param b the data. - * @param off the start offset in the data. - * @param len the number of bytes to write. - * @exception IOException if an I/O error occurs. - * @see java.io.FilterOutputStream#out - */ - @Override - public synchronized void write(byte b[], int off, int len) - throws IOException { - out.write(b, off, len); - incCount(len); - } - - /** - * Writes the specified byte (the low eight bits of the argument - * b) to the underlying output stream. If no exception - * is thrown, the counter written is incremented by - * 1. - *

- * Implements the write method of OutputStream. - * - * @param b the byte to be written. - * @exception IOException if an I/O error occurs. - * @see java.io.FilterOutputStream#out - */ - @Override - public synchronized void write(int b) throws IOException { - out.write(b); - incCount(1); - } - - /** - * Writes an int to the underlying output stream as four - * bytes, high byte first. If no exception is thrown, the counter - * written is incremented by 4. - * - * @param v an int to be written. - * @exception IOException if an I/O error occurs. - * @see java.io.FilterOutputStream#out - */ - public void writeInt(int v) throws IOException { - out.write((v >>> 0) & 0xff); - out.write((v >>> 8) & 0xff); - out.write((v >>> 16) & 0xff); - out.write((v >>> 24) & 0xff); - incCount(4); - } - - /** - * Writes an unsigned 32 bit integer value. - * - * @param v The value - * @throws java.io.IOException - */ - public void writeUInt(long v) throws IOException { - out.write((int) ((v >>> 0) & 0xff)); - out.write((int) ((v >>> 8) & 0xff)); - out.write((int) ((v >>> 16) & 0xff)); - out.write((int) ((v >>> 24) & 0xff)); - incCount(4); - } - - /** - * Writes a signed 16 bit integer value. - * - * @param v The value - * @throws java.io.IOException - */ - public void writeShort(int v) throws IOException { - out.write((int) ((v >>> 0) & 0xff)); - out.write((int) ((v >> 8) & 0xff)); - incCount(2); - } - - public void writeLong(long v) throws IOException { - out.write((int) (v >>> 0) & 0xff); - out.write((int) (v >>> 8) & 0xff); - out.write((int) (v >>> 16) & 0xff); - out.write((int) (v >>> 24) & 0xff); - out.write((int) (v >>> 32) & 0xff); - out.write((int) (v >>> 40) & 0xff); - out.write((int) (v >>> 48) & 0xff); - out.write((int) (v >>> 56) & 0xff); - incCount(8); - } - - public void writeUShort(int v) throws IOException { - out.write((int) ((v >>> 0) & 0xff)); - out.write((int) ((v >> 8) & 0xff)); - incCount(2); - } - - /** - * Increases the written counter by the specified value - * until it reaches Long.MAX_VALUE. - */ - protected void incCount(int value) { - long temp = written + value; - if (temp < 0) { - temp = Long.MAX_VALUE; - } - written = temp; - } - - /** - * Returns the current value of the counter written, - * the number of bytes written to this data output stream so far. - * If the counter overflows, it will be wrapped to Integer.MAX_VALUE. - * - * @return the value of the written field. - * @see java.io.DataOutputStream#written - */ - public final long size() { - return written; - } - - /** - * Sets the value of the counter written to 0. 
- */ - public void clearCount() { - written = 0; - } - - @Override - public void close() throws IOException { - if (forwardFlushAndClose) { - super.close(); - } - } - - @Override - public void flush() throws IOException { - if (forwardFlushAndClose) { - super.flush(); - } - } - -} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/FileAudioRenderer.java --- a/src/com/aurellem/capture/FileAudioRenderer.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,85 +0,0 @@ -package com.aurellem.capture; - -import com.jme3.audio.AudioData; -import com.jme3.audio.AudioNode; -import com.jme3.audio.AudioParam; -import com.jme3.audio.AudioRenderer; -import com.jme3.audio.Environment; -import com.jme3.audio.Listener; -import com.jme3.audio.ListenerParam; - -public class FileAudioRenderer implements AudioRenderer{ - - - public void setListener(Listener listener) { - // TODO Auto-generated method stub - - } - - - public void setEnvironment(Environment env) { - // TODO Auto-generated method stub - - } - - @Override - public void playSourceInstance(AudioNode src) { - // TODO Auto-generated method stub - - } - - @Override - public void playSource(AudioNode src) { - // TODO Auto-generated method stub - - } - - @Override - public void pauseSource(AudioNode src) { - // TODO Auto-generated method stub - - } - - @Override - public void stopSource(AudioNode src) { - // TODO Auto-generated method stub - - } - - @Override - public void updateSourceParam(AudioNode src, AudioParam param) { - // TODO Auto-generated method stub - - } - - @Override - public void updateListenerParam(Listener listener, ListenerParam param) { - // TODO Auto-generated method stub - - } - - @Override - public void deleteAudioData(AudioData ad) { - // TODO Auto-generated method stub - - } - - @Override - public void initialize() { - // TODO Auto-generated method stub - - } - - @Override - public void update(float tpf) { - // TODO Auto-generated method stub - - } - - @Override - public void cleanup() { - // TODO Auto-generated method stub - - } - -} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/IVideoRecorder.java --- a/src/com/aurellem/capture/IVideoRecorder.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -package com.aurellem.capture; - -import java.awt.image.BufferedImage; - -public interface IVideoRecorder{ - - void record(BufferedImage image); - - void pause(); - - void start(); - - /** - * closes the video file, writing appropriate headers, trailers, etc. - * After this is called, no more recording can be done. - */ - void finish(); - -} - - diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/ImageOutputStreamAdapter.java --- a/src/com/aurellem/capture/ImageOutputStreamAdapter.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,144 +0,0 @@ -/* - * @(#)ImageOutputStreamAdapter.java 1.1 2011-01-07 - * - * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland. - * All rights reserved. - * - * You may not use, copy or modify this file, except in compliance with the - * license agreement you entered into with Werner Randelshofer. - * For details see accompanying license terms. - */ -package com.aurellem.capture; - -import java.io.FilterOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import javax.imageio.stream.ImageOutputStream; - -/** - * Adapts an {@code ImageOutputStream} for classes requiring an - * {@code OutputStream}. 
- * - * @author Werner Randelshofer - * @version 1.1 2011-01-07 Fixes performance. - *
1.0 2010-12-26 Created. - */ -public class ImageOutputStreamAdapter extends OutputStream { - - /** - * The underlying output stream to be filtered. - */ - protected ImageOutputStream out; - - /** - * Creates an output stream filter built on top of the specified - * underlying output stream. - * - * @param out the underlying output stream to be assigned to - * the field this.out for later use, or - * null if this instance is to be - * created without an underlying stream. - */ - public ImageOutputStreamAdapter(ImageOutputStream out) { - this.out = out; - } - - /** - * Writes the specified byte to this output stream. - *

- * The write method of FilterOutputStream - * calls the write method of its underlying output stream, - * that is, it performs out.write(b). - *

- * Implements the abstract write method of OutputStream. - * - * @param b the byte. - * @exception IOException if an I/O error occurs. - */ - @Override - public void write(int b) throws IOException { - out.write(b); - } - - /** - * Writes b.length bytes to this output stream. - *

- * The write method of FilterOutputStream - * calls its write method of three arguments with the - * arguments b, 0, and - * b.length. - *

- * Note that this method does not call the one-argument - * write method of its underlying stream with the single - * argument b. - * - * @param b the data to be written. - * @exception IOException if an I/O error occurs. - * @see java.io.FilterOutputStream#write(byte[], int, int) - */ - @Override - public void write(byte b[]) throws IOException { - write(b, 0, b.length); - } - - /** - * Writes len bytes from the specified - * byte array starting at offset off to - * this output stream. - *

- * The write method of FilterOutputStream - * calls the write method of one argument on each - * byte to output. - *

- * Note that this method does not call the write method - * of its underlying input stream with the same arguments. Subclasses - * of FilterOutputStream should provide a more efficient - * implementation of this method. - * - * @param b the data. - * @param off the start offset in the data. - * @param len the number of bytes to write. - * @exception IOException if an I/O error occurs. - * @see java.io.FilterOutputStream#write(int) - */ - @Override - public void write(byte b[], int off, int len) throws IOException { - out.write(b,off,len); - } - - /** - * Flushes this output stream and forces any buffered output bytes - * to be written out to the stream. - *

- * The flush method of FilterOutputStream - * calls the flush method of its underlying output stream. - * - * @exception IOException if an I/O error occurs. - * @see java.io.FilterOutputStream#out - */ - @Override - public void flush() throws IOException { - out.flush(); - } - - /** - * Closes this output stream and releases any system resources - * associated with the stream. - *

- * The close method of FilterOutputStream - * calls its flush method, and then calls the - * close method of its underlying output stream. - * - * @exception IOException if an I/O error occurs. - * @see java.io.FilterOutputStream#flush() - * @see java.io.FilterOutputStream#out - */ - @Override - public void close() throws IOException { - try { - flush(); - } finally { - out.close(); - } - } -} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/Main.java --- a/src/com/aurellem/capture/Main.java Tue Oct 25 12:29:40 2011 -0700 +++ b/src/com/aurellem/capture/Main.java Wed Oct 26 08:54:12 2011 -0700 @@ -16,6 +16,8 @@ import java.io.*; import java.util.Random; +import com.aurellem.capture.video.AVIOutputStream; + /** * Main. diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/MicrosoftRLEEncoder.java --- a/src/com/aurellem/capture/MicrosoftRLEEncoder.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,400 +0,0 @@ -/* - * @(#)AppleRLEEncoder.java 1.1.1 2011-01-17 - * - * Copyright © 2011 Werner Randelshofer, Immensee, Switzerland. - * All rights reserved. - * - * You may not use, copy or modify this file, except in compliance with the - * license agreement you entered into with Werner Randelshofer. - * For details see accompanying license terms. - */ -package com.aurellem.capture; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.Arrays; - -/** - * Implements the run length encoding of the Microsoft RLE format. - *

- * Each line of a frame is compressed individually. A line consists of two-byte - * op-codes optionally followed by data. The end of the line is marked with - * the EOL op-code. - *

- * The following op-codes are supported:
- * 00 00: marks the end of a line
- * 00 01: marks the end of the RLE bitmap
- * 00 02 dx dy: skips dx pixels to the right and dy lines down
- * 00 n (n >= 3): literal run; the next n bytes follow, padded to an even count
- * n v (n >= 1): repeat run; the byte v is repeated n times
- *
- * Example: - *
- * Compressed data         Expanded data
- *
- * 03 04                   04 04 04
- * 05 06                   06 06 06 06 06
- * 00 03 45 56 67 00       45 56 67
- * 02 78                   78 78
- * 00 02 05 01             Move 5 right and 1 down
- * 02 78                   78 78
- * 00 00                   End of line
- * 09 1E                   1E 1E 1E 1E 1E 1E 1E 1E 1E
- * 00 01                   End of RLE bitmap
- * 
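The op-code semantics are easiest to verify against a small decoder. A minimal sketch (a hypothetical helper, not part of this patch; it expands a single strip and ignores the scanline bookkeeping a full DIB decoder needs):

    import java.io.ByteArrayOutputStream;

    public class Rle8DecodeSketch {
        /** Expands Microsoft RLE8 op-codes until the end-of-bitmap marker (00 01). */
        public static byte[] decode(byte[] rle) {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            int i = 0;
            while (i + 1 < rle.length) {
                int a = rle[i++] & 0xff;
                int b = rle[i++] & 0xff;
                if (a > 0) {                      // repeat run: a copies of b
                    for (int k = 0; k < a; k++) out.write(b);
                } else if (b == 0) {              // 00 00: end of line
                    continue;                     // a real decoder moves to the next scanline
                } else if (b == 1) {              // 00 01: end of bitmap
                    break;
                } else if (b == 2) {              // 00 02 dx dy: skip (delta) op-code
                    i += 2;                       // sketch: discard the dx/dy offsets
                } else {                          // 00 n: literal run of n bytes
                    out.write(rle, i, b);
                    i += b + (b & 1);             // literal runs are padded to even length
                }
            }
            return out.toByteArray();
        }
    }

Feeding it the compressed bytes from the table above reproduces the expanded column (modulo the positional op-codes, which only move the write position).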
- * - * References:
- * http://wiki.multimedia.cx/index.php?title=Microsoft_RLE
- * - * @author Werner Randelshofer - * @version 1.1.1 2011-01-17 Removes unused imports. - *
1.1 2011-01-07 Improves performance. - *
1.0 2011-01-05 Created. - */ -public class MicrosoftRLEEncoder { - - private SeekableByteArrayOutputStream tempSeek=new SeekableByteArrayOutputStream(); - private DataChunkOutputStream temp=new DataChunkOutputStream(tempSeek); - - /** Encodes a 8-bit key frame. - * - * @param temp The output stream. Must be set to Big-Endian. - * @param data The image data. - * @param offset The offset to the first pixel in the data array. - * @param length The width of the image in data elements. - * @param step The number to add to offset to get to the next scanline. - */ - public void writeKey8(OutputStream out, byte[] data, int offset, int length, int step, int height) - throws IOException { - tempSeek.reset(); - int ymax = offset + height * step; - int upsideDown = ymax-step+offset; - - // Encode each scanline separately - for (int y = offset; y < ymax; y += step) { - int xy = upsideDown-y; - int xymax = xy + length; - - int literalCount = 0; - int repeatCount = 0; - for (; xy < xymax; ++xy) { - // determine repeat count - byte v = data[xy]; - for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) { - if (data[xy] != v) { - break; - } - } - xy -= repeatCount; - if (repeatCount < 3) { - literalCount++; - if (literalCount == 254) { - temp.write(0);temp.write(literalCount); // Literal OP-code - temp.write(data, xy - literalCount + 1, literalCount); - literalCount = 0; - } - } else { - if (literalCount > 0) { - if (literalCount < 3) { - for (; literalCount > 0; --literalCount) { - temp.write(1); // Repeat OP-code - temp.write(data[xy - literalCount]); - } - } else { - temp.write(0);temp.write(literalCount); // Literal OP-code - temp.write(data, xy - literalCount, literalCount); - if (literalCount % 2 == 1) { - temp.write(0); // pad byte - } - literalCount = 0; - } - } - temp.write(repeatCount); // Repeat OP-code - temp.write(v); - xy += repeatCount - 1; - } - } - - // flush literal run - if (literalCount > 0) { - if (literalCount < 3) { - for (; literalCount > 0; --literalCount) { - temp.write(1); // Repeat OP-code - temp.write(data[xy - literalCount]); - } - } else { - temp.write(0);temp.write(literalCount); - temp.write(data, xy - literalCount, literalCount); - if (literalCount % 2 == 1) { - temp.write(0); // pad byte - } - } - literalCount = 0; - } - - temp.write(0);temp.write(0x0000);// End of line - } - temp.write(0);temp.write(0x0001);// End of bitmap - tempSeek.toOutputStream(out); - } - - /** Encodes a 8-bit delta frame. - * - * @param temp The output stream. Must be set to Big-Endian. - * @param data The image data. - * @param prev The image data of the previous frame. - * @param offset The offset to the first pixel in the data array. - * @param length The width of the image in data elements. - * @param step The number to add to offset to get to the next scanline. 
- */ - public void writeDelta8(OutputStream out, byte[] data, byte[] prev, int offset, int length, int step, int height) - throws IOException { - -tempSeek.reset(); - // Determine whether we can skip lines at the beginning - int ymin; - int ymax = offset + height * step; - int upsideDown = ymax-step+offset; - scanline: - for (ymin = offset; ymin < ymax; ymin += step) { - int xy = upsideDown-ymin; - int xymax = xy + length; - for (; xy < xymax; ++xy) { - if (data[xy] != prev[xy]) { - break scanline; - } - } - } - - if (ymin == ymax) { - // => Frame is identical to previous one - temp.write(0);temp.write(0x0001); // end of bitmap - return; - } - - if (ymin > offset) { - int verticalOffset = ymin / step; - while (verticalOffset > 255) { - temp.write(0);temp.write(0x0002); // Skip OP-code - temp.write(0); // horizontal offset - temp.write(255); // vertical offset - verticalOffset -= 255; - } - if (verticalOffset == 1) { - temp.write(0);temp.write(0x0000); // End of line OP-code - } else { - temp.write(0);temp.write(0x0002); // Skip OP-code - temp.write(0); // horizontal offset - temp.write(verticalOffset); // vertical offset - } - } - - - // Determine whether we can skip lines at the end - scanline: - for (; ymax > ymin; ymax -= step) { - int xy = upsideDown-ymax+step; - int xymax = xy + length; - for (; xy < xymax; ++xy) { - if (data[xy] != prev[xy]) { - break scanline; - } - } - } - //System.out.println("MicrosoftRLEEncoder ymin:" + ymin / step + " ymax" + ymax / step); - - - // Encode each scanline - int verticalOffset = 0; - for (int y = ymin; y < ymax; y += step) { - int xy = upsideDown-y; - int xymax = xy + length; - - // determine skip count - int skipCount = 0; - for (; xy < xymax; ++xy, ++skipCount) { - if (data[xy] != prev[xy]) { - break; - } - } - if (skipCount == length) { - // => the entire line can be skipped - ++verticalOffset; - if (verticalOffset == 255) { - temp.write(0);temp.write(0x0002); // Skip OP-code - temp.write(0); // horizontal offset - temp.write(255); // vertical offset - verticalOffset = 0; - } - continue; - } - - if (verticalOffset > 0 || skipCount > 0) { - if (verticalOffset == 1 && skipCount == 0) { - temp.write(0);temp.write(0x0000); // End of line OP-code - } else { - temp.write(0);temp.write(0x0002); // Skip OP-code - temp.write(Math.min(255, skipCount)); // horizontal offset - skipCount -= 255; - temp.write(verticalOffset); // vertical offset - } - verticalOffset = 0; - } - while (skipCount > 0) { - temp.write(0);temp.write(0x0002); // Skip OP-code - temp.write(Math.min(255, skipCount)); // horizontal offset - temp.write(0); // vertical offset - skipCount -= 255; - } - - int literalCount = 0; - int repeatCount = 0; - for (; xy < xymax; ++xy) { - // determine skip count - for (skipCount = 0; xy < xymax; ++xy, ++skipCount) { - if (data[xy] != prev[xy]) { - break; - } - } - xy -= skipCount; - - // determine repeat count - byte v = data[xy]; - for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) { - if (data[xy] != v) { - break; - } - } - xy -= repeatCount; - - if (skipCount < 4 && xy + skipCount < xymax && repeatCount < 3) { - literalCount++; - if (literalCount == 254) { - temp.write(0);temp.write(literalCount); // Literal OP-code - temp.write(data, xy - literalCount + 1, literalCount); - literalCount = 0; - } - } else { - if (literalCount > 0) { - if (literalCount < 3) { - for (; literalCount > 0; --literalCount) { - temp.write(1); // Repeat OP-code - temp.write(data[xy - literalCount]); - } - } else { - 
temp.write(0);temp.write(literalCount); - temp.write(data, xy - literalCount, literalCount); - if (literalCount % 2 == 1) { - temp.write(0); // pad byte - } - } - literalCount = 0; - } - if (xy + skipCount == xymax) { - // => we can skip until the end of the line without - // having to write an op-code - xy += skipCount - 1; - } else if (skipCount >= repeatCount) { - while (skipCount > 255) { - temp.write(0);temp.write(0x0002); // Skip OP-code - temp.write(255); - temp.write(0); - xy += 255; - skipCount -= 255; - } - temp.write(0);temp.write(0x0002); // Skip OP-code - temp.write(skipCount); - temp.write(0); - xy += skipCount - 1; - } else { - temp.write(repeatCount); // Repeat OP-code - temp.write(v); - xy += repeatCount - 1; - } - } - } - - // flush literal run - if (literalCount > 0) { - if (literalCount < 3) { - for (; literalCount > 0; --literalCount) { - temp.write(1); // Repeat OP-code - temp.write(data[xy - literalCount]); - } - } else { - temp.write(0);temp.write(literalCount); - temp.write(data, xy - literalCount, literalCount); - if (literalCount % 2 == 1) { - temp.write(0); // pad byte - } - } - } - - temp.write(0);temp.write(0x0000); // End of line OP-code - } - - temp.write(0);temp.write(0x0001);// End of bitmap - tempSeek.toOutputStream(out); - } - - public static void main(String[] args) { - byte[] data = {// - 8, 2, 3, 4, 4, 3,7,7,7, 8,// - 8, 1, 1, 1, 1, 2,7,7,7, 8,// - 8, 0, 2, 0, 0, 0,7,7,7, 8,// - 8, 2, 2, 3, 4, 4,7,7,7, 8,// - 8, 1, 4, 4, 4, 5,7,7,7, 8}; - - - byte[] prev = {// - 8, 3, 3, 3, 3, 3,7,7,7, 8,// - 8, 1, 1, 1, 1, 1,7,7,7, 8, // - 8, 5, 5, 5, 5, 0,7,7,7, 8,// - 8, 2, 2, 0, 0, 0,7,7,7, 8,// - 8, 2, 0, 0, 0, 5,7,7,7, 8}; - ByteArrayOutputStream buf = new ByteArrayOutputStream(); - DataChunkOutputStream out = new DataChunkOutputStream(buf); - MicrosoftRLEEncoder enc = new MicrosoftRLEEncoder(); - - try { - enc.writeDelta8(out, data, prev, 1, 8, 10, 5); - //enc.writeKey8(out, data, 1, 8, 10,5); - out.close(); - - byte[] result = buf.toByteArray(); - System.out.println("size:" + result.length); - System.out.println(Arrays.toString(result)); - System.out.print("0x ["); - - for (int i = 0; i < result.length; i++) { - if (i != 0) { - System.out.print(','); - } - String hex = "00" + Integer.toHexString(result[i]); - System.out.print(hex.substring(hex.length() - 2)); - } - System.out.println(']'); - - } catch (IOException ex) { - ex.printStackTrace(); - } - } -} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/MultiListener.java --- a/src/com/aurellem/capture/MultiListener.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -package com.aurellem.capture; - -import com.jme3.audio.Listener; - -public interface MultiListener { - - void addListener(Listener l); - void registerSoundProcessor(Listener l, SoundProcessor sp); - void registerSoundProcessor(SoundProcessor sp); - -} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/SeekableByteArrayOutputStream.java --- a/src/com/aurellem/capture/SeekableByteArrayOutputStream.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,153 +0,0 @@ -/* - * @(#)SeekableByteArrayOutputStream.java 1.0 2010-12-27 - * - * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland. - * All rights reserved. - * - * You may not use, copy or modify this file, except in compliance with the - * license agreement you entered into with Werner Randelshofer. - * For details see accompanying license terms. 
- */ - -package com.aurellem.capture; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.Arrays; -import static java.lang.Math.*; -/** - * {@code SeekableByteArrayOutputStream}. - * - * @author Werner Randelshofer - * @version 1.0 2010-12-27 Created. - */ -public class SeekableByteArrayOutputStream extends ByteArrayOutputStream { - - /** - * The current stream position. - */ - private int pos; - - /** - * Creates a new byte array output stream. The buffer capacity is - * initially 32 bytes, though its size increases if necessary. - */ - public SeekableByteArrayOutputStream() { - this(32); - } - - /** - * Creates a new byte array output stream, with a buffer capacity of - * the specified size, in bytes. - * - * @param size the initial size. - * @exception IllegalArgumentException if size is negative. - */ - public SeekableByteArrayOutputStream(int size) { - if (size < 0) { - throw new IllegalArgumentException("Negative initial size: " - + size); - } - buf = new byte[size]; - } - - /** - * Writes the specified byte to this byte array output stream. - * - * @param b the byte to be written. - */ - @Override - public synchronized void write(int b) { - int newcount = max(pos + 1, count); - if (newcount > buf.length) { - buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); - } - buf[pos++] = (byte)b; - count = newcount; - } - - /** - * Writes len bytes from the specified byte array - * starting at offset off to this byte array output stream. - * - * @param b the data. - * @param off the start offset in the data. - * @param len the number of bytes to write. - */ - @Override - public synchronized void write(byte b[], int off, int len) { - if ((off < 0) || (off > b.length) || (len < 0) || - ((off + len) > b.length) || ((off + len) < 0)) { - throw new IndexOutOfBoundsException(); - } else if (len == 0) { - return; - } - int newcount = max(pos+len,count); - if (newcount > buf.length) { - buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); - } - System.arraycopy(b, off, buf, pos, len); - pos+=len; - count = newcount; - } - - /** - * Resets the count field of this byte array output - * stream to zero, so that all currently accumulated output in the - * output stream is discarded. The output stream can be used again, - * reusing the already allocated buffer space. - * - * @see java.io.ByteArrayInputStream#count - */ - @Override - public synchronized void reset() { - count = 0; - pos=0; - } - - /** - * Sets the current stream position to the desired location. The - * next read will occur at this location. The bit offset is set - * to 0. - * - *

An IndexOutOfBoundsException will be thrown if - * pos is smaller than the flushed position (as - * returned by getFlushedPosition()). - *

It is legal to seek past the end of the file; an - * EOFException will be thrown only if a read is - * performed. - * - * @param pos a long containing the desired file - * pointer position. - * - * @exception IndexOutOfBoundsException if pos is smaller - * than the flushed position. - * @exception IOException if any other I/O error occurs. - */ - public void seek(long pos) throws IOException { - this.pos = (int)pos; - } - - /** - * Returns the current byte position of the stream. The next write - * will take place starting at this offset. - * - * @return a long containing the position of the stream. - * - * @exception IOException if an I/O error occurs. - */ - public long getStreamPosition() throws IOException { - return pos; - } - - /** Writes the contents of the byte array into the specified output - * stream. - * @param out - */ - public void toOutputStream(OutputStream out) throws IOException { - out.write(buf, 0, count); - } - -} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/SoundProcessor.java --- a/src/com/aurellem/capture/SoundProcessor.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -package com.aurellem.capture; - -import java.nio.ByteBuffer; - -public interface SoundProcessor { - - void cleanup(); - - void process(ByteBuffer audioSamples, int numSamples); - -} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/WaveFileWriter.java --- a/src/com/aurellem/capture/WaveFileWriter.java Tue Oct 25 12:29:40 2011 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,46 +0,0 @@ -package com.aurellem.capture; - -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Vector; - -import javax.sound.sampled.AudioFileFormat; -import javax.sound.sampled.AudioFormat; -import javax.sound.sampled.AudioInputStream; -import javax.sound.sampled.AudioSystem; - -public class WaveFileWriter implements SoundProcessor { - - public Vector fullWaveData = new Vector(); - public File targetFile; - - public WaveFileWriter(File targetFile){ - this.targetFile = targetFile; - } - - public void cleanup() { - byte[] data = new byte[this.fullWaveData.size()]; - - for (int i = 0; i < this.fullWaveData.size(); i++){ - data[i] = this.fullWaveData.get(i);} - - - ByteArrayInputStream input = new ByteArrayInputStream(data); - AudioFormat format = new AudioFormat(44100.0f, 32, 1, true, false); - AudioInputStream audioInput = new AudioInputStream(input, format, data.length / 4 ); - try {AudioSystem.write(audioInput, AudioFileFormat.Type.WAVE, targetFile);} - catch (IOException e) {e.printStackTrace();} - - } - - - public void process(ByteBuffer audioSamples, int numSamples) { - for (int i = 0; inull, which represents the zeroth + * LWJGL listener which is created automatically. + */ + public Vector listeners = new Vector(); + + public void initialize(){ + super.initialize(); + listeners.add(null); + } + + /** + * This is to call the native methods which require the OpenAL device ID. + * currently it is obtained through reflection. + */ + private long deviceID; + + /** + * To ensure that deviceID and listeners are + * properly initialized before any additional listeners are added. 
+ */ + private CountDownLatch latch = new CountDownLatch(1); + + private void waitForInit(){ + try {latch.await();} + catch (InterruptedException e) {e.printStackTrace();} + } + + /** + * Each listener (including the main LWJGL listener) can be registered + * with a SoundProcessor, which this Renderer will call whenever + * there is new audio data to be processed. + */ + public HashMap soundProcessorMap = + new HashMap(); + + + /** + * Create a new slave context on the recorder device which will render all the + * sounds in the main LWJGL context with respect to this listener. + */ + public void addListener(Listener l) { + try {this.latch.await();} + catch (InterruptedException e) {e.printStackTrace();} + this.addListener(); + this.listeners.add(l); + } + + /** + * Whenever new data is rendered in the perspective of this listener, + * this Renderer will send that data to the SoundProcessor of your choosing. + */ + public void registerSoundProcessor(Listener l, SoundProcessor sp) { + this.soundProcessorMap.put(l, sp); + } + + /** + * Registers a SoundProcessor for the main LWJGL context. IF all you want to + * do is record the sound you would normally hear in your application, then + * this is the only method you have to worry about. + */ + public void registerSoundProcessor(SoundProcessor sp){ + // register a sound processor for the default listener. + this.soundProcessorMap.put(null, sp); + } + + private static final Logger logger = + Logger.getLogger(AudioSend.class.getName()); + + + //////////// Native Methods + + /** This establishes the LWJGL context as the context which will be copies to all + * other contexts. It must be called before any calls to addListener(); + */ + public void initDevice(){ + ninitDevice(this.deviceID);} + public static native void ninitDevice(long device); + + /** + * The send device does not automatically process sound. This step function will cause + * the desired number of samples to be processed for each listener. The results will then + * be available via calls to getSamples() for each listener. + * @param samples + */ + public void step(int samples){ + nstep(this.deviceID, samples);} + public static native void nstep(long device, int samples); + + /** + * Retrieve the final rendered sound for a particular listener. contextNum == 0 + * is the main LWJGL context. + * @param buffer + * @param samples + * @param contextNum + */ + public void getSamples(ByteBuffer buffer, int samples, int contextNum){ + ngetSamples(this.deviceID, buffer, buffer.position(), samples, contextNum);} + public static native void ngetSamples( + long device, ByteBuffer buffer, int position, int samples, int contextNum); + + /** + * Create an additional listener on the recorder device. The device itself will manage + * this listener and synchronize it with the main LWJGL context. Processed sound samples + * for this listener will be available via a call to getSamples() with + * contextNum equal to the number of times this method has been called. + */ + public void addListener(){naddListener(this.deviceID);} + public static native void naddListener(long device); + + /** + * This will internally call alListener3f in the appropriate slave context and update + * that context's listener's parameters. Calling this for a number greater than the current + * number of slave contexts will have no effect. 
+ * @param pname + * @param v1 + * @param v2 + * @param v3 + * @param contextNum + */ + public void setNthListener3f(int pname, float v1, float v2, float v3, int contextNum){ + nsetNthListener3f(pname, v1, v2, v3, this.deviceID, contextNum);} + public static native void + nsetNthListener3f(int pname, float v1, float v2, float v3, long device, int contextNum); + + /** + * This will internally call alListenerf in the appropriate slave context and update + * that context's listener's parameters. Calling this for a number greater than the current + * number of slave contexts will have no effect. + * @param pname + * @param v1 + * @param contextNum + */ + public void setNthListenerf(int pname, float v1, int contextNum){ + nsetNthListenerf(pname, v1, this.deviceID, contextNum);} + public static native void nsetNthListenerf(int pname, float v1, long device, int contextNum); + + /** + * Instead of taking whatever device is available on the system, this call + * creates the "Multiple Audio Send" device, which supports multiple listeners in a limited + * capacity. For each listener, the device renders it not to the sound device, but + * instead to buffers which it makes available via JNI. + */ + public void initInThread(){ + try{ + if (!AL.isCreated()){ + AL.create("Multiple Audio Send", 44100, 60, false); + } + }catch (OpenALException ex){ + logger.log(Level.SEVERE, "Failed to load audio library", ex); + System.exit(1); + return; + }catch (LWJGLException ex){ + logger.log(Level.SEVERE, "Failed to load audio library", ex); + System.exit(1); + return; + } + super.initInThread(); + + ALCdevice device = AL.getDevice(); + + // RLM: use reflection to grab the ID of our device for use later. + try { + Field deviceIDField; + deviceIDField = ALCdevice.class.getDeclaredField("device"); + deviceIDField.setAccessible(true); + try {deviceID = (Long)deviceIDField.get(device);} + catch (IllegalArgumentException e) {e.printStackTrace();} + catch (IllegalAccessException e) {e.printStackTrace();} + deviceIDField.setAccessible(false);} + catch (SecurityException e) {e.printStackTrace();} + catch (NoSuchFieldException e) {e.printStackTrace();} + + // the LWJGL context must be established as the master context before + // any other listeners can be created on this device. + initDevice(); + // Now, everything is initialized, and it is safe to add more listeners. 
+ latch.countDown(); + } + + + public void cleanup(){ + for(SoundProcessor sp : this.soundProcessorMap.values()){ + sp.cleanup(); + } + super.cleanup(); + } + + public void updateAllListeners(){ + for (int i = 0; i < this.listeners.size(); i++){ + Listener lis = this.listeners.get(i); + if (null != lis){ + Vector3f location = lis.getLocation(); + Vector3f velocity = lis.getVelocity(); + Vector3f orientation = lis.getUp(); + float gain = lis.getVolume(); + setNthListener3f(AL10.AL_POSITION, + location.x, location.y, location.z, i); + setNthListener3f(AL10.AL_VELOCITY, + velocity.x, velocity.y, velocity.z, i); + setNthListener3f(AL10.AL_ORIENTATION, + orientation.x, orientation.y, orientation.z, i); + setNthListenerf(AL10.AL_GAIN, gain, i); + } + } + } + + + public final static int BYTES_PER_SAMPLE = 4; + private ByteBuffer buffer = BufferUtils.createByteBuffer(4096); + + public void dispatchAudio(float tpf){ + int samplesToGet = (int) (tpf * 44100); + try {latch.await();} + catch (InterruptedException e) {e.printStackTrace();} + step(samplesToGet); + updateAllListeners(); + + for (int i = 0; i < this.listeners.size(); i++){ + buffer.clear(); + this.getSamples(buffer, samplesToGet, i); + SoundProcessor sp = + this.soundProcessorMap.get(this.listeners.get(i)); + if (null != sp){sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);} + } + + } + + public void update(float tpf){ + super.update(tpf); + dispatchAudio(tpf); + } + +} + diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/audio/MultiListener.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/com/aurellem/capture/audio/MultiListener.java Wed Oct 26 08:54:12 2011 -0700 @@ -0,0 +1,11 @@ +package com.aurellem.capture.audio; + +import com.jme3.audio.Listener; + +public interface MultiListener { + + void addListener(Listener l); + void registerSoundProcessor(Listener l, SoundProcessor sp); + void registerSoundProcessor(SoundProcessor sp); + +} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/audio/SeekableByteArrayOutputStream.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/com/aurellem/capture/audio/SeekableByteArrayOutputStream.java Wed Oct 26 08:54:12 2011 -0700 @@ -0,0 +1,153 @@ +/* + * @(#)SeekableByteArrayOutputStream.java 1.0 2010-12-27 + * + * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland. + * All rights reserved. + * + * You may not use, copy or modify this file, except in compliance with the + * license agreement you entered into with Werner Randelshofer. + * For details see accompanying license terms. + */ + +package com.aurellem.capture.audio; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.Arrays; +import static java.lang.Math.*; +/** + * {@code SeekableByteArrayOutputStream}. + * + * @author Werner Randelshofer + * @version 1.0 2010-12-27 Created. + */ +public class SeekableByteArrayOutputStream extends ByteArrayOutputStream { + + /** + * The current stream position. + */ + private int pos; + + /** + * Creates a new byte array output stream. The buffer capacity is + * initially 32 bytes, though its size increases if necessary. + */ + public SeekableByteArrayOutputStream() { + this(32); + } + + /** + * Creates a new byte array output stream, with a buffer capacity of + * the specified size, in bytes. + * + * @param size the initial size. + * @exception IllegalArgumentException if size is negative. 
+ */ + public SeekableByteArrayOutputStream(int size) { + if (size < 0) { + throw new IllegalArgumentException("Negative initial size: " + + size); + } + buf = new byte[size]; + } + + /** + * Writes the specified byte to this byte array output stream. + * + * @param b the byte to be written. + */ + @Override + public synchronized void write(int b) { + int newcount = max(pos + 1, count); + if (newcount > buf.length) { + buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); + } + buf[pos++] = (byte)b; + count = newcount; + } + + /** + * Writes len bytes from the specified byte array + * starting at offset off to this byte array output stream. + * + * @param b the data. + * @param off the start offset in the data. + * @param len the number of bytes to write. + */ + @Override + public synchronized void write(byte b[], int off, int len) { + if ((off < 0) || (off > b.length) || (len < 0) || + ((off + len) > b.length) || ((off + len) < 0)) { + throw new IndexOutOfBoundsException(); + } else if (len == 0) { + return; + } + int newcount = max(pos+len,count); + if (newcount > buf.length) { + buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); + } + System.arraycopy(b, off, buf, pos, len); + pos+=len; + count = newcount; + } + + /** + * Resets the count field of this byte array output + * stream to zero, so that all currently accumulated output in the + * output stream is discarded. The output stream can be used again, + * reusing the already allocated buffer space. + * + * @see java.io.ByteArrayInputStream#count + */ + @Override + public synchronized void reset() { + count = 0; + pos=0; + } + + /** + * Sets the current stream position to the desired location. The + * next read will occur at this location. The bit offset is set + * to 0. + * + *

An IndexOutOfBoundsException will be thrown if + * pos is smaller than the flushed position (as + * returned by getFlushedPosition()). + *
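The point of a seekable byte-array stream is the write-then-backpatch pattern: write a placeholder, keep going, then seek back and fill in the real value. A minimal sketch (a hypothetical demo class; the four-byte placeholder mimics a RIFF chunk-length field):

    import java.io.IOException;

    public class SeekBackpatchSketch {
        public static void main(String[] args) throws IOException {
            SeekableByteArrayOutputStream out = new SeekableByteArrayOutputStream();
            out.write(new byte[4]);               // placeholder for a length field
            out.write(new byte[]{1, 2, 3});       // chunk payload
            long end = out.getStreamPosition();   // remember the current end
            out.seek(0);                          // jump back to the placeholder
            out.write(3);                         // backpatch the low length byte
            out.seek(end);                        // resume appending at the end
        }
    }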

It is legal to seek past the end of the file; an + * EOFException will be thrown only if a read is + * performed. + * + * @param pos a long containing the desired file + * pointer position. + * + * @exception IndexOutOfBoundsException if pos is smaller + * than the flushed position. + * @exception IOException if any other I/O error occurs. + */ + public void seek(long pos) throws IOException { + this.pos = (int)pos; + } + + /** + * Returns the current byte position of the stream. The next write + * will take place starting at this offset. + * + * @return a long containing the position of the stream. + * + * @exception IOException if an I/O error occurs. + */ + public long getStreamPosition() throws IOException { + return pos; + } + + /** Writes the contents of the byte array into the specified output + * stream. + * @param out + */ + public void toOutputStream(OutputStream out) throws IOException { + out.write(buf, 0, count); + } + +} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/audio/SoundProcessor.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/com/aurellem/capture/audio/SoundProcessor.java Wed Oct 26 08:54:12 2011 -0700 @@ -0,0 +1,11 @@ +package com.aurellem.capture.audio; + +import java.nio.ByteBuffer; + +public interface SoundProcessor { + + void cleanup(); + + void process(ByteBuffer audioSamples, int numSamples); + +} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/audio/WaveFileWriter.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/com/aurellem/capture/audio/WaveFileWriter.java Wed Oct 26 08:54:12 2011 -0700 @@ -0,0 +1,46 @@ +package com.aurellem.capture.audio; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Vector; + +import javax.sound.sampled.AudioFileFormat; +import javax.sound.sampled.AudioFormat; +import javax.sound.sampled.AudioInputStream; +import javax.sound.sampled.AudioSystem; + +public class WaveFileWriter implements SoundProcessor { + + public Vector fullWaveData = new Vector(); + public File targetFile; + + public WaveFileWriter(File targetFile){ + this.targetFile = targetFile; + } + + public void cleanup() { + byte[] data = new byte[this.fullWaveData.size()]; + + for (int i = 0; i < this.fullWaveData.size(); i++){ + data[i] = this.fullWaveData.get(i);} + + + ByteArrayInputStream input = new ByteArrayInputStream(data); + AudioFormat format = new AudioFormat(44100.0f, 32, 1, true, false); + AudioInputStream audioInput = new AudioInputStream(input, format, data.length / 4 ); + try {AudioSystem.write(audioInput, AudioFileFormat.Type.WAVE, targetFile);} + catch (IOException e) {e.printStackTrace();} + + } + + + public void process(ByteBuffer audioSamples, int numSamples) { + for (int i = 0; i + * The images are written as video frames. + *

+ * Video frames can be encoded with one of the following formats:
+ * RAW, RLE, JPG, or PNG (the constants of the VideoFormat enum declared below).
+ *
+ * All frames must have the same format. + * When JPG is used each frame can have an individual encoding quality. + *

+ * All frames in an AVI file must have the same duration. The duration is + * set by choosing an appropriate pair of values for the methods + * {@link #setFrameRate} and {@link #setTimeScale}. + *

+ * The length of an AVI 1.0 file is limited to 1 GB. + * This class supports lengths of up to 4 GB, but such files may not work on + * all players. + *

+ * For detailed information about the AVI RIFF file format see:
+ * msdn.microsoft.com AVI RIFF
+ * www.microsoft.com FOURCC for Video Compression
+ * www.saettler.com RIFF
+ * + * @author Werner Randelshofer + * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream. + *
1.5 2011-01-06 Adds support for RLE 8-bit video format. + *
1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets + * in "idx1" chunk. + *
1.3.2 2010-12-27 File size limit is 1 GB. + *
1.3.1 2010-07-19 Fixes seeking and calculation of offsets. + *
1.3 2010-07-08 Adds constructor with ImageOutputStream. + * Adds method getVideoDimension(). + *
1.2 2009-08-29 Adds support for RAW video format. + *
1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih + * chunk. Changes the API to reflect that AVI works with frame rates instead of + * frame durations. + *
1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG + * encoded video. + *
1.0 2008-08-11 Created. + */ +public class AVIOutputStream { + + /** + * Underlying output stream. + */ + private ImageOutputStream out; + /** The offset of the QuickTime stream in the underlying ImageOutputStream. + * Normally this is 0 unless the underlying stream already contained data + * when it was passed to the constructor. + */ + private long streamOffset; + /** Previous frame for delta compression. */ + private Object previousData; + + /** + * Supported video encodings. + */ + public static enum VideoFormat { + + RAW, RLE, JPG, PNG; + } + /** + * Current video formats. + */ + private VideoFormat videoFormat; + /** + * Quality of JPEG encoded video frames. + */ + private float quality = 0.9f; + /** + * Creation time of the movie output stream. + */ + private Date creationTime; + /** + * Width of the video frames. All frames must have the same width. + * The value -1 is used to mark unspecified width. + */ + private int imgWidth = -1; + /** + * Height of the video frames. All frames must have the same height. + * The value -1 is used to mark unspecified height. + */ + private int imgHeight = -1; + /** Number of bits per pixel. */ + private int imgDepth = 24; + /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */ + private IndexColorModel palette; + private IndexColorModel previousPalette; + /** Video encoder. */ + + /** + * The timeScale of the movie. + *

+ * Used with frameRate to specify the time scale that this stream will use. + * Dividing frameRate by timeScale gives the number of samples per second. + * For video streams, this is the frame rate. For audio streams, this rate + * corresponds to the time needed to play nBlockAlign bytes of audio, which + * for PCM audio is just the sample rate. For example, frameRate = 2997 with + * timeScale = 100 yields 29.97 frames per second. + */ + private int timeScale = 1; + /** + * The frameRate of the movie in timeScale units. + *

+ * @see timeScale + */ + private int frameRate = 30; + /** Interval between keyframes. */ + private int syncInterval = 30; + + /** + * The states of the movie output stream. + */ + private static enum States { + + STARTED, FINISHED, CLOSED; + } + /** + * The current state of the movie output stream. + */ + private States state = States.FINISHED; + + /** + * AVI stores media data in samples. + * A sample is a single element in a sequence of time-ordered data. + */ + private static class Sample { + + String chunkType; + /** Offset of the sample relative to the start of the AVI file. + */ + long offset; + /** Data length of the sample. */ + long length; + /** + * The duration of the sample in time scale units. + */ + int duration; + /** Whether the sample is a sync-sample. */ + boolean isSync; + + /** + * Creates a new sample. + * @param duration + * @param offset + * @param length + */ + public Sample(String chunkId, int duration, long offset, long length, boolean isSync) { + this.chunkType = chunkId; + this.duration = duration; + this.offset = offset; + this.length = length; + this.isSync = isSync; + } + } + /** + * List of video frames. + */ + private LinkedList videoFrames; + /** + * This chunk holds the whole AVI content. + */ + private CompositeChunk aviChunk; + /** + * This chunk holds the movie frames. + */ + private CompositeChunk moviChunk; + /** + * This chunk holds the AVI Main Header. + */ + FixedSizeDataChunk avihChunk; + /** + * This chunk holds the AVI Stream Header. + */ + FixedSizeDataChunk strhChunk; + /** + * This chunk holds the AVI Stream Format Header. + */ + FixedSizeDataChunk strfChunk; + + /** + * Chunk base class. + */ + private abstract class Chunk { + + /** + * The chunkType of the chunk. A String with the length of 4 characters. + */ + protected String chunkType; + /** + * The offset of the chunk relative to the start of the + * ImageOutputStream. + */ + protected long offset; + + /** + * Creates a new Chunk at the current position of the ImageOutputStream. + * @param chunkType The chunkType of the chunk. A string with a length of 4 characters. + */ + public Chunk(String chunkType) throws IOException { + this.chunkType = chunkType; + offset = getRelativeStreamPosition(); + } + + /** + * Writes the chunk to the ImageOutputStream and disposes it. + */ + public abstract void finish() throws IOException; + + /** + * Returns the size of the chunk including the size of the chunk header. + * @return The size of the chunk. + */ + public abstract long size(); + } + + /** + * A CompositeChunk contains an ordered list of Chunks. + */ + private class CompositeChunk extends Chunk { + + /** + * The type of the composite. A String with the length of 4 characters. + */ + protected String compositeType; + private LinkedList children; + private boolean finished; + + /** + * Creates a new CompositeChunk at the current position of the + * ImageOutputStream. + * @param compositeType The type of the composite. + * @param chunkType The type of the chunk. 
+ */ + public CompositeChunk(String compositeType, String chunkType) throws IOException { + super(chunkType); + this.compositeType = compositeType; + //out.write + out.writeLong(0); // make room for the chunk header + out.writeInt(0); // make room for the chunk header + children = new LinkedList(); + } + + public void add(Chunk child) throws IOException { + if (children.size() > 0) { + children.getLast().finish(); + } + children.add(child); + } + + /** + * Writes the chunk and all its children to the ImageOutputStream + * and disposes of all resources held by the chunk. + * @throws java.io.IOException + */ + @Override + public void finish() throws IOException { + if (!finished) { + if (size() > 0xffffffffL) { + throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size()); + } + + long pointer = getRelativeStreamPosition(); + seekRelative(offset); + + DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); + headerData.writeType(compositeType); + headerData.writeUInt(size() - 8); + headerData.writeType(chunkType); + for (Chunk child : children) { + child.finish(); + } + seekRelative(pointer); + if (size() % 2 == 1) { + out.writeByte(0); // write pad byte + } + finished = true; + } + } + + @Override + public long size() { + long length = 12; + for (Chunk child : children) { + length += child.size() + child.size() % 2; + } + return length; + } + } + + /** + * Data Chunk. + */ + private class DataChunk extends Chunk { + + private DataChunkOutputStream data; + private boolean finished; + + /** + * Creates a new DataChunk at the current position of the + * ImageOutputStream. + * @param chunkType The chunkType of the chunk. + */ + public DataChunk(String name) throws IOException { + super(name); + out.writeLong(0); // make room for the chunk header + data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false); + } + + public DataChunkOutputStream getOutputStream() { + if (finished) { + throw new IllegalStateException("DataChunk is finished"); + } + return data; + } + + /** + * Returns the offset of this chunk to the beginning of the random access file + * @return + */ + public long getOffset() { + return offset; + } + + @Override + public void finish() throws IOException { + if (!finished) { + long sizeBefore = size(); + + if (size() > 0xffffffffL) { + throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size()); + } + + long pointer = getRelativeStreamPosition(); + seekRelative(offset); + + DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); + headerData.writeType(chunkType); + headerData.writeUInt(size() - 8); + seekRelative(pointer); + if (size() % 2 == 1) { + out.writeByte(0); // write pad byte + } + finished = true; + long sizeAfter = size(); + if (sizeBefore != sizeAfter) { + System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter); + } + } + } + + @Override + public long size() { + return 8 + data.size(); + } + } + + /** + * A DataChunk with a fixed size. + */ + private class FixedSizeDataChunk extends Chunk { + + private DataChunkOutputStream data; + private boolean finished; + private long fixedSize; + + /** + * Creates a new DataChunk at the current position of the + * ImageOutputStream. + * @param chunkType The chunkType of the chunk. 
+ */ + public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException { + super(chunkType); + this.fixedSize = fixedSize; + data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); + data.writeType(chunkType); + data.writeUInt(fixedSize); + data.clearCount(); + + // Fill fixed size with nulls + byte[] buf = new byte[(int) Math.min(512, fixedSize)]; + long written = 0; + while (written < fixedSize) { + data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written)); + written += Math.min(buf.length, fixedSize - written); + } + if (fixedSize % 2 == 1) { + out.writeByte(0); // write pad byte + } + seekToStartOfData(); + } + + public DataChunkOutputStream getOutputStream() { + /*if (finished) { + throw new IllegalStateException("DataChunk is finished"); + }*/ + return data; + } + + /** + * Returns the offset of this chunk to the beginning of the random access file + * @return + */ + public long getOffset() { + return offset; + } + + public void seekToStartOfData() throws IOException { + seekRelative(offset + 8); + data.clearCount(); + } + + public void seekToEndOfChunk() throws IOException { + seekRelative(offset + 8 + fixedSize + fixedSize % 2); + } + + @Override + public void finish() throws IOException { + if (!finished) { + finished = true; + } + } + + @Override + public long size() { + return 8 + fixedSize; + } + } + + /** + * Creates a new AVI file with the specified video format and + * frame rate. The video has 24 bits per pixel. + * + * @param file the output file + * @param format Selects an encoder for the video format. + * @param bitsPerPixel the number of bits per pixel. + * @exception IllegalArgumentException if videoFormat is null or if + * frame rate is <= 0 + */ + public AVIOutputStream(File file, VideoFormat format) throws IOException { + this(file,format,24); + } + /** + * Creates a new AVI file with the specified video format and + * frame rate. + * + * @param file the output file + * @param format Selects an encoder for the video format. + * @param bitsPerPixel the number of bits per pixel. + * @exception IllegalArgumentException if videoFormat is null or if + * frame rate is <= 0 + */ + public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException { + if (format == null) { + throw new IllegalArgumentException("format must not be null"); + } + + if (file.exists()) { + file.delete(); + } + this.out = new FileImageOutputStream(file); + this.streamOffset = 0; + this.videoFormat = format; + this.videoFrames = new LinkedList(); + this.imgDepth = bitsPerPixel; + if (imgDepth == 4) { + byte[] gray = new byte[16]; + for (int i = 0; i < gray.length; i++) { + gray[i] = (byte) ((i << 4) | i); + } + palette = new IndexColorModel(4, 16, gray, gray, gray); + } else if (imgDepth == 8) { + byte[] gray = new byte[256]; + for (int i = 0; i < gray.length; i++) { + gray[i] = (byte) i; + } + palette = new IndexColorModel(8, 256, gray, gray, gray); + } + + } + + /** + * Creates a new AVI output stream with the specified video format and + * framerate. + * + * @param out the underlying output stream + * @param format Selects an encoder for the video format. 
+ * @exception IllegalArgumentException if videoFormat is null or if
+ * framerate is <= 0
+ */
+ public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {
+ if (format == null) {
+ throw new IllegalArgumentException("format must not be null");
+ }
+ this.out = out;
+ this.streamOffset = out.getStreamPosition();
+ this.videoFormat = format;
+ this.videoFrames = new LinkedList<Sample>();
+ }
+
+ /**
+ * Used with frameRate to specify the time scale that this stream will use.
+ * Dividing frameRate by timeScale gives the number of samples per second.
+ * For video streams, this is the frame rate. For audio streams, this rate
+ * corresponds to the time needed to play nBlockAlign bytes of audio, which
+ * for PCM audio is just the sample rate.
+ * <p>
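+ * For example (an illustrative pairing, not mandated by this class),
+ * NTSC's 29.97 frames per second can be expressed exactly as a
+ * frame rate of 30000 over a time scale of 1001:
+ * <pre>
+ * out.setTimeScale(1001);
+ * out.setFrameRate(30000);
+ * </pre>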
+ * The default value is 1.
+ *
+ * @param newValue
+ */
+ public void setTimeScale(int newValue) {
+ if (newValue <= 0) {
+ throw new IllegalArgumentException("timeScale must be greater than 0");
+ }
+ this.timeScale = newValue;
+ }
+
+ /**
+ * Returns the time scale of this media.
+ *
+ * @return time scale
+ */
+ public int getTimeScale() {
+ return timeScale;
+ }
+
+ /**
+ * Sets the rate of video frames in time scale units.
+ * <p>
+ * The default value is 30. Together with the default value 1 of timeScale
+ * this results in 30 frames per second.
+ *
+ * @param newValue
+ */
+ public void setFrameRate(int newValue) {
+ if (newValue <= 0) {
+ throw new IllegalArgumentException("frameRate must be greater than 0");
+ }
+ if (state == States.STARTED) {
+ throw new IllegalStateException("frameRate must be set before the first frame is written");
+ }
+ this.frameRate = newValue;
+ }
+
+ /**
+ * Returns the frame rate of this media.
+ *
+ * @return frame rate
+ */
+ public int getFrameRate() {
+ return frameRate;
+ }
+
+ /** Sets the global color palette. */
+ public void setPalette(IndexColorModel palette) {
+ this.palette = palette;
+ }
+
+ /**
+ * Sets the compression quality of the video track.
+ * A value of 0 stands for "high compression is important", a value of
+ * 1 for "high image quality is important".
+ * <p>
+ * Changing this value affects frames which are subsequently written + * to the AVIOutputStream. Frames which have already been written + * are not changed. + *
+ * <p>
+ * This value only has an effect on videos encoded with the JPG format.
+ * <p>
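+ * For example (values are illustrative only):
+ * <pre>
+ * out.setVideoCompressionQuality(0.5f); // smaller frames, more artifacts
+ * out.setVideoCompressionQuality(1.0f); // best-quality JPG frames
+ * </pre>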
+ * The default value is 0.9. + * + * @param newValue + */ + public void setVideoCompressionQuality(float newValue) { + this.quality = newValue; + } + + /** + * Returns the video compression quality. + * + * @return video compression quality + */ + public float getVideoCompressionQuality() { + return quality; + } + + /** + * Sets the dimension of the video track. + *

+ * You need to explicitly set the dimension if you add all frames from
+ * files or input streams.
+ * <p>
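+ * For example (the file name is illustrative):
+ * <pre>
+ * out.setVideoDimension(640, 480);
+ * out.writeFrame(new File("frame0001.jpg"));
+ * </pre>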
+ * If you add frames from buffered images, then AVIOutputStream
+ * can determine the video dimension from the image width and height.
+ *
+ * @param width Must be greater than 0.
+ * @param height Must be greater than 0.
+ */
+ public void setVideoDimension(int width, int height) {
+ if (width < 1 || height < 1) {
+ throw new IllegalArgumentException("width and height must be greater than zero.");
+ }
+ this.imgWidth = width;
+ this.imgHeight = height;
+ }
+
+ /**
+ * Gets the dimension of the video track.
+ * <p>
+ * Returns null if the dimension is not known.
+ */
+ public Dimension getVideoDimension() {
+ if (imgWidth < 1 || imgHeight < 1) {
+ return null;
+ }
+ return new Dimension(imgWidth, imgHeight);
+ }
+
+ /**
+ * Sets the state of the AVIOutputStream to started.
+ * <p>
+ * If the state is changed by this method, the prolog is + * written. + */ + private void ensureStarted() throws IOException { + if (state != States.STARTED) { + creationTime = new Date(); + writeProlog(); + state = States.STARTED; + } + } + + /** + * Writes a frame to the video track. + *
+ * <p>
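+ * A minimal usage sketch (file name and frame source are illustrative):
+ * <pre>
+ * AVIOutputStream out = new AVIOutputStream(new File("video.avi"), VideoFormat.JPG);
+ * for (BufferedImage img : frames) {
+ *     out.writeFrame(img);
+ * }
+ * out.close();
+ * </pre>
+ * <p>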
+ * If the dimension of the video track has not been specified yet, it + * is derived from the first buffered image added to the AVIOutputStream. + * + * @param image The frame image. + * + * @throws IllegalArgumentException if the duration is less than 1, or + * if the dimension of the frame does not match the dimension of the video + * track. + * @throws IOException if writing the image failed. + */ + public void writeFrame(BufferedImage image) throws IOException { + ensureOpen(); + ensureStarted(); + + // Get the dimensions of the first image + if (imgWidth == -1) { + imgWidth = image.getWidth(); + imgHeight = image.getHeight(); + } else { + // The dimension of the image must match the dimension of the video track + if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) { + throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size() + + "] (width=" + image.getWidth() + ", height=" + image.getHeight() + + ") differs from image[0] (width=" + + imgWidth + ", height=" + imgHeight); + } + } + + DataChunk videoFrameChunk; + long offset = getRelativeStreamPosition(); + boolean isSync = true; + switch (videoFormat) { + case RAW: { + switch (imgDepth) { + case 4: { + IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); + int[] imgRGBs = new int[16]; + imgPalette.getRGBs(imgRGBs); + int[] previousRGBs = new int[16]; + if (previousPalette == null) { + previousPalette = palette; + } + previousPalette.getRGBs(previousRGBs); + if (!Arrays.equals(imgRGBs, previousRGBs)) { + previousPalette = imgPalette; + DataChunk paletteChangeChunk = new DataChunk("00pc"); + /* + int first = imgPalette.getMapSize(); + int last = -1; + for (int i = 0; i < 16; i++) { + if (previousRGBs[i] != imgRGBs[i] && i < first) { + first = i; + } + if (previousRGBs[i] != imgRGBs[i] && i > last) { + last = i; + } + }*/ + int first = 0; + int last = imgPalette.getMapSize() - 1; + /* + * typedef struct { + BYTE bFirstEntry; + BYTE bNumEntries; + WORD wFlags; + PALETTEENTRY peNew[]; + } AVIPALCHANGE; + * + * typedef struct tagPALETTEENTRY { + BYTE peRed; + BYTE peGreen; + BYTE peBlue; + BYTE peFlags; + } PALETTEENTRY; + */ + DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); + pOut.writeByte(first);//bFirstEntry + pOut.writeByte(last - first + 1);//bNumEntries + pOut.writeShort(0);//wFlags + + for (int i = first; i <= last; i++) { + pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red + pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green + pOut.writeByte(imgRGBs[i] & 0xff); // blue + pOut.writeByte(0); // reserved*/ + } + + moviChunk.add(paletteChangeChunk); + paletteChangeChunk.finish(); + long length = getRelativeStreamPosition() - offset; + videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); + offset = getRelativeStreamPosition(); + } + + videoFrameChunk = new DataChunk("00db"); + byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); + byte[] rgb4 = new byte[imgWidth / 2]; + for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down + for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) { + rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf)); + } + videoFrameChunk.getOutputStream().write(rgb4); + } + break; + } + case 8: { + IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); + int[] imgRGBs = new int[256]; + imgPalette.getRGBs(imgRGBs); + int[] previousRGBs = new int[256]; + if (previousPalette == null) { + previousPalette = palette; + } + 
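+ // Compare this frame's palette with the previous frame's palette; if it
+ // differs, a "00pc" palette-change chunk must be emitted before the frame data.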
previousPalette.getRGBs(previousRGBs); + if (!Arrays.equals(imgRGBs, previousRGBs)) { + previousPalette = imgPalette; + DataChunk paletteChangeChunk = new DataChunk("00pc"); + /* + int first = imgPalette.getMapSize(); + int last = -1; + for (int i = 0; i < 16; i++) { + if (previousRGBs[i] != imgRGBs[i] && i < first) { + first = i; + } + if (previousRGBs[i] != imgRGBs[i] && i > last) { + last = i; + } + }*/ + int first = 0; + int last = imgPalette.getMapSize() - 1; + /* + * typedef struct { + BYTE bFirstEntry; + BYTE bNumEntries; + WORD wFlags; + PALETTEENTRY peNew[]; + } AVIPALCHANGE; + * + * typedef struct tagPALETTEENTRY { + BYTE peRed; + BYTE peGreen; + BYTE peBlue; + BYTE peFlags; + } PALETTEENTRY; + */ + DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); + pOut.writeByte(first);//bFirstEntry + pOut.writeByte(last - first + 1);//bNumEntries + pOut.writeShort(0);//wFlags + + for (int i = first; i <= last; i++) { + pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red + pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green + pOut.writeByte(imgRGBs[i] & 0xff); // blue + pOut.writeByte(0); // reserved*/ + } + + moviChunk.add(paletteChangeChunk); + paletteChangeChunk.finish(); + long length = getRelativeStreamPosition() - offset; + videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); + offset = getRelativeStreamPosition(); + } + + videoFrameChunk = new DataChunk("00db"); + byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); + for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down + videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth); + } + break; + } + default: { + videoFrameChunk = new DataChunk("00db"); + WritableRaster raster = image.getRaster(); + int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data + byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data + for (int y = imgHeight - 1; y >= 0; --y) { // Upside down + raster.getPixels(0, y, imgWidth, 1, raw); + for (int x = 0, n = imgWidth * 3; x < n; x += 3) { + bytes[x + 2] = (byte) raw[x]; // Blue + bytes[x + 1] = (byte) raw[x + 1]; // Green + bytes[x] = (byte) raw[x + 2]; // Red + } + videoFrameChunk.getOutputStream().write(bytes); + } + break; + } + } + break; + } + + case JPG: { + videoFrameChunk = new DataChunk("00dc"); + ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next(); + ImageWriteParam iwParam = iw.getDefaultWriteParam(); + iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); + iwParam.setCompressionQuality(quality); + MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); + iw.setOutput(imgOut); + IIOImage img = new IIOImage(image, null, null); + iw.write(null, img, iwParam); + iw.dispose(); + break; + } + case PNG: + default: { + videoFrameChunk = new DataChunk("00dc"); + ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next(); + ImageWriteParam iwParam = iw.getDefaultWriteParam(); + MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); + iw.setOutput(imgOut); + IIOImage img = new IIOImage(image, null, null); + iw.write(null, img, iwParam); + iw.dispose(); + break; + } + } + long length = getRelativeStreamPosition() - offset; + moviChunk.add(videoFrameChunk); + videoFrameChunk.finish(); + + videoFrames.add(new Sample(videoFrameChunk.chunkType, 
frameRate, offset, length - 8, isSync)); + if (getRelativeStreamPosition() > 1L << 32) { + throw new IOException("AVI file is larger than 4 GB"); + } + } + + /** + * Writes a frame from a file to the video track. + *
+ * <p>
+ * This method does not inspect the contents of the file.
+ * For example, it is your responsibility to only add JPG files if you have
+ * chosen the JPEG video format.
+ * <p>
+ * If you add all frames from files or from input streams, then you + * have to explicitly set the dimension of the video track before you + * call finish() or close(). + * + * @param file The file which holds the image data. + * + * @throws IllegalStateException if the duration is less than 1. + * @throws IOException if writing the image failed. + */ + public void writeFrame(File file) throws IOException { + FileInputStream in = null; + try { + in = new FileInputStream(file); + writeFrame(in); + } finally { + if (in != null) { + in.close(); + } + } + } + + /** + * Writes a frame to the video track. + *
+ * <p>
+ * This method does not inspect the contents of the stream.
+ * For example, it is your responsibility to only add JPG data if you have
+ * chosen the JPEG video format.
+ * <p>
+ * If you add all frames from files or from input streams, then you
+ * have to explicitly set the dimension of the video track before you
+ * call finish() or close().
+ *
+ * @param in The input stream which holds the image data.
+ *
+ * @throws IllegalArgumentException if the duration is less than 1.
+ * @throws IOException if writing the image failed.
+ */
+ public void writeFrame(InputStream in) throws IOException {
+ ensureOpen();
+ ensureStarted();
+
+ DataChunk videoFrameChunk = new DataChunk(
+ videoFormat == VideoFormat.RAW ? "00db" : "00dc");
+ moviChunk.add(videoFrameChunk);
+ OutputStream mdatOut = videoFrameChunk.getOutputStream();
+ long offset = getRelativeStreamPosition();
+ byte[] buf = new byte[512];
+ int len;
+ while ((len = in.read(buf)) != -1) {
+ mdatOut.write(buf, 0, len);
+ }
+ long length = getRelativeStreamPosition() - offset;
+ videoFrameChunk.finish();
+ videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true));
+ if (getRelativeStreamPosition() > 1L << 32) {
+ throw new IOException("AVI file is larger than 4 GB");
+ }
+ }
+
+ /**
+ * Closes the movie file as well as the stream being filtered.
+ *
+ * @exception IOException if an I/O error has occurred
+ */
+ public void close() throws IOException {
+ if (state == States.STARTED) {
+ finish();
+ }
+ if (state != States.CLOSED) {
+ out.close();
+ state = States.CLOSED;
+ }
+ }
+
+ /**
+ * Finishes writing the contents of the AVI output stream without closing
+ * the underlying stream. Use this method when applying multiple filters
+ * in succession to the same output stream.
+ *
+ * @exception IllegalStateException if the dimension of the video track
+ * has not been specified or determined yet.
+ * @exception IOException if an I/O exception has occurred
+ */
+ public void finish() throws IOException {
+ ensureOpen();
+ if (state != States.FINISHED) {
+ if (imgWidth == -1 || imgHeight == -1) {
+ throw new IllegalStateException("image width and height must be specified");
+ }
+
+ moviChunk.finish();
+ writeEpilog();
+ state = States.FINISHED;
+ imgWidth = imgHeight = -1;
+ }
+ }
+
+ /**
+ * Checks to make sure that this stream has not been closed.
+ */
+ private void ensureOpen() throws IOException {
+ if (state == States.CLOSED) {
+ throw new IOException("Stream closed");
+ }
+ }
+
+ /** Gets the position relative to the beginning of the AVI stream.
+ * <p>
+ * Usually this value is equal to the stream position of the underlying
+ * ImageOutputStream, but can be larger if the underlying stream already
+ * contained data.
+ *
+ * @return The relative stream position.
+ * @throws IOException
+ */
+ private long getRelativeStreamPosition() throws IOException {
+ return out.getStreamPosition() - streamOffset;
+ }
+
+ /** Seeks relative to the beginning of the AVI stream.
+ * <p>
+ * Usually this is equal to seeking in the underlying ImageOutputStream, but
+ * can be different if the underlying stream already contained data.
+ *
+ */
+ private void seekRelative(long newPosition) throws IOException {
+ out.seek(newPosition + streamOffset);
+ }
+
+ private void writeProlog() throws IOException {
+ // The file has the following structure:
+ //
+ // .RIFF AVI
+ // ..avih (AVI Header Chunk)
+ // ..LIST strl
+ // ...strh (Stream Header Chunk)
+ // ...strf (Stream Format Chunk)
+ // ..LIST movi
+ // ...00dc (Compressed video data chunk in Track 00, repeated for each frame)
+ // ..idx1 (List of video data chunks and their location in the file)
+
+ // The RIFF AVI Chunk holds the complete movie
+ aviChunk = new CompositeChunk("RIFF", "AVI ");
+ CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl");
+
+ // Write empty AVI Main Header Chunk - we fill the data in later
+ aviChunk.add(hdrlChunk);
+ avihChunk = new FixedSizeDataChunk("avih", 56);
+ avihChunk.seekToEndOfChunk();
+ hdrlChunk.add(avihChunk);
+
+ CompositeChunk strlChunk = new CompositeChunk("LIST", "strl");
+ hdrlChunk.add(strlChunk);
+
+ // Write empty AVI Stream Header Chunk - we fill the data in later
+ strhChunk = new FixedSizeDataChunk("strh", 56);
+ strhChunk.seekToEndOfChunk();
+ strlChunk.add(strhChunk);
+ strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4);
+ strfChunk.seekToEndOfChunk();
+ strlChunk.add(strfChunk);
+
+ moviChunk = new CompositeChunk("LIST", "movi");
+ aviChunk.add(moviChunk);
+ }
+
+ private void writeEpilog() throws IOException {
+ // Compute values
+ int duration = 0;
+ for (Sample s : videoFrames) {
+ duration += s.duration;
+ }
+ long bufferSize = 0;
+ for (Sample s : videoFrames) {
+ if (s.length > bufferSize) {
+ bufferSize = s.length;
+ }
+ }
+
+ DataChunkOutputStream d;
+
+ /* Create Idx1 Chunk and write data
+ * -------------
+ typedef struct _avioldindex {
+ FOURCC fcc;
+ DWORD cb;
+ struct _avioldindex_entry {
+ DWORD dwChunkId;
+ DWORD dwFlags;
+ DWORD dwOffset;
+ DWORD dwSize;
+ } aIndex[];
+ } AVIOLDINDEX;
+ */
+ DataChunk idx1Chunk = new DataChunk("idx1");
+ aviChunk.add(idx1Chunk);
+ d = idx1Chunk.getOutputStream();
+ long moviListOffset = moviChunk.offset + 8;
+ //moviListOffset = 0;
+ for (Sample f : videoFrames) {
+
+ d.writeType(f.chunkType); // dwChunkId
+ // Specifies a FOURCC that identifies a stream in the AVI file. The
+ // FOURCC must have the form 'xxyy' where xx is the stream number and yy
+ // is a two-character code that identifies the contents of the stream:
+ //
+ // Two-character code Description
+ // db Uncompressed video frame
+ // dc Compressed video frame
+ // pc Palette change
+ // wb Audio data
+
+ d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)//
+ | (f.isSync ? 0x10 : 0x0)); // dwFlags
+ // Specifies a bitwise combination of zero or more of the following
+ // flags:
+ //
+ // Value Name Description
+ // 0x10 AVIIF_KEYFRAME The data chunk is a key frame.
+ // 0x1 AVIIF_LIST The data chunk is a 'rec ' list.
+ // 0x100 AVIIF_NO_TIME The data chunk does not affect the timing of the
+ // stream. For example, this flag should be set for
+ // palette changes.
+
+ d.writeUInt(f.offset - moviListOffset); // dwOffset
+ // Specifies the location of the data chunk in the file. The value
+ // should be specified as an offset, in bytes, from the start of the
+ // 'movi' list; however, in some AVI files it is given as an offset from
+ // the start of the file.
+ + d.writeUInt(f.length); // dwSize + // Specifies the size of the data chunk, in bytes. + } + idx1Chunk.finish(); + + /* Write Data into AVI Main Header Chunk + * ------------- + * The AVIMAINHEADER structure defines global information in an AVI file. + * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx + typedef struct _avimainheader { + FOURCC fcc; + DWORD cb; + DWORD dwMicroSecPerFrame; + DWORD dwMaxBytesPerSec; + DWORD dwPaddingGranularity; + DWORD dwFlags; + DWORD dwTotalFrames; + DWORD dwInitialFrames; + DWORD dwStreams; + DWORD dwSuggestedBufferSize; + DWORD dwWidth; + DWORD dwHeight; + DWORD dwReserved[4]; + } AVIMAINHEADER; */ + avihChunk.seekToStartOfData(); + d = avihChunk.getOutputStream(); + + d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame + // Specifies the number of microseconds between frames. + // This value indicates the overall timing for the file. + + d.writeUInt(0); // dwMaxBytesPerSec + // Specifies the approximate maximum data rate of the file. + // This value indicates the number of bytes per second the system + // must handle to present an AVI sequence as specified by the other + // parameters contained in the main header and stream header chunks. + + d.writeUInt(0); // dwPaddingGranularity + // Specifies the alignment for data, in bytes. Pad the data to multiples + // of this value. + + d.writeUInt(0x10); // dwFlags (0x10 == hasIndex) + // Contains a bitwise combination of zero or more of the following + // flags: + // + // Value Name Description + // 0x10 AVIF_HASINDEX Indicates the AVI file has an index. + // 0x20 AVIF_MUSTUSEINDEX Indicates that application should use the + // index, rather than the physical ordering of the + // chunks in the file, to determine the order of + // presentation of the data. For example, this flag + // could be used to create a list of frames for + // editing. + // 0x100 AVIF_ISINTERLEAVED Indicates the AVI file is interleaved. + // 0x1000 AVIF_WASCAPTUREFILE Indicates the AVI file is a specially + // allocated file used for capturing real-time + // video. Applications should warn the user before + // writing over a file with this flag set because + // the user probably defragmented this file. + // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted + // data and software. When this flag is used, + // software should not permit the data to be + // duplicated. + + d.writeUInt(videoFrames.size()); // dwTotalFrames + // Specifies the total number of frames of data in the file. + + d.writeUInt(0); // dwInitialFrames + // Specifies the initial frame for interleaved files. Noninterleaved + // files should specify zero. If you are creating interleaved files, + // specify the number of frames in the file prior to the initial frame + // of the AVI sequence in this member. + // To give the audio driver enough audio to work with, the audio data in + // an interleaved file must be skewed from the video data. Typically, + // the audio data should be moved forward enough frames to allow + // approximately 0.75 seconds of audio data to be preloaded. The + // dwInitialRecords member should be set to the number of frames the + // audio is skewed. Also set the same value for the dwInitialFrames + // member of the AVISTREAMHEADER structure in the audio stream header + + d.writeUInt(1); // dwStreams + // Specifies the number of streams in the file. For example, a file with + // audio and video has two streams. 
+ + d.writeUInt(bufferSize); // dwSuggestedBufferSize + // Specifies the suggested buffer size for reading the file. Generally, + // this size should be large enough to contain the largest chunk in the + // file. If set to zero, or if it is too small, the playback software + // will have to reallocate memory during playback, which will reduce + // performance. For an interleaved file, the buffer size should be large + // enough to read an entire record, and not just a chunk. + + + d.writeUInt(imgWidth); // dwWidth + // Specifies the width of the AVI file in pixels. + + d.writeUInt(imgHeight); // dwHeight + // Specifies the height of the AVI file in pixels. + + d.writeUInt(0); // dwReserved[0] + d.writeUInt(0); // dwReserved[1] + d.writeUInt(0); // dwReserved[2] + d.writeUInt(0); // dwReserved[3] + // Reserved. Set this array to zero. + + /* Write Data into AVI Stream Header Chunk + * ------------- + * The AVISTREAMHEADER structure contains information about one stream + * in an AVI file. + * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx + typedef struct _avistreamheader { + FOURCC fcc; + DWORD cb; + FOURCC fccType; + FOURCC fccHandler; + DWORD dwFlags; + WORD wPriority; + WORD wLanguage; + DWORD dwInitialFrames; + DWORD dwScale; + DWORD dwRate; + DWORD dwStart; + DWORD dwLength; + DWORD dwSuggestedBufferSize; + DWORD dwQuality; + DWORD dwSampleSize; + struct { + short int left; + short int top; + short int right; + short int bottom; + } rcFrame; + } AVISTREAMHEADER; + */ + strhChunk.seekToStartOfData(); + d = strhChunk.getOutputStream(); + d.writeType("vids"); // fccType - vids for video stream + // Contains a FOURCC that specifies the type of the data contained in + // the stream. The following standard AVI values for video and audio are + // defined: + // + // FOURCC Description + // 'auds' Audio stream + // 'mids' MIDI stream + // 'txts' Text stream + // 'vids' Video stream + + switch (videoFormat) { + case RAW: + d.writeType("DIB "); // fccHandler - DIB for Raw RGB + break; + case RLE: + d.writeType("RLE "); // fccHandler - Microsoft RLE + break; + case JPG: + d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG + break; + case PNG: + default: + d.writeType("png "); // fccHandler - png for PNG + break; + } + // Optionally, contains a FOURCC that identifies a specific data + // handler. The data handler is the preferred handler for the stream. + // For audio and video streams, this specifies the codec for decoding + // the stream. + + if (imgDepth <= 8) { + d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES + } else { + d.writeUInt(0); // dwFlags + } + + // Contains any flags for the data stream. The bits in the high-order + // word of these flags are specific to the type of data contained in the + // stream. The following standard flags are defined: + // + // Value Name Description + // AVISF_DISABLED 0x00000001 Indicates this stream should not + // be enabled by default. + // AVISF_VIDEO_PALCHANGES 0x00010000 + // Indicates this video stream contains + // palette changes. This flag warns the playback + // software that it will need to animate the + // palette. + + d.writeUShort(0); // wPriority + // Specifies priority of a stream type. For example, in a file with + // multiple audio streams, the one with the highest priority might be + // the default stream. + + d.writeUShort(0); // wLanguage + // Language tag. + + d.writeUInt(0); // dwInitialFrames + // Specifies how far audio data is skewed ahead of the video frames in + // interleaved files. 
Typically, this is about 0.75 seconds. If you are
+ // creating interleaved files, specify the number of frames in the file
+ // prior to the initial frame of the AVI sequence in this member. For
+ // more information, see the remarks for the dwInitialFrames member of
+ // the AVIMAINHEADER structure.
+
+ d.writeUInt(timeScale); // dwScale
+ // Used with dwRate to specify the time scale that this stream will use.
+ // Dividing dwRate by dwScale gives the number of samples per second.
+ // For video streams, this is the frame rate. For audio streams, this
+ // rate corresponds to the time needed to play nBlockAlign bytes of
+ // audio, which for PCM audio is just the sample rate.
+
+ d.writeUInt(frameRate); // dwRate
+ // See dwScale.
+
+ d.writeUInt(0); // dwStart
+ // Specifies the starting time for this stream. The units are defined by
+ // the dwRate and dwScale members in the main file header. Usually, this
+ // is zero, but it can specify a delay time for a stream that does not
+ // start concurrently with the file.
+
+ d.writeUInt(videoFrames.size()); // dwLength
+ // Specifies the length of this stream. The units are defined by the
+ // dwRate and dwScale members of the stream's header.
+
+ d.writeUInt(bufferSize); // dwSuggestedBufferSize
+ // Specifies how large a buffer should be used to read this stream.
+ // Typically, this contains a value corresponding to the largest chunk
+ // present in the stream. Using the correct buffer size makes playback
+ // more efficient. Use zero if you do not know the correct buffer size.
+
+ d.writeInt(-1); // dwQuality
+ // Specifies an indicator of the quality of the data in the stream.
+ // Quality is represented as a number between 0 and 10,000.
+ // For compressed data, this typically represents the value of the
+ // quality parameter passed to the compression software. If set to -1,
+ // drivers use the default quality value.
+
+ d.writeUInt(0); // dwSampleSize
+ // Specifies the size of a single sample of data. This is set to zero
+ // if the samples can vary in size. If this number is nonzero, then
+ // multiple samples of data can be grouped into a single chunk within
+ // the file. If it is zero, each sample of data (such as a video frame)
+ // must be in a separate chunk. For video streams, this number is
+ // typically zero, although it can be nonzero if all video frames are
+ // the same size. For audio streams, this number should be the same as
+ // the nBlockAlign member of the WAVEFORMATEX structure describing the
+ // audio.
+
+ d.writeUShort(0); // rcFrame.left
+ d.writeUShort(0); // rcFrame.top
+ d.writeUShort(imgWidth); // rcFrame.right
+ d.writeUShort(imgHeight); // rcFrame.bottom
+ // Specifies the destination rectangle for a text or video stream within
+ // the movie rectangle specified by the dwWidth and dwHeight members of
+ // the AVI main header structure. The rcFrame member is typically used
+ // in support of multiple video streams. Set this rectangle to the
+ // coordinates corresponding to the movie rectangle to update the whole
+ // movie rectangle. Units for this member are pixels. The upper-left
+ // corner of the destination rectangle is relative to the upper-left
+ // corner of the movie rectangle.
+
+ /* Write BITMAPINFOHEADER Data into AVI Stream Format Chunk
+ * -------------
+ * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
+ typedef struct tagBITMAPINFOHEADER {
+ DWORD biSize;
+ LONG biWidth;
+ LONG biHeight;
+ WORD biPlanes;
+ WORD biBitCount;
+ DWORD biCompression;
+ DWORD biSizeImage;
+ LONG biXPelsPerMeter;
+ LONG biYPelsPerMeter;
+ DWORD biClrUsed;
+ DWORD biClrImportant;
+ } BITMAPINFOHEADER;
+ */
+ strfChunk.seekToStartOfData();
+ d = strfChunk.getOutputStream();
+ d.writeUInt(40); // biSize
+ // Specifies the number of bytes required by the structure. This value
+ // does not include the size of the color table or the size of the color
+ // masks, if they are appended to the end of the structure.
+
+ d.writeInt(imgWidth); // biWidth
+ // Specifies the width of the bitmap, in pixels.
+
+ d.writeInt(imgHeight); // biHeight
+ // Specifies the height of the bitmap, in pixels.
+ //
+ // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
+ // a bottom-up DIB with the origin at the lower left corner. If biHeight
+ // is negative, the bitmap is a top-down DIB with the origin at the
+ // upper left corner.
+ // For YUV bitmaps, the bitmap is always top-down, regardless of the
+ // sign of biHeight. Decoders should offer YUV formats with positive
+ // biHeight, but for backward compatibility they should accept YUV
+ // formats with either positive or negative biHeight.
+ // For compressed formats, biHeight must be positive, regardless of
+ // image orientation.
+
+ d.writeShort(1); // biPlanes
+ // Specifies the number of planes for the target device. This value must
+ // be set to 1.
+
+ d.writeShort(imgDepth); // biBitCount
+ // Specifies the number of bits per pixel (bpp). For uncompressed
+ // formats, this value is the average number of bits per pixel. For
+ // compressed formats, this value is the implied bit depth of the
+ // uncompressed image, after the image has been decoded.
+
+ switch (videoFormat) {
+ case RAW:
+ default:
+ d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
+ break;
+ case RLE:
+ if (imgDepth == 8) {
+ d.writeInt(1); // biCompression - BI_RLE8
+ } else if (imgDepth == 4) {
+ d.writeInt(2); // biCompression - BI_RLE4
+ } else {
+ throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
+ }
+ break;
+ case JPG:
+ d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
+ break;
+ case PNG:
+ d.writeType("png "); // biCompression - png for PNG
+ break;
+ }
+ // For compressed video and YUV formats, this member is a FOURCC code,
+ // specified as a DWORD in little-endian order. For example, YUYV video
+ // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
+ // Codes.
+ //
+ // For uncompressed RGB formats, the following values are possible:
+ //
+ // Value Description
+ // BI_RGB 0x00000000 Uncompressed RGB.
+ // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks.
+ // Valid for 16-bpp and 32-bpp bitmaps.
+ //
+ // Note that BI_JPG and BI_PNG are not valid video formats.
+ //
+ // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
+ // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
+ // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
+ // structure to determine the specific RGB type.
+ + switch (videoFormat) { + case RAW: + d.writeInt(0); // biSizeImage + break; + case RLE: + case JPG: + case PNG: + default: + if (imgDepth == 4) { + d.writeInt(imgWidth * imgHeight / 2); // biSizeImage + } else { + int bytesPerPixel = Math.max(1, imgDepth / 8); + d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage + } + break; + } + // Specifies the size, in bytes, of the image. This can be set to 0 for + // uncompressed RGB bitmaps. + + d.writeInt(0); // biXPelsPerMeter + // Specifies the horizontal resolution, in pixels per meter, of the + // target device for the bitmap. + + d.writeInt(0); // biYPelsPerMeter + // Specifies the vertical resolution, in pixels per meter, of the target + // device for the bitmap. + + d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed + // Specifies the number of color indices in the color table that are + // actually used by the bitmap. + + d.writeInt(0); // biClrImportant + // Specifies the number of color indices that are considered important + // for displaying the bitmap. If this value is zero, all colors are + // important. + + if (palette != null) { + for (int i = 0, n = palette.getMapSize(); i < n; ++i) { + /* + * typedef struct tagRGBQUAD { + BYTE rgbBlue; + BYTE rgbGreen; + BYTE rgbRed; + BYTE rgbReserved; // This member is reserved and must be zero. + } RGBQUAD; + */ + d.write(palette.getBlue(i)); + d.write(palette.getGreen(i)); + d.write(palette.getRed(i)); + d.write(0); + } + } + + + // ----------------- + aviChunk.finish(); + } +} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/video/AVIVideoRecorder.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/com/aurellem/capture/video/AVIVideoRecorder.java Wed Oct 26 08:54:12 2011 -0700 @@ -0,0 +1,46 @@ +package com.aurellem.capture.video; + +import java.awt.image.BufferedImage; +import java.io.File; +import java.io.IOException; + + +public class AVIVideoRecorder extends AbstractVideoRecorder{ + + AVIOutputStream out = null; + boolean videoReady = false; + BufferedImage frame; + + public AVIVideoRecorder(File output) throws IOException { + super(output); + this.out = new AVIOutputStream(output, AVIOutputStream.VideoFormat.PNG, 24); + this.out.setVideoCompressionQuality(1.0f); + } + + + public void initVideo (){ + frame = new BufferedImage( + width, height, + BufferedImage.TYPE_INT_RGB); + out.setFrameRate((int) Math.round(this.fps)); + out.setTimeScale(1); + out.setVideoDimension(width, height); + this.videoReady = true; + } + + public void record(BufferedImage rawFrame) { + if (!videoReady){initVideo();} + this.frame.getGraphics().drawImage(rawFrame, 0, 0, null); + try {out.writeFrame(frame);} + catch (IOException e){e.printStackTrace();} + } + + public void finish() { + System.out.println("I'm finished! 
<3"); + try {out.close();} + catch (IOException e) {e.printStackTrace();} + } + + + +} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/video/AbstractVideoRecorder.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/com/aurellem/capture/video/AbstractVideoRecorder.java Wed Oct 26 08:54:12 2011 -0700 @@ -0,0 +1,144 @@ +package com.aurellem.capture.video; + +import java.awt.image.BufferedImage; +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; + +import com.aurellem.capture.IsoTimer; +import com.jme3.app.Application; +import com.jme3.app.state.AppState; +import com.jme3.app.state.AppStateManager; +import com.jme3.post.SceneProcessor; +import com.jme3.renderer.Camera; +import com.jme3.renderer.RenderManager; +import com.jme3.renderer.ViewPort; +import com.jme3.renderer.queue.RenderQueue; +import com.jme3.texture.FrameBuffer; +import com.jme3.util.BufferUtils; +import com.jme3.util.Screenshots; + +/** + * VideoProcessor copies the frames it receives to video. + * To ensure smooth video at a constant framerate, you should set your + * application's timer to a new {@link IsoTimer}. This class will + * auto-determine the framerate of the video based on the time difference + * between the first two frames it receives, although you can manually set + * the framerate by calling setFps(newFramerate). Be sure to + * place this processor *after* any other processors whose effects you want + * to be included in the output video. You can attach multiple + * VideoProcessors to the same ViewPort. + * + * For example, + * + * someViewPort.addProcessor(new VideoProcessor(file1)); + * someViewPort.addProcessor(someShadowRenderer); + * someViewPort.addProcessor(new VideoProcessor(file2)); + * + * + * will output a video without shadows to file1 and a video + * with shadows to file2 + * + * @author Robert McIntyre + * + */ + +public abstract class AbstractVideoRecorder + implements SceneProcessor, IVideoRecorder, AppState{ + + final File output; + Camera camera; + int width; + int height; + String targetFileName; + FrameBuffer frameBuffer; + Double fps = null; + RenderManager renderManager; + ByteBuffer byteBuffer; + BufferedImage rawFrame; + boolean isInitilized = false; + boolean paused = false; + + public AbstractVideoRecorder(File output) throws IOException { + this.output = output; + this.targetFileName = this.output.getCanonicalPath(); + } + + + public double getFps() {return this.fps;} + + public AbstractVideoRecorder setFps(double fps) { + this.fps = fps; + return this; + } + + public void initialize(RenderManager rm, ViewPort viewPort) { + Camera camera = viewPort.getCamera(); + this.width = camera.getWidth(); + this.height = camera.getHeight(); + + rawFrame = new BufferedImage(width, height, + BufferedImage.TYPE_4BYTE_ABGR); + byteBuffer = BufferUtils.createByteBuffer(width * height * 4 ); + this.renderManager = rm; + this.isInitilized = true; + } + + public void reshape(ViewPort vp, int w, int h) {} + + public boolean isInitialized() {return this.isInitilized;} + + public void preFrame(float tpf) { + if (null == this.fps){ + this.setFps(1.0 / tpf);} + } + + public void postQueue(RenderQueue rq) {} + + public void postFrame(FrameBuffer out) { + if (!this.paused){ + byteBuffer.clear(); + renderManager.getRenderer().readFrameBuffer(out, byteBuffer); + Screenshots.convertScreenShot(byteBuffer, rawFrame); + record(rawFrame); + } + } + + public void cleanup(){ + this.pause(); + this.finish(); + }; + + public void pause(){ + this.paused = true; + } + + 
public void start(){
+ this.paused = false;
+ }
+
+ // methods from AppState
+ public void initialize(AppStateManager stateManager, Application app) {}
+
+ public void setEnabled(boolean active) {
+ if (active) {this.start();}
+ else {this.pause();}
+ }
+
+ public boolean isEnabled() {
+ // Recording is enabled whenever it is not paused.
+ return !this.paused;
+ }
+
+ public void stateAttached(AppStateManager stateManager) {}
+
+ public void stateDetached(AppStateManager stateManager) {
+ this.pause();
+ this.finish();
+ }
+
+ public void update(float tpf) {}
+ public void render(RenderManager rm) {}
+ public void postRender() {}
+
+}
diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/video/DataChunkOutputStream.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/com/aurellem/capture/video/DataChunkOutputStream.java	Wed Oct 26 08:54:12 2011 -0700
@@ -0,0 +1,217 @@
+/**
+ * @(#)DataChunkOutputStream.java  1.1  2011-01-17
+ *
+ * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
+ * All rights reserved.
+ *
+ * You may not use, copy or modify this file, except in compliance with the
+ * license agreement you entered into with Werner Randelshofer.
+ * For details see accompanying license terms.
+ */
+package com.aurellem.capture.video;
+
+import java.io.*;
+
+/**
+ * This output stream filter supports common data types used inside
+ * of AVI RIFF Data Chunks.
+ *
+ * @author Werner Randelshofer
+ * @version 1.1 2011-01-17 Adds functionality for blocking flush and close.
+ * <br>
1.0.1 2010-04-05 Removed unused constants. + *
1.0 2008-08-11 Created. + */ +public class DataChunkOutputStream extends FilterOutputStream { + + /** + * The number of bytes written to the data output stream so far. + * If this counter overflows, it will be wrapped to Integer.MAX_VALUE. + */ + protected long written; + + /** Whether flush and close request shall be forwarded to underlying stream.*/ + private boolean forwardFlushAndClose; + + public DataChunkOutputStream(OutputStream out) { + this(out,true); + } + public DataChunkOutputStream(OutputStream out, boolean forwardFlushAndClose) { + super(out); + this.forwardFlushAndClose=forwardFlushAndClose; + } + + /** + * Writes an chunk type identifier (4 bytes). + * @param s A string with a length of 4 characters. + */ + public void writeType(String s) throws IOException { + if (s.length() != 4) { + throw new IllegalArgumentException("type string must have 4 characters"); + } + + try { + out.write(s.getBytes("ASCII"), 0, 4); + incCount(4); + } catch (UnsupportedEncodingException e) { + throw new InternalError(e.toString()); + } + } + + /** + * Writes out a byte to the underlying output stream as + * a 1-byte value. If no exception is thrown, the counter + * written is incremented by 1. + * + * @param v a byte value to be written. + * @exception IOException if an I/O error occurs. + * @see java.io.FilterOutputStream#out + */ + public final void writeByte(int v) throws IOException { + out.write(v); + incCount(1); + } + + /** + * Writes len bytes from the specified byte array + * starting at offset off to the underlying output stream. + * If no exception is thrown, the counter written is + * incremented by len. + * + * @param b the data. + * @param off the start offset in the data. + * @param len the number of bytes to write. + * @exception IOException if an I/O error occurs. + * @see java.io.FilterOutputStream#out + */ + @Override + public synchronized void write(byte b[], int off, int len) + throws IOException { + out.write(b, off, len); + incCount(len); + } + + /** + * Writes the specified byte (the low eight bits of the argument + * b) to the underlying output stream. If no exception + * is thrown, the counter written is incremented by + * 1. + *
+ * <p>
+ * Implements the write method of OutputStream. + * + * @param b the byte to be written. + * @exception IOException if an I/O error occurs. + * @see java.io.FilterOutputStream#out + */ + @Override + public synchronized void write(int b) throws IOException { + out.write(b); + incCount(1); + } + + /** + * Writes an int to the underlying output stream as four + * bytes, high byte first. If no exception is thrown, the counter + * written is incremented by 4. + * + * @param v an int to be written. + * @exception IOException if an I/O error occurs. + * @see java.io.FilterOutputStream#out + */ + public void writeInt(int v) throws IOException { + out.write((v >>> 0) & 0xff); + out.write((v >>> 8) & 0xff); + out.write((v >>> 16) & 0xff); + out.write((v >>> 24) & 0xff); + incCount(4); + } + + /** + * Writes an unsigned 32 bit integer value. + * + * @param v The value + * @throws java.io.IOException + */ + public void writeUInt(long v) throws IOException { + out.write((int) ((v >>> 0) & 0xff)); + out.write((int) ((v >>> 8) & 0xff)); + out.write((int) ((v >>> 16) & 0xff)); + out.write((int) ((v >>> 24) & 0xff)); + incCount(4); + } + + /** + * Writes a signed 16 bit integer value. + * + * @param v The value + * @throws java.io.IOException + */ + public void writeShort(int v) throws IOException { + out.write((int) ((v >>> 0) & 0xff)); + out.write((int) ((v >> 8) & 0xff)); + incCount(2); + } + + public void writeLong(long v) throws IOException { + out.write((int) (v >>> 0) & 0xff); + out.write((int) (v >>> 8) & 0xff); + out.write((int) (v >>> 16) & 0xff); + out.write((int) (v >>> 24) & 0xff); + out.write((int) (v >>> 32) & 0xff); + out.write((int) (v >>> 40) & 0xff); + out.write((int) (v >>> 48) & 0xff); + out.write((int) (v >>> 56) & 0xff); + incCount(8); + } + + public void writeUShort(int v) throws IOException { + out.write((int) ((v >>> 0) & 0xff)); + out.write((int) ((v >> 8) & 0xff)); + incCount(2); + } + + /** + * Increases the written counter by the specified value + * until it reaches Long.MAX_VALUE. + */ + protected void incCount(int value) { + long temp = written + value; + if (temp < 0) { + temp = Long.MAX_VALUE; + } + written = temp; + } + + /** + * Returns the current value of the counter written, + * the number of bytes written to this data output stream so far. + * If the counter overflows, it will be wrapped to Integer.MAX_VALUE. + * + * @return the value of the written field. + * @see java.io.DataOutputStream#written + */ + public final long size() { + return written; + } + + /** + * Sets the value of the counter written to 0. + */ + public void clearCount() { + written = 0; + } + + @Override + public void close() throws IOException { + if (forwardFlushAndClose) { + super.close(); + } + } + + @Override + public void flush() throws IOException { + if (forwardFlushAndClose) { + super.flush(); + } + } + +} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/video/IVideoRecorder.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/com/aurellem/capture/video/IVideoRecorder.java Wed Oct 26 08:54:12 2011 -0700 @@ -0,0 +1,21 @@ +package com.aurellem.capture.video; + +import java.awt.image.BufferedImage; + +public interface IVideoRecorder{ + + void record(BufferedImage image); + + void pause(); + + void start(); + + /** + * closes the video file, writing appropriate headers, trailers, etc. + * After this is called, no more recording can be done. 
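+ * (In this project, AVIVideoRecorder.finish() closes the underlying
+ * AVIOutputStream, which writes the AVI index and header data.)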
+ */ + void finish(); + +} + + diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/video/ImageOutputStreamAdapter.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/com/aurellem/capture/video/ImageOutputStreamAdapter.java Wed Oct 26 08:54:12 2011 -0700 @@ -0,0 +1,144 @@ +/* + * @(#)ImageOutputStreamAdapter.java 1.1 2011-01-07 + * + * Copyright © 2010 Werner Randelshofer, Immensee, Switzerland. + * All rights reserved. + * + * You may not use, copy or modify this file, except in compliance with the + * license agreement you entered into with Werner Randelshofer. + * For details see accompanying license terms. + */ +package com.aurellem.capture.video; + +import java.io.FilterOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import javax.imageio.stream.ImageOutputStream; + +/** + * Adapts an {@code ImageOutputStream} for classes requiring an + * {@code OutputStream}. + * + * @author Werner Randelshofer + * @version 1.1 2011-01-07 Fixes performance. + *
1.0 2010-12-26 Created. + */ +public class ImageOutputStreamAdapter extends OutputStream { + + /** + * The underlying output stream to be filtered. + */ + protected ImageOutputStream out; + + /** + * Creates an output stream filter built on top of the specified + * underlying output stream. + * + * @param out the underlying output stream to be assigned to + * the field this.out for later use, or + * null if this instance is to be + * created without an underlying stream. + */ + public ImageOutputStreamAdapter(ImageOutputStream out) { + this.out = out; + } + + /** + * Writes the specified byte to this output stream. + *
+ * <p>
+ * The write method of FilterOutputStream + * calls the write method of its underlying output stream, + * that is, it performs out.write(b). + *
+ * <p>
+ * Implements the abstract write method of OutputStream. + * + * @param b the byte. + * @exception IOException if an I/O error occurs. + */ + @Override + public void write(int b) throws IOException { + out.write(b); + } + + /** + * Writes b.length bytes to this output stream. + *
+ * <p>
+ * The write method of FilterOutputStream + * calls its write method of three arguments with the + * arguments b, 0, and + * b.length. + *
+ * <p>
+ * Note that this method does not call the one-argument + * write method of its underlying stream with the single + * argument b. + * + * @param b the data to be written. + * @exception IOException if an I/O error occurs. + * @see java.io.FilterOutputStream#write(byte[], int, int) + */ + @Override + public void write(byte b[]) throws IOException { + write(b, 0, b.length); + } + + /** + * Writes len bytes from the specified + * byte array starting at offset off to + * this output stream. + *
+ * <p>
+ * The write method of FilterOutputStream + * calls the write method of one argument on each + * byte to output. + *
+ * <p>
+ * Note that this method does not call the write method + * of its underlying input stream with the same arguments. Subclasses + * of FilterOutputStream should provide a more efficient + * implementation of this method. + * + * @param b the data. + * @param off the start offset in the data. + * @param len the number of bytes to write. + * @exception IOException if an I/O error occurs. + * @see java.io.FilterOutputStream#write(int) + */ + @Override + public void write(byte b[], int off, int len) throws IOException { + out.write(b,off,len); + } + + /** + * Flushes this output stream and forces any buffered output bytes + * to be written out to the stream. + *
+ * <p>
+ * The flush method of FilterOutputStream + * calls the flush method of its underlying output stream. + * + * @exception IOException if an I/O error occurs. + * @see java.io.FilterOutputStream#out + */ + @Override + public void flush() throws IOException { + out.flush(); + } + + /** + * Closes this output stream and releases any system resources + * associated with the stream. + *
+ * <p>
+ * The close method of FilterOutputStream + * calls its flush method, and then calls the + * close method of its underlying output stream. + * + * @exception IOException if an I/O error occurs. + * @see java.io.FilterOutputStream#flush() + * @see java.io.FilterOutputStream#out + */ + @Override + public void close() throws IOException { + try { + flush(); + } finally { + out.close(); + } + } +} diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/video/MicrosoftRLEEncoder.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/com/aurellem/capture/video/MicrosoftRLEEncoder.java Wed Oct 26 08:54:12 2011 -0700 @@ -0,0 +1,402 @@ +/* + * @(#)AppleRLEEncoder.java 1.1.1 2011-01-17 + * + * Copyright © 2011 Werner Randelshofer, Immensee, Switzerland. + * All rights reserved. + * + * You may not use, copy or modify this file, except in compliance with the + * license agreement you entered into with Werner Randelshofer. + * For details see accompanying license terms. + */ +package com.aurellem.capture.video; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.Arrays; + +import com.aurellem.capture.audio.SeekableByteArrayOutputStream; + +/** + * Implements the run length encoding of the Microsoft RLE format. + *
+ * <p>
+ * Each line of a frame is compressed individually. A line consists of two-byte + * op-codes optionally followed by data. The end of the line is marked with + * the EOL op-code. + *
+ * <p>
+ * The following op-codes are supported:
+ * <ul>
+ * <li>{@code 0x00 0x00}
+ * <br>Marks the end of a line.</li>
+ *
+ * <li>{@code 0x00 0x01}
+ * <br>Marks the end of the bitmap.</li>
+ *
+ * <li>{@code 0x00 0x02 x y}
+ * <br>Marks a delta (skip). {@code x} and {@code y}
+ * indicate the horizontal and vertical offset from the current position.
+ * {@code x} and {@code y} are unsigned 8-bit values.</li>
+ *
+ * <li>{@code 0x00 n data{n} 0x00?}
+ * <br>Marks a literal run. {@code n}
+ * gives the number of data bytes that follow. {@code n} must be between 3 and
+ * 255. If n is odd, a pad byte with the value 0x00 must be added.</li>
+ *
+ * <li>{@code n data}
+ * <br>Marks a repetition. {@code n}
+ * gives the number of times the data byte is repeated. {@code n} must be
+ * between 1 and 255.</li>
+ * </ul>
+ * Example: + *
+ * Compressed data         Expanded data
+ *
+ * 03 04                   04 04 04
+ * 05 06                   06 06 06 06 06
+ * 00 03 45 56 67 00       45 56 67
+ * 02 78                   78 78
+ * 00 02 05 01             Move 5 right and 1 down
+ * 02 78                   78 78
+ * 00 00                   End of line
+ * 09 1E                   1E 1E 1E 1E 1E 1E 1E 1E 1E
+ * 00 01                   End of RLE bitmap
+ * 
+ * + * References:
+ * http://wiki.multimedia.cx/index.php?title=Microsoft_RLE
+ * + * @author Werner Randelshofer + * @version 1.1.1 2011-01-17 Removes unused imports. + *
1.1 2011-01-07 Improves performance. + *
1.0 2011-01-05 Created. + */ +public class MicrosoftRLEEncoder { + + private SeekableByteArrayOutputStream tempSeek=new SeekableByteArrayOutputStream(); + private DataChunkOutputStream temp=new DataChunkOutputStream(tempSeek); + + /** Encodes a 8-bit key frame. + * + * @param temp The output stream. Must be set to Big-Endian. + * @param data The image data. + * @param offset The offset to the first pixel in the data array. + * @param length The width of the image in data elements. + * @param step The number to add to offset to get to the next scanline. + */ + public void writeKey8(OutputStream out, byte[] data, int offset, int length, int step, int height) + throws IOException { + tempSeek.reset(); + int ymax = offset + height * step; + int upsideDown = ymax-step+offset; + + // Encode each scanline separately + for (int y = offset; y < ymax; y += step) { + int xy = upsideDown-y; + int xymax = xy + length; + + int literalCount = 0; + int repeatCount = 0; + for (; xy < xymax; ++xy) { + // determine repeat count + byte v = data[xy]; + for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) { + if (data[xy] != v) { + break; + } + } + xy -= repeatCount; + if (repeatCount < 3) { + literalCount++; + if (literalCount == 254) { + temp.write(0);temp.write(literalCount); // Literal OP-code + temp.write(data, xy - literalCount + 1, literalCount); + literalCount = 0; + } + } else { + if (literalCount > 0) { + if (literalCount < 3) { + for (; literalCount > 0; --literalCount) { + temp.write(1); // Repeat OP-code + temp.write(data[xy - literalCount]); + } + } else { + temp.write(0);temp.write(literalCount); // Literal OP-code + temp.write(data, xy - literalCount, literalCount); + if (literalCount % 2 == 1) { + temp.write(0); // pad byte + } + literalCount = 0; + } + } + temp.write(repeatCount); // Repeat OP-code + temp.write(v); + xy += repeatCount - 1; + } + } + + // flush literal run + if (literalCount > 0) { + if (literalCount < 3) { + for (; literalCount > 0; --literalCount) { + temp.write(1); // Repeat OP-code + temp.write(data[xy - literalCount]); + } + } else { + temp.write(0);temp.write(literalCount); + temp.write(data, xy - literalCount, literalCount); + if (literalCount % 2 == 1) { + temp.write(0); // pad byte + } + } + literalCount = 0; + } + + temp.write(0);temp.write(0x0000);// End of line + } + temp.write(0);temp.write(0x0001);// End of bitmap + tempSeek.toOutputStream(out); + } + + /** Encodes a 8-bit delta frame. + * + * @param temp The output stream. Must be set to Big-Endian. + * @param data The image data. + * @param prev The image data of the previous frame. + * @param offset The offset to the first pixel in the data array. + * @param length The width of the image in data elements. + * @param step The number to add to offset to get to the next scanline. 
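+ * @param height The number of scanlines to encode.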
+ */ + public void writeDelta8(OutputStream out, byte[] data, byte[] prev, int offset, int length, int step, int height) + throws IOException { + +tempSeek.reset(); + // Determine whether we can skip lines at the beginning + int ymin; + int ymax = offset + height * step; + int upsideDown = ymax-step+offset; + scanline: + for (ymin = offset; ymin < ymax; ymin += step) { + int xy = upsideDown-ymin; + int xymax = xy + length; + for (; xy < xymax; ++xy) { + if (data[xy] != prev[xy]) { + break scanline; + } + } + } + + if (ymin == ymax) { + // => Frame is identical to previous one + temp.write(0);temp.write(0x0001); // end of bitmap + return; + } + + if (ymin > offset) { + int verticalOffset = ymin / step; + while (verticalOffset > 255) { + temp.write(0);temp.write(0x0002); // Skip OP-code + temp.write(0); // horizontal offset + temp.write(255); // vertical offset + verticalOffset -= 255; + } + if (verticalOffset == 1) { + temp.write(0);temp.write(0x0000); // End of line OP-code + } else { + temp.write(0);temp.write(0x0002); // Skip OP-code + temp.write(0); // horizontal offset + temp.write(verticalOffset); // vertical offset + } + } + + + // Determine whether we can skip lines at the end + scanline: + for (; ymax > ymin; ymax -= step) { + int xy = upsideDown-ymax+step; + int xymax = xy + length; + for (; xy < xymax; ++xy) { + if (data[xy] != prev[xy]) { + break scanline; + } + } + } + //System.out.println("MicrosoftRLEEncoder ymin:" + ymin / step + " ymax" + ymax / step); + + + // Encode each scanline + int verticalOffset = 0; + for (int y = ymin; y < ymax; y += step) { + int xy = upsideDown-y; + int xymax = xy + length; + + // determine skip count + int skipCount = 0; + for (; xy < xymax; ++xy, ++skipCount) { + if (data[xy] != prev[xy]) { + break; + } + } + if (skipCount == length) { + // => the entire line can be skipped + ++verticalOffset; + if (verticalOffset == 255) { + temp.write(0);temp.write(0x0002); // Skip OP-code + temp.write(0); // horizontal offset + temp.write(255); // vertical offset + verticalOffset = 0; + } + continue; + } + + if (verticalOffset > 0 || skipCount > 0) { + if (verticalOffset == 1 && skipCount == 0) { + temp.write(0);temp.write(0x0000); // End of line OP-code + } else { + temp.write(0);temp.write(0x0002); // Skip OP-code + temp.write(Math.min(255, skipCount)); // horizontal offset + skipCount -= 255; + temp.write(verticalOffset); // vertical offset + } + verticalOffset = 0; + } + while (skipCount > 0) { + temp.write(0);temp.write(0x0002); // Skip OP-code + temp.write(Math.min(255, skipCount)); // horizontal offset + temp.write(0); // vertical offset + skipCount -= 255; + } + + int literalCount = 0; + int repeatCount = 0; + for (; xy < xymax; ++xy) { + // determine skip count + for (skipCount = 0; xy < xymax; ++xy, ++skipCount) { + if (data[xy] != prev[xy]) { + break; + } + } + xy -= skipCount; + + // determine repeat count + byte v = data[xy]; + for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) { + if (data[xy] != v) { + break; + } + } + xy -= repeatCount; + + if (skipCount < 4 && xy + skipCount < xymax && repeatCount < 3) { + literalCount++; + if (literalCount == 254) { + temp.write(0);temp.write(literalCount); // Literal OP-code + temp.write(data, xy - literalCount + 1, literalCount); + literalCount = 0; + } + } else { + if (literalCount > 0) { + if (literalCount < 3) { + for (; literalCount > 0; --literalCount) { + temp.write(1); // Repeat OP-code + temp.write(data[xy - literalCount]); + } + } else { + 
+                            temp.write(0); temp.write(literalCount); // Literal OP-code
+                            temp.write(data, xy - literalCount, literalCount);
+                            if (literalCount % 2 == 1) {
+                                temp.write(0); // pad byte
+                            }
+                        }
+                        literalCount = 0;
+                    }
+                    if (xy + skipCount == xymax) {
+                        // => we can skip until the end of the line without
+                        //    having to write an op-code
+                        xy += skipCount - 1;
+                    } else if (skipCount >= repeatCount) {
+                        while (skipCount > 255) {
+                            temp.write(0); temp.write(0x0002); // Skip OP-code
+                            temp.write(255); // horizontal offset
+                            temp.write(0);   // vertical offset
+                            xy += 255;
+                            skipCount -= 255;
+                        }
+                        temp.write(0); temp.write(0x0002); // Skip OP-code
+                        temp.write(skipCount); // horizontal offset
+                        temp.write(0);         // vertical offset
+                        xy += skipCount - 1;
+                    } else {
+                        temp.write(repeatCount); // Repeat OP-code
+                        temp.write(v);
+                        xy += repeatCount - 1;
+                    }
+                }
+            }
+
+            // flush literal run
+            if (literalCount > 0) {
+                if (literalCount < 3) {
+                    for (; literalCount > 0; --literalCount) {
+                        temp.write(1); // Repeat OP-code
+                        temp.write(data[xy - literalCount]);
+                    }
+                } else {
+                    temp.write(0); temp.write(literalCount); // Literal OP-code
+                    temp.write(data, xy - literalCount, literalCount);
+                    if (literalCount % 2 == 1) {
+                        temp.write(0); // pad byte
+                    }
+                }
+            }
+
+            temp.write(0); temp.write(0x0000); // End-of-line OP-code
+        }
+
+        temp.write(0); temp.write(0x0001); // End-of-bitmap OP-code
+        tempSeek.toOutputStream(out);
+    }
+
+    public static void main(String[] args) {
+        byte[] data = {//
+            8, 2, 3, 4, 4, 3, 7, 7, 7, 8,//
+            8, 1, 1, 1, 1, 2, 7, 7, 7, 8,//
+            8, 0, 2, 0, 0, 0, 7, 7, 7, 8,//
+            8, 2, 2, 3, 4, 4, 7, 7, 7, 8,//
+            8, 1, 4, 4, 4, 5, 7, 7, 7, 8};
+
+        byte[] prev = {//
+            8, 3, 3, 3, 3, 3, 7, 7, 7, 8,//
+            8, 1, 1, 1, 1, 1, 7, 7, 7, 8,//
+            8, 5, 5, 5, 5, 0, 7, 7, 7, 8,//
+            8, 2, 2, 0, 0, 0, 7, 7, 7, 8,//
+            8, 2, 0, 0, 0, 5, 7, 7, 7, 8};
+        ByteArrayOutputStream buf = new ByteArrayOutputStream();
+        DataChunkOutputStream out = new DataChunkOutputStream(buf);
+        MicrosoftRLEEncoder enc = new MicrosoftRLEEncoder();
+
+        try {
+            enc.writeDelta8(out, data, prev, 1, 8, 10, 5);
+            //enc.writeKey8(out, data, 1, 8, 10, 5);
+            out.close();
+
+            byte[] result = buf.toByteArray();
+            System.out.println("size:" + result.length);
+            System.out.println(Arrays.toString(result));
+            System.out.print("0x [");
+
+            for (int i = 0; i < result.length; i++) {
+                if (i != 0) {
+                    System.out.print(',');
+                }
+                String hex = "00" + Integer.toHexString(result[i]);
+                System.out.print(hex.substring(hex.length() - 2));
+            }
+            System.out.println(']');
+
+        } catch (IOException ex) {
+            ex.printStackTrace();
+        }
+    }
+}
diff -r dde12be02029 -r 5dfc9e768816 src/com/aurellem/capture/video/XuggleVideoRecorder.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/com/aurellem/capture/video/XuggleVideoRecorder.java Wed Oct 26 08:54:12 2011 -0700
@@ -0,0 +1,53 @@
+package com.aurellem.capture.video;
+
+/**
+ * Handles writing video files using Xuggle.
+ *
+ * @author Robert McIntyre
+ */
+/*
+public class XuggleVideoRecorder extends AbstractVideoRecorder {
+
+    IMediaWriter writer;
+    BufferedImage frame;
+    int videoChannel = 0;
+    long currentTimeStamp = 0;
+    boolean videoReady = false;
+
+    public XuggleVideoRecorder(File output) throws IOException { super(output); }
+
+    public void initVideo() {
+        this.frame = new BufferedImage(
+                width, height,
+                BufferedImage.TYPE_3BYTE_BGR);
+        this.writer = ToolFactory.makeWriter(this.targetFileName);
+        writer.addVideoStream(videoChannel,
+                0, IRational.make(fps),
+                width, height);
+        this.videoReady = true;
+    }
+
+    public void record(BufferedImage rawFrame) {
+        if (!this.videoReady) { initVideo(); }
+        // convert the Image into the form that Xuggle likes.
+        this.frame.getGraphics().drawImage(rawFrame, 0, 0, null);
+        writer.encodeVideo(videoChannel,
+                frame,
+                currentTimeStamp, TimeUnit.NANOSECONDS);
+
+        currentTimeStamp += (long) (1000000000.0 / fps);
+    }
+
+    public void finish() {
+        writer.close();
+    }
+
+}
+
+*/
diff -r dde12be02029 -r 5dfc9e768816 test.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test.sh Wed Oct 26 08:54:12 2011 -0700
@@ -0,0 +1,11 @@
+
+sound=/home/r/tmp/data1.wav
+
+if [ -e "$sound" ]
+then
+    aplay "$sound"
+    rm "$sound"
+else
+    echo "$sound does not exist."
+fi
+