Mercurial > jmeCapture
changeset 3:a92de00f0414
migrating files
line wrap: on
line diff
1.1 --- a/build.xml Tue Oct 25 11:18:59 2011 -0700 1.2 +++ b/build.xml Tue Oct 25 11:55:55 2011 -0700 1.3 @@ -8,6 +8,7 @@ 1.4 1.5 <path id="classpath"> 1.6 <pathelement path="${lib}/jme"/> 1.7 + <pathelement path="${lib}/lwjgl.jar"/> 1.8 </path> 1.9 1.10 <target name="prepare">
2.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 2.2 +++ b/src/com/aurellem/capture/AVIOutputStream.java Tue Oct 25 11:55:55 2011 -0700 2.3 @@ -0,0 +1,1548 @@ 2.4 +/** 2.5 + * @(#)AVIOutputStream.java 1.5.1 2011-01-17 2.6 + * 2.7 + * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland. 2.8 + * All rights reserved. 2.9 + * 2.10 + * You may not use, copy or modify this file, except in compliance with the 2.11 + * license agreement you entered into with Werner Randelshofer. 2.12 + * For details see accompanying license terms. 2.13 + */ 2.14 +package com.aurellem.capture; 2.15 + 2.16 +import java.awt.Dimension; 2.17 +import java.awt.image.BufferedImage; 2.18 +import java.awt.image.DataBufferByte; 2.19 +import java.awt.image.IndexColorModel; 2.20 +import java.awt.image.WritableRaster; 2.21 +import java.io.File; 2.22 +import java.io.FileInputStream; 2.23 +import java.io.IOException; 2.24 +import java.io.InputStream; 2.25 +import java.io.OutputStream; 2.26 +import java.util.Arrays; 2.27 +import java.util.Date; 2.28 +import java.util.LinkedList; 2.29 + 2.30 +import javax.imageio.IIOImage; 2.31 +import javax.imageio.ImageIO; 2.32 +import javax.imageio.ImageWriteParam; 2.33 +import javax.imageio.ImageWriter; 2.34 +import javax.imageio.stream.FileImageOutputStream; 2.35 +import javax.imageio.stream.ImageOutputStream; 2.36 +import javax.imageio.stream.MemoryCacheImageOutputStream; 2.37 + 2.38 +/** 2.39 + * This class supports writing of images into an AVI 1.0 video file. 2.40 + * <p> 2.41 + * The images are written as video frames. 2.42 + * <p> 2.43 + * Video frames can be encoded with one of the following formats: 2.44 + * <ul> 2.45 + * <li>JPEG</li> 2.46 + * <li>PNG</li> 2.47 + * <li>RAW</li> 2.48 + * <li>RLE</li> 2.49 + * </ul> 2.50 + * All frames must have the same format. 2.51 + * When JPG is used each frame can have an individual encoding quality. 2.52 + * <p> 2.53 + * All frames in an AVI file must have the same duration. 
The duration can 2.54 + * be set by setting an appropriate pair of values using methods 2.55 + * {@link #setFrameRate} and {@link #setTimeScale}. 2.56 + * <p> 2.57 + * The length of an AVI 1.0 file is limited to 1 GB. 2.58 + * This class supports lengths of up to 4 GB, but such files may not work on 2.59 + * all players. 2.60 + * <p> 2.61 + * For detailed information about the AVI RIFF file format see:<br> 2.62 + * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br> 2.63 + * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br> 2.64 + * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br> 2.65 + * 2.66 + * @author Werner Randelshofer 2.67 + * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream.. 2.68 + * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format. 2.69 + * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets 2.70 + * in "idx1" chunk. 2.71 + * <br>1.3.2 2010-12-27 File size limit is 1 GB. 2.72 + * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets. 2.73 + * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream. 2.74 + * Added method getVideoDimension(). 2.75 + * <br>1.2 2009-08-29 Adds support for RAW video format. 2.76 + * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih 2.77 + * chunk. Changed the API to reflect that AVI works with frame rates instead of 2.78 + * with frame durations. 2.79 + * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG 2.80 + * encoded video. 2.81 + * <br>1.0 2008-08-11 Created. 2.82 + */ 2.83 +public class AVIOutputStream { 2.84 + 2.85 + /** 2.86 + * Underlying output stream. 2.87 + */ 2.88 + private ImageOutputStream out; 2.89 + /** The offset of the QuickTime stream in the underlying ImageOutputStream. 
2.90 + * Normally this is 0 unless the underlying stream already contained data 2.91 + * when it was passed to the constructor. 2.92 + */ 2.93 + private long streamOffset; 2.94 + /** Previous frame for delta compression. */ 2.95 + private Object previousData; 2.96 + 2.97 + /** 2.98 + * Supported video encodings. 2.99 + */ 2.100 + public static enum VideoFormat { 2.101 + 2.102 + RAW, RLE, JPG, PNG; 2.103 + } 2.104 + /** 2.105 + * Current video formats. 2.106 + */ 2.107 + private VideoFormat videoFormat; 2.108 + /** 2.109 + * Quality of JPEG encoded video frames. 2.110 + */ 2.111 + private float quality = 0.9f; 2.112 + /** 2.113 + * Creation time of the movie output stream. 2.114 + */ 2.115 + private Date creationTime; 2.116 + /** 2.117 + * Width of the video frames. All frames must have the same width. 2.118 + * The value -1 is used to mark unspecified width. 2.119 + */ 2.120 + private int imgWidth = -1; 2.121 + /** 2.122 + * Height of the video frames. All frames must have the same height. 2.123 + * The value -1 is used to mark unspecified height. 2.124 + */ 2.125 + private int imgHeight = -1; 2.126 + /** Number of bits per pixel. */ 2.127 + private int imgDepth = 24; 2.128 + /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */ 2.129 + private IndexColorModel palette; 2.130 + private IndexColorModel previousPalette; 2.131 + /** Video encoder. */ 2.132 + 2.133 + /** 2.134 + * The timeScale of the movie. 2.135 + * <p> 2.136 + * Used with frameRate to specify the time scale that this stream will use. 2.137 + * Dividing frameRate by timeScale gives the number of samples per second. 2.138 + * For video streams, this is the frame rate. For audio streams, this rate 2.139 + * corresponds to the time needed to play nBlockAlign bytes of audio, which 2.140 + * for PCM audio is the just the sample rate. 2.141 + */ 2.142 + private int timeScale = 1; 2.143 + /** 2.144 + * The frameRate of the movie in timeScale units. 
2.145 + * <p> 2.146 + * @see timeScale 2.147 + */ 2.148 + private int frameRate = 30; 2.149 + /** Interval between keyframes. */ 2.150 + private int syncInterval = 30; 2.151 + 2.152 + /** 2.153 + * The states of the movie output stream. 2.154 + */ 2.155 + private static enum States { 2.156 + 2.157 + STARTED, FINISHED, CLOSED; 2.158 + } 2.159 + /** 2.160 + * The current state of the movie output stream. 2.161 + */ 2.162 + private States state = States.FINISHED; 2.163 + 2.164 + /** 2.165 + * AVI stores media data in samples. 2.166 + * A sample is a single element in a sequence of time-ordered data. 2.167 + */ 2.168 + private static class Sample { 2.169 + 2.170 + String chunkType; 2.171 + /** Offset of the sample relative to the start of the AVI file. 2.172 + */ 2.173 + long offset; 2.174 + /** Data length of the sample. */ 2.175 + long length; 2.176 + /** 2.177 + * The duration of the sample in time scale units. 2.178 + */ 2.179 + int duration; 2.180 + /** Whether the sample is a sync-sample. */ 2.181 + boolean isSync; 2.182 + 2.183 + /** 2.184 + * Creates a new sample. 2.185 + * @param duration 2.186 + * @param offset 2.187 + * @param length 2.188 + */ 2.189 + public Sample(String chunkId, int duration, long offset, long length, boolean isSync) { 2.190 + this.chunkType = chunkId; 2.191 + this.duration = duration; 2.192 + this.offset = offset; 2.193 + this.length = length; 2.194 + this.isSync = isSync; 2.195 + } 2.196 + } 2.197 + /** 2.198 + * List of video frames. 2.199 + */ 2.200 + private LinkedList<Sample> videoFrames; 2.201 + /** 2.202 + * This chunk holds the whole AVI content. 2.203 + */ 2.204 + private CompositeChunk aviChunk; 2.205 + /** 2.206 + * This chunk holds the movie frames. 2.207 + */ 2.208 + private CompositeChunk moviChunk; 2.209 + /** 2.210 + * This chunk holds the AVI Main Header. 2.211 + */ 2.212 + FixedSizeDataChunk avihChunk; 2.213 + /** 2.214 + * This chunk holds the AVI Stream Header. 
2.215 + */ 2.216 + FixedSizeDataChunk strhChunk; 2.217 + /** 2.218 + * This chunk holds the AVI Stream Format Header. 2.219 + */ 2.220 + FixedSizeDataChunk strfChunk; 2.221 + 2.222 + /** 2.223 + * Chunk base class. 2.224 + */ 2.225 + private abstract class Chunk { 2.226 + 2.227 + /** 2.228 + * The chunkType of the chunk. A String with the length of 4 characters. 2.229 + */ 2.230 + protected String chunkType; 2.231 + /** 2.232 + * The offset of the chunk relative to the start of the 2.233 + * ImageOutputStream. 2.234 + */ 2.235 + protected long offset; 2.236 + 2.237 + /** 2.238 + * Creates a new Chunk at the current position of the ImageOutputStream. 2.239 + * @param chunkType The chunkType of the chunk. A string with a length of 4 characters. 2.240 + */ 2.241 + public Chunk(String chunkType) throws IOException { 2.242 + this.chunkType = chunkType; 2.243 + offset = getRelativeStreamPosition(); 2.244 + } 2.245 + 2.246 + /** 2.247 + * Writes the chunk to the ImageOutputStream and disposes it. 2.248 + */ 2.249 + public abstract void finish() throws IOException; 2.250 + 2.251 + /** 2.252 + * Returns the size of the chunk including the size of the chunk header. 2.253 + * @return The size of the chunk. 2.254 + */ 2.255 + public abstract long size(); 2.256 + } 2.257 + 2.258 + /** 2.259 + * A CompositeChunk contains an ordered list of Chunks. 2.260 + */ 2.261 + private class CompositeChunk extends Chunk { 2.262 + 2.263 + /** 2.264 + * The type of the composite. A String with the length of 4 characters. 2.265 + */ 2.266 + protected String compositeType; 2.267 + private LinkedList<Chunk> children; 2.268 + private boolean finished; 2.269 + 2.270 + /** 2.271 + * Creates a new CompositeChunk at the current position of the 2.272 + * ImageOutputStream. 2.273 + * @param compositeType The type of the composite. 2.274 + * @param chunkType The type of the chunk. 
2.275 + */ 2.276 + public CompositeChunk(String compositeType, String chunkType) throws IOException { 2.277 + super(chunkType); 2.278 + this.compositeType = compositeType; 2.279 + //out.write 2.280 + out.writeLong(0); // make room for the chunk header 2.281 + out.writeInt(0); // make room for the chunk header 2.282 + children = new LinkedList<Chunk>(); 2.283 + } 2.284 + 2.285 + public void add(Chunk child) throws IOException { 2.286 + if (children.size() > 0) { 2.287 + children.getLast().finish(); 2.288 + } 2.289 + children.add(child); 2.290 + } 2.291 + 2.292 + /** 2.293 + * Writes the chunk and all its children to the ImageOutputStream 2.294 + * and disposes of all resources held by the chunk. 2.295 + * @throws java.io.IOException 2.296 + */ 2.297 + @Override 2.298 + public void finish() throws IOException { 2.299 + if (!finished) { 2.300 + if (size() > 0xffffffffL) { 2.301 + throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size()); 2.302 + } 2.303 + 2.304 + long pointer = getRelativeStreamPosition(); 2.305 + seekRelative(offset); 2.306 + 2.307 + DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); 2.308 + headerData.writeType(compositeType); 2.309 + headerData.writeUInt(size() - 8); 2.310 + headerData.writeType(chunkType); 2.311 + for (Chunk child : children) { 2.312 + child.finish(); 2.313 + } 2.314 + seekRelative(pointer); 2.315 + if (size() % 2 == 1) { 2.316 + out.writeByte(0); // write pad byte 2.317 + } 2.318 + finished = true; 2.319 + } 2.320 + } 2.321 + 2.322 + @Override 2.323 + public long size() { 2.324 + long length = 12; 2.325 + for (Chunk child : children) { 2.326 + length += child.size() + child.size() % 2; 2.327 + } 2.328 + return length; 2.329 + } 2.330 + } 2.331 + 2.332 + /** 2.333 + * Data Chunk. 
2.334 + */ 2.335 + private class DataChunk extends Chunk { 2.336 + 2.337 + private DataChunkOutputStream data; 2.338 + private boolean finished; 2.339 + 2.340 + /** 2.341 + * Creates a new DataChunk at the current position of the 2.342 + * ImageOutputStream. 2.343 + * @param chunkType The chunkType of the chunk. 2.344 + */ 2.345 + public DataChunk(String name) throws IOException { 2.346 + super(name); 2.347 + out.writeLong(0); // make room for the chunk header 2.348 + data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false); 2.349 + } 2.350 + 2.351 + public DataChunkOutputStream getOutputStream() { 2.352 + if (finished) { 2.353 + throw new IllegalStateException("DataChunk is finished"); 2.354 + } 2.355 + return data; 2.356 + } 2.357 + 2.358 + /** 2.359 + * Returns the offset of this chunk to the beginning of the random access file 2.360 + * @return 2.361 + */ 2.362 + public long getOffset() { 2.363 + return offset; 2.364 + } 2.365 + 2.366 + @Override 2.367 + public void finish() throws IOException { 2.368 + if (!finished) { 2.369 + long sizeBefore = size(); 2.370 + 2.371 + if (size() > 0xffffffffL) { 2.372 + throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size()); 2.373 + } 2.374 + 2.375 + long pointer = getRelativeStreamPosition(); 2.376 + seekRelative(offset); 2.377 + 2.378 + DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); 2.379 + headerData.writeType(chunkType); 2.380 + headerData.writeUInt(size() - 8); 2.381 + seekRelative(pointer); 2.382 + if (size() % 2 == 1) { 2.383 + out.writeByte(0); // write pad byte 2.384 + } 2.385 + finished = true; 2.386 + long sizeAfter = size(); 2.387 + if (sizeBefore != sizeAfter) { 2.388 + System.err.println("size mismatch " + sizeBefore + ".." 
+ sizeAfter); 2.389 + } 2.390 + } 2.391 + } 2.392 + 2.393 + @Override 2.394 + public long size() { 2.395 + return 8 + data.size(); 2.396 + } 2.397 + } 2.398 + 2.399 + /** 2.400 + * A DataChunk with a fixed size. 2.401 + */ 2.402 + private class FixedSizeDataChunk extends Chunk { 2.403 + 2.404 + private DataChunkOutputStream data; 2.405 + private boolean finished; 2.406 + private long fixedSize; 2.407 + 2.408 + /** 2.409 + * Creates a new DataChunk at the current position of the 2.410 + * ImageOutputStream. 2.411 + * @param chunkType The chunkType of the chunk. 2.412 + */ 2.413 + public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException { 2.414 + super(chunkType); 2.415 + this.fixedSize = fixedSize; 2.416 + data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false); 2.417 + data.writeType(chunkType); 2.418 + data.writeUInt(fixedSize); 2.419 + data.clearCount(); 2.420 + 2.421 + // Fill fixed size with nulls 2.422 + byte[] buf = new byte[(int) Math.min(512, fixedSize)]; 2.423 + long written = 0; 2.424 + while (written < fixedSize) { 2.425 + data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written)); 2.426 + written += Math.min(buf.length, fixedSize - written); 2.427 + } 2.428 + if (fixedSize % 2 == 1) { 2.429 + out.writeByte(0); // write pad byte 2.430 + } 2.431 + seekToStartOfData(); 2.432 + } 2.433 + 2.434 + public DataChunkOutputStream getOutputStream() { 2.435 + /*if (finished) { 2.436 + throw new IllegalStateException("DataChunk is finished"); 2.437 + }*/ 2.438 + return data; 2.439 + } 2.440 + 2.441 + /** 2.442 + * Returns the offset of this chunk to the beginning of the random access file 2.443 + * @return 2.444 + */ 2.445 + public long getOffset() { 2.446 + return offset; 2.447 + } 2.448 + 2.449 + public void seekToStartOfData() throws IOException { 2.450 + seekRelative(offset + 8); 2.451 + data.clearCount(); 2.452 + } 2.453 + 2.454 + public void seekToEndOfChunk() throws IOException { 2.455 + seekRelative(offset + 8 
+ fixedSize + fixedSize % 2); 2.456 + } 2.457 + 2.458 + @Override 2.459 + public void finish() throws IOException { 2.460 + if (!finished) { 2.461 + finished = true; 2.462 + } 2.463 + } 2.464 + 2.465 + @Override 2.466 + public long size() { 2.467 + return 8 + fixedSize; 2.468 + } 2.469 + } 2.470 + 2.471 + /** 2.472 + * Creates a new AVI file with the specified video format and 2.473 + * frame rate. The video has 24 bits per pixel. 2.474 + * 2.475 + * @param file the output file 2.476 + * @param format Selects an encoder for the video format. 2.477 + * @param bitsPerPixel the number of bits per pixel. 2.478 + * @exception IllegalArgumentException if videoFormat is null or if 2.479 + * frame rate is <= 0 2.480 + */ 2.481 + public AVIOutputStream(File file, VideoFormat format) throws IOException { 2.482 + this(file,format,24); 2.483 + } 2.484 + /** 2.485 + * Creates a new AVI file with the specified video format and 2.486 + * frame rate. 2.487 + * 2.488 + * @param file the output file 2.489 + * @param format Selects an encoder for the video format. 2.490 + * @param bitsPerPixel the number of bits per pixel. 
2.491 + * @exception IllegalArgumentException if videoFormat is null or if 2.492 + * frame rate is <= 0 2.493 + */ 2.494 + public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException { 2.495 + if (format == null) { 2.496 + throw new IllegalArgumentException("format must not be null"); 2.497 + } 2.498 + 2.499 + if (file.exists()) { 2.500 + file.delete(); 2.501 + } 2.502 + this.out = new FileImageOutputStream(file); 2.503 + this.streamOffset = 0; 2.504 + this.videoFormat = format; 2.505 + this.videoFrames = new LinkedList<Sample>(); 2.506 + this.imgDepth = bitsPerPixel; 2.507 + if (imgDepth == 4) { 2.508 + byte[] gray = new byte[16]; 2.509 + for (int i = 0; i < gray.length; i++) { 2.510 + gray[i] = (byte) ((i << 4) | i); 2.511 + } 2.512 + palette = new IndexColorModel(4, 16, gray, gray, gray); 2.513 + } else if (imgDepth == 8) { 2.514 + byte[] gray = new byte[256]; 2.515 + for (int i = 0; i < gray.length; i++) { 2.516 + gray[i] = (byte) i; 2.517 + } 2.518 + palette = new IndexColorModel(8, 256, gray, gray, gray); 2.519 + } 2.520 + 2.521 + } 2.522 + 2.523 + /** 2.524 + * Creates a new AVI output stream with the specified video format and 2.525 + * framerate. 2.526 + * 2.527 + * @param out the underlying output stream 2.528 + * @param format Selects an encoder for the video format. 2.529 + * @exception IllegalArgumentException if videoFormat is null or if 2.530 + * framerate is <= 0 2.531 + */ 2.532 + public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException { 2.533 + if (format == null) { 2.534 + throw new IllegalArgumentException("format must not be null"); 2.535 + } 2.536 + this.out = out; 2.537 + this.streamOffset = out.getStreamPosition(); 2.538 + this.videoFormat = format; 2.539 + this.videoFrames = new LinkedList<Sample>(); 2.540 + } 2.541 + 2.542 + /** 2.543 + * Used with frameRate to specify the time scale that this stream will use. 
2.544 + * Dividing frameRate by timeScale gives the number of samples per second. 2.545 + * For video streams, this is the frame rate. For audio streams, this rate 2.546 + * corresponds to the time needed to play nBlockAlign bytes of audio, which 2.547 + * for PCM audio is the just the sample rate. 2.548 + * <p> 2.549 + * The default value is 1. 2.550 + * 2.551 + * @param newValue 2.552 + */ 2.553 + public void setTimeScale(int newValue) { 2.554 + if (newValue <= 0) { 2.555 + throw new IllegalArgumentException("timeScale must be greater 0"); 2.556 + } 2.557 + this.timeScale = newValue; 2.558 + } 2.559 + 2.560 + /** 2.561 + * Returns the time scale of this media. 2.562 + * 2.563 + * @return time scale 2.564 + */ 2.565 + public int getTimeScale() { 2.566 + return timeScale; 2.567 + } 2.568 + 2.569 + /** 2.570 + * Sets the rate of video frames in time scale units. 2.571 + * <p> 2.572 + * The default value is 30. Together with the default value 1 of timeScale 2.573 + * this results in 30 frames pers second. 2.574 + * 2.575 + * @param newValue 2.576 + */ 2.577 + public void setFrameRate(int newValue) { 2.578 + if (newValue <= 0) { 2.579 + throw new IllegalArgumentException("frameDuration must be greater 0"); 2.580 + } 2.581 + if (state == States.STARTED) { 2.582 + throw new IllegalStateException("frameDuration must be set before the first frame is written"); 2.583 + } 2.584 + this.frameRate = newValue; 2.585 + } 2.586 + 2.587 + /** 2.588 + * Returns the frame rate of this media. 2.589 + * 2.590 + * @return frame rate 2.591 + */ 2.592 + public int getFrameRate() { 2.593 + return frameRate; 2.594 + } 2.595 + 2.596 + /** Sets the global color palette. */ 2.597 + public void setPalette(IndexColorModel palette) { 2.598 + this.palette = palette; 2.599 + } 2.600 + 2.601 + /** 2.602 + * Sets the compression quality of the video track. 2.603 + * A value of 0 stands for "high compression is important" a value of 2.604 + * 1 for "high image quality is important". 
2.605 + * <p> 2.606 + * Changing this value affects frames which are subsequently written 2.607 + * to the AVIOutputStream. Frames which have already been written 2.608 + * are not changed. 2.609 + * <p> 2.610 + * This value has only effect on videos encoded with JPG format. 2.611 + * <p> 2.612 + * The default value is 0.9. 2.613 + * 2.614 + * @param newValue 2.615 + */ 2.616 + public void setVideoCompressionQuality(float newValue) { 2.617 + this.quality = newValue; 2.618 + } 2.619 + 2.620 + /** 2.621 + * Returns the video compression quality. 2.622 + * 2.623 + * @return video compression quality 2.624 + */ 2.625 + public float getVideoCompressionQuality() { 2.626 + return quality; 2.627 + } 2.628 + 2.629 + /** 2.630 + * Sets the dimension of the video track. 2.631 + * <p> 2.632 + * You need to explicitly set the dimension, if you add all frames from 2.633 + * files or input streams. 2.634 + * <p> 2.635 + * If you add frames from buffered images, then AVIOutputStream 2.636 + * can determine the video dimension from the image width and height. 2.637 + * 2.638 + * @param width Must be greater than 0. 2.639 + * @param height Must be greater than 0. 2.640 + */ 2.641 + public void setVideoDimension(int width, int height) { 2.642 + if (width < 1 || height < 1) { 2.643 + throw new IllegalArgumentException("width and height must be greater zero."); 2.644 + } 2.645 + this.imgWidth = width; 2.646 + this.imgHeight = height; 2.647 + } 2.648 + 2.649 + /** 2.650 + * Gets the dimension of the video track. 2.651 + * <p> 2.652 + * Returns null if the dimension is not known. 2.653 + */ 2.654 + public Dimension getVideoDimension() { 2.655 + if (imgWidth < 1 || imgHeight < 1) { 2.656 + return null; 2.657 + } 2.658 + return new Dimension(imgWidth, imgHeight); 2.659 + } 2.660 + 2.661 + /** 2.662 + * Sets the state of the QuickTimeOutpuStream to started. 2.663 + * <p> 2.664 + * If the state is changed by this method, the prolog is 2.665 + * written. 
2.666 + */ 2.667 + private void ensureStarted() throws IOException { 2.668 + if (state != States.STARTED) { 2.669 + creationTime = new Date(); 2.670 + writeProlog(); 2.671 + state = States.STARTED; 2.672 + } 2.673 + } 2.674 + 2.675 + /** 2.676 + * Writes a frame to the video track. 2.677 + * <p> 2.678 + * If the dimension of the video track has not been specified yet, it 2.679 + * is derived from the first buffered image added to the AVIOutputStream. 2.680 + * 2.681 + * @param image The frame image. 2.682 + * 2.683 + * @throws IllegalArgumentException if the duration is less than 1, or 2.684 + * if the dimension of the frame does not match the dimension of the video 2.685 + * track. 2.686 + * @throws IOException if writing the image failed. 2.687 + */ 2.688 + public void writeFrame(BufferedImage image) throws IOException { 2.689 + ensureOpen(); 2.690 + ensureStarted(); 2.691 + 2.692 + // Get the dimensions of the first image 2.693 + if (imgWidth == -1) { 2.694 + imgWidth = image.getWidth(); 2.695 + imgHeight = image.getHeight(); 2.696 + } else { 2.697 + // The dimension of the image must match the dimension of the video track 2.698 + if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) { 2.699 + throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size() 2.700 + + "] (width=" + image.getWidth() + ", height=" + image.getHeight() 2.701 + + ") differs from image[0] (width=" 2.702 + + imgWidth + ", height=" + imgHeight); 2.703 + } 2.704 + } 2.705 + 2.706 + DataChunk videoFrameChunk; 2.707 + long offset = getRelativeStreamPosition(); 2.708 + boolean isSync = true; 2.709 + switch (videoFormat) { 2.710 + case RAW: { 2.711 + switch (imgDepth) { 2.712 + case 4: { 2.713 + IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); 2.714 + int[] imgRGBs = new int[16]; 2.715 + imgPalette.getRGBs(imgRGBs); 2.716 + int[] previousRGBs = new int[16]; 2.717 + if (previousPalette == null) { 2.718 + previousPalette = palette; 2.719 + } 
2.720 + previousPalette.getRGBs(previousRGBs); 2.721 + if (!Arrays.equals(imgRGBs, previousRGBs)) { 2.722 + previousPalette = imgPalette; 2.723 + DataChunk paletteChangeChunk = new DataChunk("00pc"); 2.724 + /* 2.725 + int first = imgPalette.getMapSize(); 2.726 + int last = -1; 2.727 + for (int i = 0; i < 16; i++) { 2.728 + if (previousRGBs[i] != imgRGBs[i] && i < first) { 2.729 + first = i; 2.730 + } 2.731 + if (previousRGBs[i] != imgRGBs[i] && i > last) { 2.732 + last = i; 2.733 + } 2.734 + }*/ 2.735 + int first = 0; 2.736 + int last = imgPalette.getMapSize() - 1; 2.737 + /* 2.738 + * typedef struct { 2.739 + BYTE bFirstEntry; 2.740 + BYTE bNumEntries; 2.741 + WORD wFlags; 2.742 + PALETTEENTRY peNew[]; 2.743 + } AVIPALCHANGE; 2.744 + * 2.745 + * typedef struct tagPALETTEENTRY { 2.746 + BYTE peRed; 2.747 + BYTE peGreen; 2.748 + BYTE peBlue; 2.749 + BYTE peFlags; 2.750 + } PALETTEENTRY; 2.751 + */ 2.752 + DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); 2.753 + pOut.writeByte(first);//bFirstEntry 2.754 + pOut.writeByte(last - first + 1);//bNumEntries 2.755 + pOut.writeShort(0);//wFlags 2.756 + 2.757 + for (int i = first; i <= last; i++) { 2.758 + pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red 2.759 + pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green 2.760 + pOut.writeByte(imgRGBs[i] & 0xff); // blue 2.761 + pOut.writeByte(0); // reserved*/ 2.762 + } 2.763 + 2.764 + moviChunk.add(paletteChangeChunk); 2.765 + paletteChangeChunk.finish(); 2.766 + long length = getRelativeStreamPosition() - offset; 2.767 + videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); 2.768 + offset = getRelativeStreamPosition(); 2.769 + } 2.770 + 2.771 + videoFrameChunk = new DataChunk("00db"); 2.772 + byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); 2.773 + byte[] rgb4 = new byte[imgWidth / 2]; 2.774 + for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down 2.775 + for (int x = 0, 
xx = 0, n = imgWidth; x < n; x += 2, ++xx) { 2.776 + rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf)); 2.777 + } 2.778 + videoFrameChunk.getOutputStream().write(rgb4); 2.779 + } 2.780 + break; 2.781 + } 2.782 + case 8: { 2.783 + IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); 2.784 + int[] imgRGBs = new int[256]; 2.785 + imgPalette.getRGBs(imgRGBs); 2.786 + int[] previousRGBs = new int[256]; 2.787 + if (previousPalette == null) { 2.788 + previousPalette = palette; 2.789 + } 2.790 + previousPalette.getRGBs(previousRGBs); 2.791 + if (!Arrays.equals(imgRGBs, previousRGBs)) { 2.792 + previousPalette = imgPalette; 2.793 + DataChunk paletteChangeChunk = new DataChunk("00pc"); 2.794 + /* 2.795 + int first = imgPalette.getMapSize(); 2.796 + int last = -1; 2.797 + for (int i = 0; i < 16; i++) { 2.798 + if (previousRGBs[i] != imgRGBs[i] && i < first) { 2.799 + first = i; 2.800 + } 2.801 + if (previousRGBs[i] != imgRGBs[i] && i > last) { 2.802 + last = i; 2.803 + } 2.804 + }*/ 2.805 + int first = 0; 2.806 + int last = imgPalette.getMapSize() - 1; 2.807 + /* 2.808 + * typedef struct { 2.809 + BYTE bFirstEntry; 2.810 + BYTE bNumEntries; 2.811 + WORD wFlags; 2.812 + PALETTEENTRY peNew[]; 2.813 + } AVIPALCHANGE; 2.814 + * 2.815 + * typedef struct tagPALETTEENTRY { 2.816 + BYTE peRed; 2.817 + BYTE peGreen; 2.818 + BYTE peBlue; 2.819 + BYTE peFlags; 2.820 + } PALETTEENTRY; 2.821 + */ 2.822 + DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); 2.823 + pOut.writeByte(first);//bFirstEntry 2.824 + pOut.writeByte(last - first + 1);//bNumEntries 2.825 + pOut.writeShort(0);//wFlags 2.826 + 2.827 + for (int i = first; i <= last; i++) { 2.828 + pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red 2.829 + pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green 2.830 + pOut.writeByte(imgRGBs[i] & 0xff); // blue 2.831 + pOut.writeByte(0); // reserved*/ 2.832 + } 2.833 + 2.834 + moviChunk.add(paletteChangeChunk); 2.835 + 
paletteChangeChunk.finish(); 2.836 + long length = getRelativeStreamPosition() - offset; 2.837 + videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); 2.838 + offset = getRelativeStreamPosition(); 2.839 + } 2.840 + 2.841 + videoFrameChunk = new DataChunk("00db"); 2.842 + byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); 2.843 + for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down 2.844 + videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth); 2.845 + } 2.846 + break; 2.847 + } 2.848 + default: { 2.849 + videoFrameChunk = new DataChunk("00db"); 2.850 + WritableRaster raster = image.getRaster(); 2.851 + int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data 2.852 + byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data 2.853 + for (int y = imgHeight - 1; y >= 0; --y) { // Upside down 2.854 + raster.getPixels(0, y, imgWidth, 1, raw); 2.855 + for (int x = 0, n = imgWidth * 3; x < n; x += 3) { 2.856 + bytes[x + 2] = (byte) raw[x]; // Blue 2.857 + bytes[x + 1] = (byte) raw[x + 1]; // Green 2.858 + bytes[x] = (byte) raw[x + 2]; // Red 2.859 + } 2.860 + videoFrameChunk.getOutputStream().write(bytes); 2.861 + } 2.862 + break; 2.863 + } 2.864 + } 2.865 + break; 2.866 + } 2.867 + 2.868 + case JPG: { 2.869 + videoFrameChunk = new DataChunk("00dc"); 2.870 + ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next(); 2.871 + ImageWriteParam iwParam = iw.getDefaultWriteParam(); 2.872 + iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); 2.873 + iwParam.setCompressionQuality(quality); 2.874 + MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); 2.875 + iw.setOutput(imgOut); 2.876 + IIOImage img = new IIOImage(image, null, null); 2.877 + iw.write(null, img, iwParam); 2.878 + iw.dispose(); 2.879 + break; 
2.880 + } 2.881 + case PNG: 2.882 + default: { 2.883 + videoFrameChunk = new DataChunk("00dc"); 2.884 + ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next(); 2.885 + ImageWriteParam iwParam = iw.getDefaultWriteParam(); 2.886 + MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); 2.887 + iw.setOutput(imgOut); 2.888 + IIOImage img = new IIOImage(image, null, null); 2.889 + iw.write(null, img, iwParam); 2.890 + iw.dispose(); 2.891 + break; 2.892 + } 2.893 + } 2.894 + long length = getRelativeStreamPosition() - offset; 2.895 + moviChunk.add(videoFrameChunk); 2.896 + videoFrameChunk.finish(); 2.897 + 2.898 + videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync)); 2.899 + if (getRelativeStreamPosition() > 1L << 32) { 2.900 + throw new IOException("AVI file is larger than 4 GB"); 2.901 + } 2.902 + } 2.903 + 2.904 + /** 2.905 + * Writes a frame from a file to the video track. 2.906 + * <p> 2.907 + * This method does not inspect the contents of the file. 2.908 + * For example, Its your responsibility to only add JPG files if you have 2.909 + * chosen the JPEG video format. 2.910 + * <p> 2.911 + * If you add all frames from files or from input streams, then you 2.912 + * have to explicitly set the dimension of the video track before you 2.913 + * call finish() or close(). 2.914 + * 2.915 + * @param file The file which holds the image data. 2.916 + * 2.917 + * @throws IllegalStateException if the duration is less than 1. 2.918 + * @throws IOException if writing the image failed. 2.919 + */ 2.920 + public void writeFrame(File file) throws IOException { 2.921 + FileInputStream in = null; 2.922 + try { 2.923 + in = new FileInputStream(file); 2.924 + writeFrame(in); 2.925 + } finally { 2.926 + if (in != null) { 2.927 + in.close(); 2.928 + } 2.929 + } 2.930 + } 2.931 + 2.932 + /** 2.933 + * Writes a frame to the video track. 
2.934 + * <p> 2.935 + * This method does not inspect the contents of the file. 2.936 + * For example, its your responsibility to only add JPG files if you have 2.937 + * chosen the JPEG video format. 2.938 + * <p> 2.939 + * If you add all frames from files or from input streams, then you 2.940 + * have to explicitly set the dimension of the video track before you 2.941 + * call finish() or close(). 2.942 + * 2.943 + * @param in The input stream which holds the image data. 2.944 + * 2.945 + * @throws IllegalArgumentException if the duration is less than 1. 2.946 + * @throws IOException if writing the image failed. 2.947 + */ 2.948 + public void writeFrame(InputStream in) throws IOException { 2.949 + ensureOpen(); 2.950 + ensureStarted(); 2.951 + 2.952 + DataChunk videoFrameChunk = new DataChunk( 2.953 + videoFormat == VideoFormat.RAW ? "00db" : "00dc"); 2.954 + moviChunk.add(videoFrameChunk); 2.955 + OutputStream mdatOut = videoFrameChunk.getOutputStream(); 2.956 + long offset = getRelativeStreamPosition(); 2.957 + byte[] buf = new byte[512]; 2.958 + int len; 2.959 + while ((len = in.read(buf)) != -1) { 2.960 + mdatOut.write(buf, 0, len); 2.961 + } 2.962 + long length = getRelativeStreamPosition() - offset; 2.963 + videoFrameChunk.finish(); 2.964 + videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true)); 2.965 + if (getRelativeStreamPosition() > 1L << 32) { 2.966 + throw new IOException("AVI file is larger than 4 GB"); 2.967 + } 2.968 + } 2.969 + 2.970 + /** 2.971 + * Closes the movie file as well as the stream being filtered. 
2.972 + * 2.973 + * @exception IOException if an I/O error has occurred 2.974 + */ 2.975 + public void close() throws IOException { 2.976 + if (state == States.STARTED) { 2.977 + finish(); 2.978 + } 2.979 + if (state != States.CLOSED) { 2.980 + out.close(); 2.981 + state = States.CLOSED; 2.982 + } 2.983 + } 2.984 + 2.985 + /** 2.986 + * Finishes writing the contents of the AVI output stream without closing 2.987 + * the underlying stream. Use this method when applying multiple filters 2.988 + * in succession to the same output stream. 2.989 + * 2.990 + * @exception IllegalStateException if the dimension of the video track 2.991 + * has not been specified or determined yet. 2.992 + * @exception IOException if an I/O exception has occurred 2.993 + */ 2.994 + public void finish() throws IOException { 2.995 + ensureOpen(); 2.996 + if (state != States.FINISHED) { 2.997 + if (imgWidth == -1 || imgHeight == -1) { 2.998 + throw new IllegalStateException("image width and height must be specified"); 2.999 + } 2.1000 + 2.1001 + moviChunk.finish(); 2.1002 + writeEpilog(); 2.1003 + state = States.FINISHED; 2.1004 + imgWidth = imgHeight = -1; 2.1005 + } 2.1006 + } 2.1007 + 2.1008 + /** 2.1009 + * Check to make sure that this stream has not been closed 2.1010 + */ 2.1011 + private void ensureOpen() throws IOException { 2.1012 + if (state == States.CLOSED) { 2.1013 + throw new IOException("Stream closed"); 2.1014 + } 2.1015 + } 2.1016 + 2.1017 + /** Gets the position relative to the beginning of the QuickTime stream. 2.1018 + * <p> 2.1019 + * Usually this value is equal to the stream position of the underlying 2.1020 + * ImageOutputStream, but can be larger if the underlying stream already 2.1021 + * contained data. 2.1022 + * 2.1023 + * @return The relative stream position. 
2.1024 + * @throws IOException 2.1025 + */ 2.1026 + private long getRelativeStreamPosition() throws IOException { 2.1027 + return out.getStreamPosition() - streamOffset; 2.1028 + } 2.1029 + 2.1030 + /** Seeks relative to the beginning of the QuickTime stream. 2.1031 + * <p> 2.1032 + * Usually this equal to seeking in the underlying ImageOutputStream, but 2.1033 + * can be different if the underlying stream already contained data. 2.1034 + * 2.1035 + */ 2.1036 + private void seekRelative(long newPosition) throws IOException { 2.1037 + out.seek(newPosition + streamOffset); 2.1038 + } 2.1039 + 2.1040 + private void writeProlog() throws IOException { 2.1041 + // The file has the following structure: 2.1042 + // 2.1043 + // .RIFF AVI 2.1044 + // ..avih (AVI Header Chunk) 2.1045 + // ..LIST strl 2.1046 + // ...strh (Stream Header Chunk) 2.1047 + // ...strf (Stream Format Chunk) 2.1048 + // ..LIST movi 2.1049 + // ...00dc (Compressed video data chunk in Track 00, repeated for each frame) 2.1050 + // ..idx1 (List of video data chunks and their location in the file) 2.1051 + 2.1052 + // The RIFF AVI Chunk holds the complete movie 2.1053 + aviChunk = new CompositeChunk("RIFF", "AVI "); 2.1054 + CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl"); 2.1055 + 2.1056 + // Write empty AVI Main Header Chunk - we fill the data in later 2.1057 + aviChunk.add(hdrlChunk); 2.1058 + avihChunk = new FixedSizeDataChunk("avih", 56); 2.1059 + avihChunk.seekToEndOfChunk(); 2.1060 + hdrlChunk.add(avihChunk); 2.1061 + 2.1062 + CompositeChunk strlChunk = new CompositeChunk("LIST", "strl"); 2.1063 + hdrlChunk.add(strlChunk); 2.1064 + 2.1065 + // Write empty AVI Stream Header Chunk - we fill the data in later 2.1066 + strhChunk = new FixedSizeDataChunk("strh", 56); 2.1067 + strhChunk.seekToEndOfChunk(); 2.1068 + strlChunk.add(strhChunk); 2.1069 + strfChunk = new FixedSizeDataChunk("strf", palette == null ? 
40 : 40 + palette.getMapSize() * 4); 2.1070 + strfChunk.seekToEndOfChunk(); 2.1071 + strlChunk.add(strfChunk); 2.1072 + 2.1073 + moviChunk = new CompositeChunk("LIST", "movi"); 2.1074 + aviChunk.add(moviChunk); 2.1075 + 2.1076 + 2.1077 + } 2.1078 + 2.1079 + private void writeEpilog() throws IOException { 2.1080 + // Compute values 2.1081 + int duration = 0; 2.1082 + for (Sample s : videoFrames) { 2.1083 + duration += s.duration; 2.1084 + } 2.1085 + long bufferSize = 0; 2.1086 + for (Sample s : videoFrames) { 2.1087 + if (s.length > bufferSize) { 2.1088 + bufferSize = s.length; 2.1089 + } 2.1090 + } 2.1091 + 2.1092 + 2.1093 + DataChunkOutputStream d; 2.1094 + 2.1095 + /* Create Idx1 Chunk and write data 2.1096 + * ------------- 2.1097 + typedef struct _avioldindex { 2.1098 + FOURCC fcc; 2.1099 + DWORD cb; 2.1100 + struct _avioldindex_entry { 2.1101 + DWORD dwChunkId; 2.1102 + DWORD dwFlags; 2.1103 + DWORD dwOffset; 2.1104 + DWORD dwSize; 2.1105 + } aIndex[]; 2.1106 + } AVIOLDINDEX; 2.1107 + */ 2.1108 + DataChunk idx1Chunk = new DataChunk("idx1"); 2.1109 + aviChunk.add(idx1Chunk); 2.1110 + d = idx1Chunk.getOutputStream(); 2.1111 + long moviListOffset = moviChunk.offset + 8; 2.1112 + //moviListOffset = 0; 2.1113 + for (Sample f : videoFrames) { 2.1114 + 2.1115 + d.writeType(f.chunkType); // dwChunkId 2.1116 + // Specifies a FOURCC that identifies a stream in the AVI file. The 2.1117 + // FOURCC must have the form 'xxyy' where xx is the stream number and yy 2.1118 + // is a two-character code that identifies the contents of the stream: 2.1119 + // 2.1120 + // Two-character code Description 2.1121 + // db Uncompressed video frame 2.1122 + // dc Compressed video frame 2.1123 + // pc Palette change 2.1124 + // wb Audio data 2.1125 + 2.1126 + d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)// 2.1127 + | (f.isSync ? 
0x10 : 0x0)); // dwFlags 2.1128 + // Specifies a bitwise combination of zero or more of the following 2.1129 + // flags: 2.1130 + // 2.1131 + // Value Name Description 2.1132 + // 0x10 AVIIF_KEYFRAME The data chunk is a key frame. 2.1133 + // 0x1 AVIIF_LIST The data chunk is a 'rec ' list. 2.1134 + // 0x100 AVIIF_NO_TIME The data chunk does not affect the timing of the 2.1135 + // stream. For example, this flag should be set for 2.1136 + // palette changes. 2.1137 + 2.1138 + d.writeUInt(f.offset - moviListOffset); // dwOffset 2.1139 + // Specifies the location of the data chunk in the file. The value 2.1140 + // should be specified as an offset, in bytes, from the start of the 2.1141 + // 'movi' list; however, in some AVI files it is given as an offset from 2.1142 + // the start of the file. 2.1143 + 2.1144 + d.writeUInt(f.length); // dwSize 2.1145 + // Specifies the size of the data chunk, in bytes. 2.1146 + } 2.1147 + idx1Chunk.finish(); 2.1148 + 2.1149 + /* Write Data into AVI Main Header Chunk 2.1150 + * ------------- 2.1151 + * The AVIMAINHEADER structure defines global information in an AVI file. 2.1152 + * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx 2.1153 + typedef struct _avimainheader { 2.1154 + FOURCC fcc; 2.1155 + DWORD cb; 2.1156 + DWORD dwMicroSecPerFrame; 2.1157 + DWORD dwMaxBytesPerSec; 2.1158 + DWORD dwPaddingGranularity; 2.1159 + DWORD dwFlags; 2.1160 + DWORD dwTotalFrames; 2.1161 + DWORD dwInitialFrames; 2.1162 + DWORD dwStreams; 2.1163 + DWORD dwSuggestedBufferSize; 2.1164 + DWORD dwWidth; 2.1165 + DWORD dwHeight; 2.1166 + DWORD dwReserved[4]; 2.1167 + } AVIMAINHEADER; */ 2.1168 + avihChunk.seekToStartOfData(); 2.1169 + d = avihChunk.getOutputStream(); 2.1170 + 2.1171 + d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame 2.1172 + // Specifies the number of microseconds between frames. 2.1173 + // This value indicates the overall timing for the file. 
2.1174 + 2.1175 + d.writeUInt(0); // dwMaxBytesPerSec 2.1176 + // Specifies the approximate maximum data rate of the file. 2.1177 + // This value indicates the number of bytes per second the system 2.1178 + // must handle to present an AVI sequence as specified by the other 2.1179 + // parameters contained in the main header and stream header chunks. 2.1180 + 2.1181 + d.writeUInt(0); // dwPaddingGranularity 2.1182 + // Specifies the alignment for data, in bytes. Pad the data to multiples 2.1183 + // of this value. 2.1184 + 2.1185 + d.writeUInt(0x10); // dwFlags (0x10 == hasIndex) 2.1186 + // Contains a bitwise combination of zero or more of the following 2.1187 + // flags: 2.1188 + // 2.1189 + // Value Name Description 2.1190 + // 0x10 AVIF_HASINDEX Indicates the AVI file has an index. 2.1191 + // 0x20 AVIF_MUSTUSEINDEX Indicates that application should use the 2.1192 + // index, rather than the physical ordering of the 2.1193 + // chunks in the file, to determine the order of 2.1194 + // presentation of the data. For example, this flag 2.1195 + // could be used to create a list of frames for 2.1196 + // editing. 2.1197 + // 0x100 AVIF_ISINTERLEAVED Indicates the AVI file is interleaved. 2.1198 + // 0x1000 AVIF_WASCAPTUREFILE Indicates the AVI file is a specially 2.1199 + // allocated file used for capturing real-time 2.1200 + // video. Applications should warn the user before 2.1201 + // writing over a file with this flag set because 2.1202 + // the user probably defragmented this file. 2.1203 + // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted 2.1204 + // data and software. When this flag is used, 2.1205 + // software should not permit the data to be 2.1206 + // duplicated. 2.1207 + 2.1208 + d.writeUInt(videoFrames.size()); // dwTotalFrames 2.1209 + // Specifies the total number of frames of data in the file. 2.1210 + 2.1211 + d.writeUInt(0); // dwInitialFrames 2.1212 + // Specifies the initial frame for interleaved files. 
Noninterleaved 2.1213 + // files should specify zero. If you are creating interleaved files, 2.1214 + // specify the number of frames in the file prior to the initial frame 2.1215 + // of the AVI sequence in this member. 2.1216 + // To give the audio driver enough audio to work with, the audio data in 2.1217 + // an interleaved file must be skewed from the video data. Typically, 2.1218 + // the audio data should be moved forward enough frames to allow 2.1219 + // approximately 0.75 seconds of audio data to be preloaded. The 2.1220 + // dwInitialRecords member should be set to the number of frames the 2.1221 + // audio is skewed. Also set the same value for the dwInitialFrames 2.1222 + // member of the AVISTREAMHEADER structure in the audio stream header 2.1223 + 2.1224 + d.writeUInt(1); // dwStreams 2.1225 + // Specifies the number of streams in the file. For example, a file with 2.1226 + // audio and video has two streams. 2.1227 + 2.1228 + d.writeUInt(bufferSize); // dwSuggestedBufferSize 2.1229 + // Specifies the suggested buffer size for reading the file. Generally, 2.1230 + // this size should be large enough to contain the largest chunk in the 2.1231 + // file. If set to zero, or if it is too small, the playback software 2.1232 + // will have to reallocate memory during playback, which will reduce 2.1233 + // performance. For an interleaved file, the buffer size should be large 2.1234 + // enough to read an entire record, and not just a chunk. 2.1235 + 2.1236 + 2.1237 + d.writeUInt(imgWidth); // dwWidth 2.1238 + // Specifies the width of the AVI file in pixels. 2.1239 + 2.1240 + d.writeUInt(imgHeight); // dwHeight 2.1241 + // Specifies the height of the AVI file in pixels. 2.1242 + 2.1243 + d.writeUInt(0); // dwReserved[0] 2.1244 + d.writeUInt(0); // dwReserved[1] 2.1245 + d.writeUInt(0); // dwReserved[2] 2.1246 + d.writeUInt(0); // dwReserved[3] 2.1247 + // Reserved. Set this array to zero. 
2.1248 + 2.1249 + /* Write Data into AVI Stream Header Chunk 2.1250 + * ------------- 2.1251 + * The AVISTREAMHEADER structure contains information about one stream 2.1252 + * in an AVI file. 2.1253 + * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx 2.1254 + typedef struct _avistreamheader { 2.1255 + FOURCC fcc; 2.1256 + DWORD cb; 2.1257 + FOURCC fccType; 2.1258 + FOURCC fccHandler; 2.1259 + DWORD dwFlags; 2.1260 + WORD wPriority; 2.1261 + WORD wLanguage; 2.1262 + DWORD dwInitialFrames; 2.1263 + DWORD dwScale; 2.1264 + DWORD dwRate; 2.1265 + DWORD dwStart; 2.1266 + DWORD dwLength; 2.1267 + DWORD dwSuggestedBufferSize; 2.1268 + DWORD dwQuality; 2.1269 + DWORD dwSampleSize; 2.1270 + struct { 2.1271 + short int left; 2.1272 + short int top; 2.1273 + short int right; 2.1274 + short int bottom; 2.1275 + } rcFrame; 2.1276 + } AVISTREAMHEADER; 2.1277 + */ 2.1278 + strhChunk.seekToStartOfData(); 2.1279 + d = strhChunk.getOutputStream(); 2.1280 + d.writeType("vids"); // fccType - vids for video stream 2.1281 + // Contains a FOURCC that specifies the type of the data contained in 2.1282 + // the stream. The following standard AVI values for video and audio are 2.1283 + // defined: 2.1284 + // 2.1285 + // FOURCC Description 2.1286 + // 'auds' Audio stream 2.1287 + // 'mids' MIDI stream 2.1288 + // 'txts' Text stream 2.1289 + // 'vids' Video stream 2.1290 + 2.1291 + switch (videoFormat) { 2.1292 + case RAW: 2.1293 + d.writeType("DIB "); // fccHandler - DIB for Raw RGB 2.1294 + break; 2.1295 + case RLE: 2.1296 + d.writeType("RLE "); // fccHandler - Microsoft RLE 2.1297 + break; 2.1298 + case JPG: 2.1299 + d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG 2.1300 + break; 2.1301 + case PNG: 2.1302 + default: 2.1303 + d.writeType("png "); // fccHandler - png for PNG 2.1304 + break; 2.1305 + } 2.1306 + // Optionally, contains a FOURCC that identifies a specific data 2.1307 + // handler. The data handler is the preferred handler for the stream. 
2.1308 + // For audio and video streams, this specifies the codec for decoding 2.1309 + // the stream. 2.1310 + 2.1311 + if (imgDepth <= 8) { 2.1312 + d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES 2.1313 + } else { 2.1314 + d.writeUInt(0); // dwFlags 2.1315 + } 2.1316 + 2.1317 + // Contains any flags for the data stream. The bits in the high-order 2.1318 + // word of these flags are specific to the type of data contained in the 2.1319 + // stream. The following standard flags are defined: 2.1320 + // 2.1321 + // Value Name Description 2.1322 + // AVISF_DISABLED 0x00000001 Indicates this stream should not 2.1323 + // be enabled by default. 2.1324 + // AVISF_VIDEO_PALCHANGES 0x00010000 2.1325 + // Indicates this video stream contains 2.1326 + // palette changes. This flag warns the playback 2.1327 + // software that it will need to animate the 2.1328 + // palette. 2.1329 + 2.1330 + d.writeUShort(0); // wPriority 2.1331 + // Specifies priority of a stream type. For example, in a file with 2.1332 + // multiple audio streams, the one with the highest priority might be 2.1333 + // the default stream. 2.1334 + 2.1335 + d.writeUShort(0); // wLanguage 2.1336 + // Language tag. 2.1337 + 2.1338 + d.writeUInt(0); // dwInitialFrames 2.1339 + // Specifies how far audio data is skewed ahead of the video frames in 2.1340 + // interleaved files. Typically, this is about 0.75 seconds. If you are 2.1341 + // creating interleaved files, specify the number of frames in the file 2.1342 + // prior to the initial frame of the AVI sequence in this member. For 2.1343 + // more information, see the remarks for the dwInitialFrames member of 2.1344 + // the AVIMAINHEADER structure. 2.1345 + 2.1346 + d.writeUInt(timeScale); // dwScale 2.1347 + // Used with dwRate to specify the time scale that this stream will use. 2.1348 + // Dividing dwRate by dwScale gives the number of samples per second. 2.1349 + // For video streams, this is the frame rate. 
For audio streams, this 2.1350 + // rate corresponds to the time needed to play nBlockAlign bytes of 2.1351 + // audio, which for PCM audio is the just the sample rate. 2.1352 + 2.1353 + d.writeUInt(frameRate); // dwRate 2.1354 + // See dwScale. 2.1355 + 2.1356 + d.writeUInt(0); // dwStart 2.1357 + // Specifies the starting time for this stream. The units are defined by 2.1358 + // the dwRate and dwScale members in the main file header. Usually, this 2.1359 + // is zero, but it can specify a delay time for a stream that does not 2.1360 + // start concurrently with the file. 2.1361 + 2.1362 + d.writeUInt(videoFrames.size()); // dwLength 2.1363 + // Specifies the length of this stream. The units are defined by the 2.1364 + // dwRate and dwScale members of the stream's header. 2.1365 + 2.1366 + d.writeUInt(bufferSize); // dwSuggestedBufferSize 2.1367 + // Specifies how large a buffer should be used to read this stream. 2.1368 + // Typically, this contains a value corresponding to the largest chunk 2.1369 + // present in the stream. Using the correct buffer size makes playback 2.1370 + // more efficient. Use zero if you do not know the correct buffer size. 2.1371 + 2.1372 + d.writeInt(-1); // dwQuality 2.1373 + // Specifies an indicator of the quality of the data in the stream. 2.1374 + // Quality is represented as a number between 0 and 10,000. 2.1375 + // For compressed data, this typically represents the value of the 2.1376 + // quality parameter passed to the compression software. If set to –1, 2.1377 + // drivers use the default quality value. 2.1378 + 2.1379 + d.writeUInt(0); // dwSampleSize 2.1380 + // Specifies the size of a single sample of data. This is set to zero 2.1381 + // if the samples can vary in size. If this number is nonzero, then 2.1382 + // multiple samples of data can be grouped into a single chunk within 2.1383 + // the file. If it is zero, each sample of data (such as a video frame) 2.1384 + // must be in a separate chunk. 
For video streams, this number is 2.1385 + // typically zero, although it can be nonzero if all video frames are 2.1386 + // the same size. For audio streams, this number should be the same as 2.1387 + // the nBlockAlign member of the WAVEFORMATEX structure describing the 2.1388 + // audio. 2.1389 + 2.1390 + d.writeUShort(0); // rcFrame.left 2.1391 + d.writeUShort(0); // rcFrame.top 2.1392 + d.writeUShort(imgWidth); // rcFrame.right 2.1393 + d.writeUShort(imgHeight); // rcFrame.bottom 2.1394 + // Specifies the destination rectangle for a text or video stream within 2.1395 + // the movie rectangle specified by the dwWidth and dwHeight members of 2.1396 + // the AVI main header structure. The rcFrame member is typically used 2.1397 + // in support of multiple video streams. Set this rectangle to the 2.1398 + // coordinates corresponding to the movie rectangle to update the whole 2.1399 + // movie rectangle. Units for this member are pixels. The upper-left 2.1400 + // corner of the destination rectangle is relative to the upper-left 2.1401 + // corner of the movie rectangle. 2.1402 + 2.1403 + /* Write BITMAPINFOHEADR Data into AVI Stream Format Chunk 2.1404 + /* ------------- 2.1405 + * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx 2.1406 + typedef struct tagBITMAPINFOHEADER { 2.1407 + DWORD biSize; 2.1408 + LONG biWidth; 2.1409 + LONG biHeight; 2.1410 + WORD biPlanes; 2.1411 + WORD biBitCount; 2.1412 + DWORD biCompression; 2.1413 + DWORD biSizeImage; 2.1414 + LONG biXPelsPerMeter; 2.1415 + LONG biYPelsPerMeter; 2.1416 + DWORD biClrUsed; 2.1417 + DWORD biClrImportant; 2.1418 + } BITMAPINFOHEADER; 2.1419 + */ 2.1420 + strfChunk.seekToStartOfData(); 2.1421 + d = strfChunk.getOutputStream(); 2.1422 + d.writeUInt(40); // biSize 2.1423 + // Specifies the number of bytes required by the structure. This value 2.1424 + // does not include the size of the color table or the size of the color 2.1425 + // masks, if they are appended to the end of structure. 
2.1426 + 2.1427 + d.writeInt(imgWidth); // biWidth 2.1428 + // Specifies the width of the bitmap, in pixels. 2.1429 + 2.1430 + d.writeInt(imgHeight); // biHeight 2.1431 + // Specifies the height of the bitmap, in pixels. 2.1432 + // 2.1433 + // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is 2.1434 + // a bottom-up DIB with the origin at the lower left corner. If biHeight 2.1435 + // is negative, the bitmap is a top-down DIB with the origin at the 2.1436 + // upper left corner. 2.1437 + // For YUV bitmaps, the bitmap is always top-down, regardless of the 2.1438 + // sign of biHeight. Decoders should offer YUV formats with postive 2.1439 + // biHeight, but for backward compatibility they should accept YUV 2.1440 + // formats with either positive or negative biHeight. 2.1441 + // For compressed formats, biHeight must be positive, regardless of 2.1442 + // image orientation. 2.1443 + 2.1444 + d.writeShort(1); // biPlanes 2.1445 + // Specifies the number of planes for the target device. This value must 2.1446 + // be set to 1. 2.1447 + 2.1448 + d.writeShort(imgDepth); // biBitCount 2.1449 + // Specifies the number of bits per pixel (bpp). For uncompressed 2.1450 + // formats, this value is the average number of bits per pixel. For 2.1451 + // compressed formats, this value is the implied bit depth of the 2.1452 + // uncompressed image, after the image has been decoded. 
2.1453 + 2.1454 + switch (videoFormat) { 2.1455 + case RAW: 2.1456 + default: 2.1457 + d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB 2.1458 + break; 2.1459 + case RLE: 2.1460 + if (imgDepth == 8) { 2.1461 + d.writeInt(1); // biCompression - BI_RLE8 2.1462 + } else if (imgDepth == 4) { 2.1463 + d.writeInt(2); // biCompression - BI_RLE4 2.1464 + } else { 2.1465 + throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images"); 2.1466 + } 2.1467 + break; 2.1468 + case JPG: 2.1469 + d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG 2.1470 + break; 2.1471 + case PNG: 2.1472 + d.writeType("png "); // biCompression - png for PNG 2.1473 + break; 2.1474 + } 2.1475 + // For compressed video and YUV formats, this member is a FOURCC code, 2.1476 + // specified as a DWORD in little-endian order. For example, YUYV video 2.1477 + // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC 2.1478 + // Codes. 2.1479 + // 2.1480 + // For uncompressed RGB formats, the following values are possible: 2.1481 + // 2.1482 + // Value Description 2.1483 + // BI_RGB 0x00000000 Uncompressed RGB. 2.1484 + // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks. 2.1485 + // Valid for 16-bpp and 32-bpp bitmaps. 2.1486 + // 2.1487 + // Note that BI_JPG and BI_PNG are not valid video formats. 2.1488 + // 2.1489 + // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is 2.1490 + // always RGB 555. If biCompression equals BI_BITFIELDS, the format is 2.1491 + // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE 2.1492 + // structure to determine the specific RGB type. 
2.1493 + 2.1494 + switch (videoFormat) { 2.1495 + case RAW: 2.1496 + d.writeInt(0); // biSizeImage 2.1497 + break; 2.1498 + case RLE: 2.1499 + case JPG: 2.1500 + case PNG: 2.1501 + default: 2.1502 + if (imgDepth == 4) { 2.1503 + d.writeInt(imgWidth * imgHeight / 2); // biSizeImage 2.1504 + } else { 2.1505 + int bytesPerPixel = Math.max(1, imgDepth / 8); 2.1506 + d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage 2.1507 + } 2.1508 + break; 2.1509 + } 2.1510 + // Specifies the size, in bytes, of the image. This can be set to 0 for 2.1511 + // uncompressed RGB bitmaps. 2.1512 + 2.1513 + d.writeInt(0); // biXPelsPerMeter 2.1514 + // Specifies the horizontal resolution, in pixels per meter, of the 2.1515 + // target device for the bitmap. 2.1516 + 2.1517 + d.writeInt(0); // biYPelsPerMeter 2.1518 + // Specifies the vertical resolution, in pixels per meter, of the target 2.1519 + // device for the bitmap. 2.1520 + 2.1521 + d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed 2.1522 + // Specifies the number of color indices in the color table that are 2.1523 + // actually used by the bitmap. 2.1524 + 2.1525 + d.writeInt(0); // biClrImportant 2.1526 + // Specifies the number of color indices that are considered important 2.1527 + // for displaying the bitmap. If this value is zero, all colors are 2.1528 + // important. 2.1529 + 2.1530 + if (palette != null) { 2.1531 + for (int i = 0, n = palette.getMapSize(); i < n; ++i) { 2.1532 + /* 2.1533 + * typedef struct tagRGBQUAD { 2.1534 + BYTE rgbBlue; 2.1535 + BYTE rgbGreen; 2.1536 + BYTE rgbRed; 2.1537 + BYTE rgbReserved; // This member is reserved and must be zero. 2.1538 + } RGBQUAD; 2.1539 + */ 2.1540 + d.write(palette.getBlue(i)); 2.1541 + d.write(palette.getGreen(i)); 2.1542 + d.write(palette.getRed(i)); 2.1543 + d.write(0); 2.1544 + } 2.1545 + } 2.1546 + 2.1547 + 2.1548 + // ----------------- 2.1549 + aviChunk.finish(); 2.1550 + } 2.1551 +}
package com.aurellem.capture;

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;

/**
 * A video recorder that encodes each captured frame as PNG and writes it
 * into an AVI 1.0 container via {@link AVIOutputStream}. The output stream
 * and scratch frame are configured lazily on the first recorded frame,
 * once the width, height and frame rate inherited from
 * {@link AbstractVideoRecorder} are known.
 */
public class AVIVideoRecorder extends AbstractVideoRecorder {

    AVIOutputStream out = null;
    boolean videoReady = false;
    BufferedImage frame;

    /**
     * Creates a recorder targeting the given file, using PNG-compressed
     * frames at 24-bit color depth and maximum compression quality.
     *
     * @param output the AVI file to write.
     * @throws IOException if the output stream cannot be opened.
     */
    public AVIVideoRecorder(File output) throws IOException {
        super(output);
        out = new AVIOutputStream(output, AVIOutputStream.VideoFormat.PNG, 24);
        out.setVideoCompressionQuality(1.0f);
    }

    /**
     * One-time setup: allocates the RGB scratch frame and pushes the
     * detected frame rate and dimensions into the AVI stream.
     */
    public void initVideo() {
        frame = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        out.setFrameRate((int) Math.round(fps));
        out.setTimeScale(1);
        out.setVideoDimension(width, height);
        videoReady = true;
    }

    /**
     * Copies the raw frame into the RGB scratch image (dropping any alpha)
     * and appends it to the AVI file. I/O failures are reported to stderr
     * rather than propagated.
     *
     * @param rawFrame the frame captured from the framebuffer.
     */
    public void record(BufferedImage rawFrame) {
        if (!videoReady) {
            initVideo();
        }
        frame.getGraphics().drawImage(rawFrame, 0, 0, null);
        try {
            out.writeFrame(frame);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Finalizes the AVI file and closes the underlying stream. */
    public void finish() {
        System.out.println("I'm finished! <3");
        try {
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
package com.aurellem.capture;

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;

import com.jme3.app.Application;
import com.jme3.app.state.AppState;
import com.jme3.app.state.AppStateManager;
import com.jme3.post.SceneProcessor;
import com.jme3.renderer.Camera;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue;
import com.jme3.system.IsoTimer;
import com.jme3.texture.FrameBuffer;
import com.jme3.util.BufferUtils;
import com.jme3.util.Screenshots;

/**
 * <code>AbstractVideoRecorder</code> copies the frames it receives to video.
 * To ensure smooth video at a constant framerate, you should set your
 * application's timer to a new {@link IsoTimer}. This class will
 * auto-determine the framerate of the video based on the time difference
 * between the first two frames it receives, although you can manually set
 * the framerate by calling <code>setFps(newFramerate)</code>. Be sure to
 * place this processor *after* any other processors whose effects you want
 * to be included in the output video. You can attach multiple recorders
 * to the same <code>ViewPort</code>.
 *
 * For example,
 * <code>
 * someViewPort.addProcessor(new AVIVideoRecorder(file1));
 * someViewPort.addProcessor(someShadowRenderer);
 * someViewPort.addProcessor(new AVIVideoRecorder(file2));
 * </code>
 *
 * will output a video without shadows to <code>file1</code> and a video
 * with shadows to <code>file2</code>.
 *
 * @author Robert McIntyre
 */
public abstract class AbstractVideoRecorder
        implements SceneProcessor, IVideoRecorder, AppState {

    final File output;                 // destination file for the video
    Camera camera;                     // camera of the viewport being recorded
    int width;
    int height;
    String targetFileName;
    FrameBuffer frameBuffer;
    Double fps = null;                 // null until auto-detected or set via setFps()
    RenderManager renderManager;
    ByteBuffer byteBuffer;             // scratch buffer for framebuffer readback
    BufferedImage rawFrame;            // scratch image handed to record()
    boolean isInitilized = false;
    boolean paused = false;

    /**
     * @param output the file the concrete recorder will write to.
     * @throws IOException if the output path cannot be resolved.
     */
    public AbstractVideoRecorder(File output) throws IOException {
        this.output = output;
        this.targetFileName = this.output.getCanonicalPath();
    }

    /**
     * Returns the recording framerate.
     * NOTE(review): throws NullPointerException (via unboxing) if called
     * before the fps has been set or auto-detected in preFrame().
     */
    public double getFps() {
        return this.fps;
    }

    /** Sets the recording framerate; returns this for chaining. */
    public AbstractVideoRecorder setFps(double fps) {
        this.fps = fps;
        return this;
    }

    /** SceneProcessor callback: captures viewport size and allocates buffers. */
    public void initialize(RenderManager rm, ViewPort viewPort) {
        // Fixed: the original shadowed the `camera` field with a local,
        // leaving the field permanently null.
        this.camera = viewPort.getCamera();
        this.width = camera.getWidth();
        this.height = camera.getHeight();

        rawFrame = new BufferedImage(width, height,
                BufferedImage.TYPE_4BYTE_ABGR);
        byteBuffer = BufferUtils.createByteBuffer(width * height * 4);
        this.renderManager = rm;
        this.isInitilized = true;
    }

    public void reshape(ViewPort vp, int w, int h) {}

    public boolean isInitialized() {
        return this.isInitilized;
    }

    /** Auto-detects the framerate from the first frame's time delta. */
    public void preFrame(float tpf) {
        if (null == this.fps) {
            this.setFps(1.0 / tpf);
        }
    }

    public void postQueue(RenderQueue rq) {}

    /** Reads back the rendered frame and hands it to the concrete recorder. */
    public void postFrame(FrameBuffer out) {
        if (!this.paused) {
            byteBuffer.clear();
            renderManager.getRenderer().readFrameBuffer(out, byteBuffer);
            Screenshots.convertScreenShot(byteBuffer, rawFrame);
            record(rawFrame);
        }
    }

    /** SceneProcessor teardown: stops recording and finalizes the output. */
    public void cleanup() {
        this.pause();
        this.finish();
    }

    public void pause() {
        this.paused = true;
    }

    public void start() {
        this.paused = false;
    }

    // ------------------------- AppState methods -------------------------

    public void initialize(AppStateManager stateManager, Application app) {}

    public void setEnabled(boolean active) {
        if (active) {
            this.start();
        } else {
            this.pause();
        }
    }

    /**
     * Reports whether recording is active.
     * Fixed: the original returned {@code this.paused}, inverting the
     * AppState contract — setEnabled(true) calls start() (paused = false),
     * so isEnabled() must return the negation of paused.
     */
    public boolean isEnabled() {
        return !this.paused;
    }

    public void stateAttached(AppStateManager stateManager) {}

    /** Detaching the state finalizes the video, mirroring cleanup(). */
    public void stateDetached(AppStateManager stateManager) {
        this.pause();
        this.finish();
    }

    public void update(float tpf) {}

    public void render(RenderManager rm) {}

    public void postRender() {}
}
/**
 * An LwjglAudioRenderer that can render the scene's audio from the point of
 * view of multiple {@link Listener}s. Extra listeners live on a special
 * "Multiple Audio Send" OpenAL device which is driven via the native methods
 * declared below; each listener's rendered samples are delivered to a
 * registered {@link SoundProcessor}.
 */
public class AudioSend
    extends LwjglAudioRenderer implements MultiListener {

    /**
     * Keeps track of all the listeners which have been registered so far.
     * The first element is <code>null</code>, which represents the zeroth
     * LWJGL listener which is created automatically.
     */
    public Vector<Listener> listeners = new Vector<Listener>();

    public void initialize(){
        super.initialize();
        // Slot 0 stands in for the implicit LWJGL listener.
        listeners.add(null);
    }

    /**
     * OpenAL device ID required by the native methods below.
     * Currently obtained through reflection in initInThread().
     */
    private long deviceID;

    /**
     * Ensures that <code>deviceID</code> and <code>listeners</code> are
     * properly initialized before any additional listeners are added.
     * Counted down at the end of initInThread().
     */
    private CountDownLatch latch = new CountDownLatch(1);

    /**
     * Blocks until initInThread() has completed.
     * NOTE(review): currently unused — callers inline the same
     * latch.await() pattern instead of calling this helper.
     */
    private void waitForInit(){
        try {latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
    }

    /**
     * Each listener (including the main LWJGL listener, keyed by
     * <code>null</code>) can be registered with a <code>SoundProcessor</code>,
     * which this Renderer will call whenever there is new audio data to be
     * processed.
     */
    public HashMap<Listener, SoundProcessor> soundProcessorMap =
        new HashMap<Listener, SoundProcessor>();

    /**
     * Create a new slave context on the recorder device which will render all
     * the sounds in the main LWJGL context with respect to this listener.
     * Blocks until device initialization has finished.
     */
    public void addListener(Listener l) {
        try {this.latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        this.addListener();
        this.listeners.add(l);
    }

    /**
     * Whenever new data is rendered in the perspective of this listener,
     * this Renderer will send that data to the SoundProcessor of your choosing.
     */
    public void registerSoundProcessor(Listener l, SoundProcessor sp) {
        this.soundProcessorMap.put(l, sp);
    }

    /**
     * Registers a SoundProcessor for the main LWJGL context. If all you want
     * to do is record the sound you would normally hear in your application,
     * then this is the only method you have to worry about.
     */
    public void registerSoundProcessor(SoundProcessor sp){
        // register a sound processor for the default listener (key null).
        this.soundProcessorMap.put(null, sp);
    }

    private static final Logger logger =
        Logger.getLogger(AudioSend.class.getName());


    //////////// Native Methods

    /**
     * This establishes the LWJGL context as the context which will be copied
     * to all other contexts. It must be called before any calls to
     * <code>addListener()</code>.
     */
    public void initDevice(){
        ninitDevice(this.deviceID);}
    public static native void ninitDevice(long device);

    /**
     * The send device does not automatically process sound. This step function
     * will cause the desired number of samples to be processed for each
     * listener. The results will then be available via calls to
     * <code>getSamples()</code> for each listener.
     * @param samples number of samples to process.
     */
    public void step(int samples){
        nstep(this.deviceID, samples);}
    public static native void nstep(long device, int samples);

    /**
     * Retrieve the final rendered sound for a particular listener.
     * <code>contextNum == 0</code> is the main LWJGL context.
     * @param buffer destination; data is written starting at its position.
     * @param samples number of samples to copy.
     * @param contextNum which listener's context to read from.
     */
    public void getSamples(ByteBuffer buffer, int samples, int contextNum){
        ngetSamples(this.deviceID, buffer, buffer.position(), samples, contextNum);}
    public static native void ngetSamples(
        long device, ByteBuffer buffer, int position, int samples, int contextNum);

    /**
     * Create an additional listener on the recorder device. The device itself
     * will manage this listener and synchronize it with the main LWJGL
     * context. Processed sound samples for this listener will be available via
     * a call to <code>getSamples()</code> with <code>contextNum</code> equal
     * to the number of times this method has been called.
     */
    public void addListener(){naddListener(this.deviceID);}
    public static native void naddListener(long device);

    /**
     * This will internally call <code>alListener3f</code> in the appropriate
     * slave context and update that context's listener's parameters. Calling
     * this for a number greater than the current number of slave contexts will
     * have no effect.
     * @param pname OpenAL listener parameter name (e.g. AL10.AL_POSITION).
     * @param v1 first component.
     * @param v2 second component.
     * @param v3 third component.
     * @param contextNum which listener's context to update.
     */
    public void setNthListener3f(int pname, float v1, float v2, float v3, int contextNum){
        nsetNthListener3f(pname, v1, v2, v3, this.deviceID, contextNum);}
    public static native void
    nsetNthListener3f(int pname, float v1, float v2, float v3, long device, int contextNum);

    /**
     * This will internally call <code>alListenerf</code> in the appropriate
     * slave context and update that context's listener's parameters. Calling
     * this for a number greater than the current number of slave contexts will
     * have no effect.
     * @param pname OpenAL listener parameter name (e.g. AL10.AL_GAIN).
     * @param v1 the value.
     * @param contextNum which listener's context to update.
     */
    public void setNthListenerf(int pname, float v1, int contextNum){
        nsetNthListenerf(pname, v1, this.deviceID, contextNum);}
    public static native void nsetNthListenerf(int pname, float v1, long device, int contextNum);

    /**
     * Instead of taking whatever device is available on the system, this call
     * creates the "Multiple Audio Send" device, which supports multiple
     * listeners in a limited capacity. For each listener, the device renders
     * it not to the sound device, but instead to buffers which it makes
     * available via JNI.
     */
    public void initInThread(){
        try{
            if (!AL.isCreated()){
                // 44100 Hz, 60 fps refresh; dispatchAudio() assumes this rate.
                AL.create("Multiple Audio Send", 44100, 60, false);
            }
        }catch (OpenALException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }catch (LWJGLException ex){
            logger.log(Level.SEVERE, "Failed to load audio library", ex);
            System.exit(1);
            return;
        }
        super.initInThread();

        ALCdevice device = AL.getDevice();

        // RLM: use reflection to grab the ID of our device for use later,
        // since LWJGL does not expose the raw device pointer publicly.
        try {
            Field deviceIDField;
            deviceIDField = ALCdevice.class.getDeclaredField("device");
            deviceIDField.setAccessible(true);
            try {deviceID = (Long)deviceIDField.get(device);}
            catch (IllegalArgumentException e) {e.printStackTrace();}
            catch (IllegalAccessException e) {e.printStackTrace();}
            deviceIDField.setAccessible(false);}
        catch (SecurityException e) {e.printStackTrace();}
        catch (NoSuchFieldException e) {e.printStackTrace();}

        // the LWJGL context must be established as the master context before
        // any other listeners can be created on this device.
        initDevice();
        // Now, everything is initialized, and it is safe to add more listeners.
        latch.countDown();
    }

    /** Cleans up every registered SoundProcessor, then the renderer itself. */
    public void cleanup(){
        for(SoundProcessor sp : this.soundProcessorMap.values()){
            sp.cleanup();
        }
        super.cleanup();
    }

    /**
     * Pushes each registered Listener's position, velocity, orientation, and
     * gain into its slave context. Slot 0 (null) is skipped — the main LWJGL
     * listener is managed by the superclass.
     * NOTE(review): AL_ORIENTATION is passed only the "up" vector (3 floats);
     * OpenAL orientation normally takes 6 floats (at + up) — confirm the
     * native side expects this 3-float form.
     */
    public void updateAllListeners(){
        for (int i = 0; i < this.listeners.size(); i++){
            Listener lis = this.listeners.get(i);
            if (null != lis){
                Vector3f location = lis.getLocation();
                Vector3f velocity = lis.getVelocity();
                Vector3f orientation = lis.getUp();
                float gain = lis.getVolume();
                setNthListener3f(AL10.AL_POSITION,
                                 location.x, location.y, location.z, i);
                setNthListener3f(AL10.AL_VELOCITY,
                                 velocity.x, velocity.y, velocity.z, i);
                setNthListener3f(AL10.AL_ORIENTATION,
                                 orientation.x, orientation.y, orientation.z, i);
                setNthListenerf(AL10.AL_GAIN, gain, i);
            }
        }
    }

    /** Bytes per sample delivered to SoundProcessors. */
    public final static int BYTES_PER_SAMPLE = 4;

    // NOTE(review): fixed 4096-byte buffer holds 1024 samples; at 44100 Hz a
    // frame with tpf > ~23 ms requests more samples than the buffer holds —
    // verify the native side respects the buffer capacity.
    private ByteBuffer buffer = BufferUtils.createByteBuffer(4096);

    /**
     * Steps the send device by the number of samples corresponding to this
     * frame's duration (assuming 44100 Hz), updates every listener, and
     * delivers each listener's rendered samples to its SoundProcessor.
     */
    public void dispatchAudio(float tpf){
        int samplesToGet = (int) (tpf * 44100);
        try {latch.await();}
        catch (InterruptedException e) {e.printStackTrace();}
        step(samplesToGet);
        updateAllListeners();

        for (int i = 0; i < this.listeners.size(); i++){
            buffer.clear();
            this.getSamples(buffer, samplesToGet, i);
            SoundProcessor sp =
                this.soundProcessorMap.get(this.listeners.get(i));
            if (null != sp){sp.process(buffer, samplesToGet*BYTES_PER_SAMPLE);}
        }

    }

    public void update(float tpf){
        super.update(tpf);
        dispatchAudio(tpf);
    }

}
6.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 6.2 +++ b/src/com/aurellem/capture/Capture.java Tue Oct 25 11:55:55 2011 -0700 6.3 @@ -0,0 +1,23 @@ 6.4 +package com.aurellem.capture; 6.5 + 6.6 +import java.io.File; 6.7 +import java.io.IOException; 6.8 + 6.9 +import com.jme3.app.Application; 6.10 +import com.jme3.math.ColorRGBA; 6.11 + 6.12 +public class Capture { 6.13 + 6.14 + public static void SimpleCaptureVideo(Application app, File file) throws IOException{ 6.15 + app.getViewPort().setClearFlags(true, true, true); 6.16 + app.getViewPort().setBackgroundColor(ColorRGBA.Black); 6.17 + AVIVideoRecorder videoRecorder = new AVIVideoRecorder(file); 6.18 + app.getStateManager().attach(videoRecorder); 6.19 + app.getViewPort().addFinalProcessor(videoRecorder); 6.20 + } 6.21 + 6.22 + 6.23 + 6.24 + 6.25 + 6.26 +}
/**
 * This output stream filter supports common data types used inside
 * of AVI RIFF Data Chunks.
 * <p>
 * All multi-byte values are written in little-endian byte order (least
 * significant byte first), as the RIFF container requires — note this is the
 * opposite of {@code java.io.DataOutputStream}.
 *
 * @author Werner Randelshofer
 * @version 1.1 2011-01-17 Adds functionality for blocking flush and close.
 * <br>1.0.1 2010-04-05 Removed unused constants.
 * <br>1.0 2008-08-11 Created.
 */
public class DataChunkOutputStream extends FilterOutputStream {

    /**
     * The number of bytes written to the data output stream so far.
     * If this counter overflows, it is clamped to Long.MAX_VALUE.
     */
    protected long written;

    /** Whether flush and close requests shall be forwarded to the underlying stream. */
    private boolean forwardFlushAndClose;

    /** Creates a stream that forwards flush and close to {@code out}. */
    public DataChunkOutputStream(OutputStream out) {
        this(out, true);
    }

    /**
     * @param out the underlying stream.
     * @param forwardFlushAndClose if false, flush() and close() are no-ops,
     *        which lets a chunk be closed without closing the whole file.
     */
    public DataChunkOutputStream(OutputStream out, boolean forwardFlushAndClose) {
        super(out);
        this.forwardFlushAndClose = forwardFlushAndClose;
    }

    /**
     * Writes a chunk type identifier (4 ASCII bytes, e.g. "RIFF").
     *
     * @param s A string with a length of 4 characters.
     * @throws IllegalArgumentException if the string is not 4 characters long.
     * @throws IOException if an I/O error occurs.
     */
    public void writeType(String s) throws IOException {
        if (s.length() != 4) {
            throw new IllegalArgumentException("type string must have 4 characters");
        }

        try {
            out.write(s.getBytes("ASCII"), 0, 4);
            incCount(4);
        } catch (UnsupportedEncodingException e) {
            // ASCII is guaranteed on every JVM, so this cannot happen.
            throw new InternalError(e.toString());
        }
    }

    /**
     * Writes out a <code>byte</code> to the underlying output stream as
     * a 1-byte value. If no exception is thrown, the counter
     * <code>written</code> is incremented by <code>1</code>.
     *
     * @param v a <code>byte</code> value to be written.
     * @exception IOException if an I/O error occurs.
     * @see java.io.FilterOutputStream#out
     */
    public final void writeByte(int v) throws IOException {
        out.write(v);
        incCount(1);
    }

    /**
     * Writes <code>len</code> bytes from the specified byte array
     * starting at offset <code>off</code> to the underlying output stream.
     * If no exception is thrown, the counter <code>written</code> is
     * incremented by <code>len</code>.
     *
     * @param b the data.
     * @param off the start offset in the data.
     * @param len the number of bytes to write.
     * @exception IOException if an I/O error occurs.
     * @see java.io.FilterOutputStream#out
     */
    @Override
    public synchronized void write(byte b[], int off, int len)
            throws IOException {
        out.write(b, off, len);
        incCount(len);
    }

    /**
     * Writes the specified byte (the low eight bits of the argument
     * <code>b</code>) to the underlying output stream. If no exception
     * is thrown, the counter <code>written</code> is incremented by
     * <code>1</code>.
     *
     * @param b the <code>byte</code> to be written.
     * @exception IOException if an I/O error occurs.
     * @see java.io.FilterOutputStream#out
     */
    @Override
    public synchronized void write(int b) throws IOException {
        out.write(b);
        incCount(1);
    }

    /**
     * Writes an <code>int</code> to the underlying output stream as four
     * bytes, low byte first (little-endian). If no exception is thrown, the
     * counter <code>written</code> is incremented by <code>4</code>.
     *
     * @param v an <code>int</code> to be written.
     * @exception IOException if an I/O error occurs.
     * @see java.io.FilterOutputStream#out
     */
    public void writeInt(int v) throws IOException {
        out.write((v >>> 0) & 0xff);
        out.write((v >>> 8) & 0xff);
        out.write((v >>> 16) & 0xff);
        out.write((v >>> 24) & 0xff);
        incCount(4);
    }

    /**
     * Writes an unsigned 32 bit integer value, low byte first.
     *
     * @param v The value
     * @throws java.io.IOException
     */
    public void writeUInt(long v) throws IOException {
        out.write((int) ((v >>> 0) & 0xff));
        out.write((int) ((v >>> 8) & 0xff));
        out.write((int) ((v >>> 16) & 0xff));
        out.write((int) ((v >>> 24) & 0xff));
        incCount(4);
    }

    /**
     * Writes a signed 16 bit integer value, low byte first.
     *
     * @param v The value
     * @throws java.io.IOException
     */
    public void writeShort(int v) throws IOException {
        out.write((v >>> 0) & 0xff);
        out.write((v >>> 8) & 0xff);
        incCount(2);
    }

    /** Writes a signed 64 bit integer value, low byte first. */
    public void writeLong(long v) throws IOException {
        out.write((int) (v >>> 0) & 0xff);
        out.write((int) (v >>> 8) & 0xff);
        out.write((int) (v >>> 16) & 0xff);
        out.write((int) (v >>> 24) & 0xff);
        out.write((int) (v >>> 32) & 0xff);
        out.write((int) (v >>> 40) & 0xff);
        out.write((int) (v >>> 48) & 0xff);
        out.write((int) (v >>> 56) & 0xff);
        incCount(8);
    }

    /** Writes an unsigned 16 bit integer value, low byte first. */
    public void writeUShort(int v) throws IOException {
        out.write((v >>> 0) & 0xff);
        out.write((v >>> 8) & 0xff);
        incCount(2);
    }

    /**
     * Increases the written counter by the specified value
     * until it reaches Long.MAX_VALUE.
     */
    protected void incCount(int value) {
        long temp = written + value;
        if (temp < 0) {
            temp = Long.MAX_VALUE;
        }
        written = temp;
    }

    /**
     * Returns the current value of the counter <code>written</code>,
     * the number of bytes written to this data output stream so far.
     * If the counter overflows, it is clamped to Long.MAX_VALUE.
     *
     * @return the value of the <code>written</code> field.
     * @see java.io.DataOutputStream#written
     */
    public final long size() {
        return written;
    }

    /**
     * Sets the value of the counter <code>written</code> to 0.
     */
    public void clearCount() {
        written = 0;
    }

    @Override
    public void close() throws IOException {
        if (forwardFlushAndClose) {
            super.close();
        }
    }

    @Override
    public void flush() throws IOException {
        if (forwardFlushAndClose) {
            super.flush();
        }
    }

}
/**
 * Placeholder implementation of jME3's {@link AudioRenderer}.
 * Every method body is an auto-generated TODO stub; as written, this class
 * performs no audio rendering at all.
 */
public class FileAudioRenderer implements AudioRenderer{


    public void setListener(Listener listener) {
        // TODO Auto-generated method stub

    }


    public void setEnvironment(Environment env) {
        // TODO Auto-generated method stub

    }

    @Override
    public void playSourceInstance(AudioNode src) {
        // TODO Auto-generated method stub

    }

    @Override
    public void playSource(AudioNode src) {
        // TODO Auto-generated method stub

    }

    @Override
    public void pauseSource(AudioNode src) {
        // TODO Auto-generated method stub

    }

    @Override
    public void stopSource(AudioNode src) {
        // TODO Auto-generated method stub

    }

    @Override
    public void updateSourceParam(AudioNode src, AudioParam param) {
        // TODO Auto-generated method stub

    }

    @Override
    public void updateListenerParam(Listener listener, ListenerParam param) {
        // TODO Auto-generated method stub

    }

    @Override
    public void deleteAudioData(AudioData ad) {
        // TODO Auto-generated method stub

    }

    @Override
    public void initialize() {
        // TODO Auto-generated method stub

    }

    @Override
    public void update(float tpf) {
        // TODO Auto-generated method stub

    }

    @Override
    public void cleanup() {
        // TODO Auto-generated method stub

    }

}
9.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 9.2 +++ b/src/com/aurellem/capture/IVideoRecorder.java Tue Oct 25 11:55:55 2011 -0700 9.3 @@ -0,0 +1,21 @@ 9.4 +package com.aurellem.capture; 9.5 + 9.6 +import java.awt.image.BufferedImage; 9.7 + 9.8 +public interface IVideoRecorder{ 9.9 + 9.10 + void record(BufferedImage image); 9.11 + 9.12 + void pause(); 9.13 + 9.14 + void start(); 9.15 + 9.16 + /** 9.17 + * closes the video file, writing appropriate headers, trailers, etc. 9.18 + * After this is called, no more recording can be done. 9.19 + */ 9.20 + void finish(); 9.21 + 9.22 +} 9.23 + 9.24 +
/**
 * Adapts an {@code ImageOutputStream} for classes requiring an
 * {@code OutputStream}.
 *
 * @author Werner Randelshofer
 * @version 1.1 2011-01-07 Fixes performance.
 * <br>1.0 2010-12-26 Created.
 */
public class ImageOutputStreamAdapter extends OutputStream {

    /**
     * The underlying image output stream that all writes are delegated to.
     */
    protected ImageOutputStream out;

    /**
     * Creates an output stream adapter built on top of the specified
     * underlying image output stream.
     *
     * @param out the underlying stream to be assigned to
     *            the field <tt>this.out</tt> for later use, or
     *            <code>null</code> if this instance is to be
     *            created without an underlying stream.
     */
    public ImageOutputStreamAdapter(ImageOutputStream out) {
        this.out = out;
    }

    /**
     * Writes the specified <code>byte</code> to this output stream by
     * delegating to <tt>out.write(b)</tt>.
     * <p>
     * Implements the abstract <tt>write</tt> method of <tt>OutputStream</tt>.
     *
     * @param b the <code>byte</code>.
     * @exception IOException if an I/O error occurs.
     */
    @Override
    public void write(int b) throws IOException {
        out.write(b);
    }

    /**
     * Writes <code>b.length</code> bytes to this output stream by calling
     * the three-argument <code>write</code> method with the arguments
     * <code>b</code>, <code>0</code>, and <code>b.length</code>.
     *
     * @param b the data to be written.
     * @exception IOException if an I/O error occurs.
     * @see #write(byte[], int, int)
     */
    @Override
    public void write(byte b[]) throws IOException {
        write(b, 0, b.length);
    }

    /**
     * Writes <code>len</code> bytes from the specified
     * <code>byte</code> array starting at offset <code>off</code> to
     * this output stream, as a single bulk delegation to the underlying
     * <code>ImageOutputStream</code> (no per-byte loop).
     *
     * @param b the data.
     * @param off the start offset in the data.
     * @param len the number of bytes to write.
     * @exception IOException if an I/O error occurs.
     * @see #write(int)
     */
    @Override
    public void write(byte b[], int off, int len) throws IOException {
        out.write(b,off,len);
    }

    /**
     * Flushes this output stream by flushing the underlying
     * <code>ImageOutputStream</code>.
     *
     * @exception IOException if an I/O error occurs.
     */
    @Override
    public void flush() throws IOException {
        out.flush();
    }

    /**
     * Closes this output stream and releases any system resources
     * associated with the stream: it first flushes, then — even if the
     * flush throws — closes the underlying stream.
     *
     * @exception IOException if an I/O error occurs.
     */
    @Override
    public void close() throws IOException {
        try {
            flush();
        } finally {
            out.close();
        }
    }
}
11.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 11.2 +++ b/src/com/aurellem/capture/Main.java Tue Oct 25 11:55:55 2011 -0700 11.3 @@ -0,0 +1,110 @@ 11.4 +/** 11.5 + * @(#)Main.java 1.2 2009-08-29 11.6 + * 11.7 + * Copyright (c) 2008-2009 Werner Randelshofer, Immensee, Switzerland. 11.8 + * All rights reserved. 11.9 + * 11.10 + * You may not use, copy or modify this file, except in compliance with the 11.11 + * license agreement you entered into with Werner Randelshofer. 11.12 + * For details see accompanying license terms. 11.13 + */ 11.14 +package com.aurellem.capture; 11.15 + 11.16 +import java.awt.*; 11.17 +import java.awt.image.BufferedImage; 11.18 +import java.awt.image.IndexColorModel; 11.19 +import java.io.*; 11.20 +import java.util.Random; 11.21 + 11.22 + 11.23 +/** 11.24 + * Main. 11.25 + * 11.26 + * @author Werner Randelshofer 11.27 + * @version 1.1 2009-08-29 Added raw output. 11.28 + * <br>1.0 2008-00-15 Created. 11.29 + */ 11.30 +public class Main { 11.31 + 11.32 + /** 11.33 + * @param args the command line arguments 11.34 + */ 11.35 + public static void main(String[] args) { 11.36 + try { 11.37 + test(new File("/home/r/avidemo-jpg.avi"), AVIOutputStream.VideoFormat.JPG, 24, 1f); 11.38 + test(new File("/home/r/avidemo-png.avi"), AVIOutputStream.VideoFormat.PNG, 24, 1f); 11.39 + test(new File("/home/r/avidemo-raw.avi"), AVIOutputStream.VideoFormat.RAW, 24, 1f); 11.40 + test(new File("/home/r/avidemo-rle8.avi"), AVIOutputStream.VideoFormat.RLE, 8, 1f); 11.41 + test(new File("avidemo-rle4.avi"), AVIOutputStream.VideoFormat.RLE, 4, 1f); 11.42 + 11.43 + } catch (IOException ex) { 11.44 + ex.printStackTrace(); 11.45 + } 11.46 + } 11.47 + 11.48 + private static void test(File file, AVIOutputStream.VideoFormat format, int depth, float quality) throws IOException { 11.49 + System.out.println("Writing " + file); 11.50 + AVIOutputStream out = null; 11.51 + Graphics2D g = null; 11.52 + try { 11.53 + out = new AVIOutputStream(file, format, depth); 11.54 + 
out.setVideoCompressionQuality(quality); 11.55 + 11.56 + out.setTimeScale(1); 11.57 + out.setFrameRate(30); 11.58 + 11.59 + Random rnd = new Random(0); // use seed 0 to get reproducable output 11.60 + BufferedImage img; 11.61 + switch (depth) { 11.62 + case 24: 11.63 + default: { 11.64 + img = new BufferedImage(320, 160, BufferedImage.TYPE_INT_RGB); 11.65 + break; 11.66 + } 11.67 + case 8: { 11.68 + byte[] red = new byte[256]; 11.69 + byte[] green = new byte[256]; 11.70 + byte[] blue = new byte[256]; 11.71 + for (int i = 0; i < 255; i++) { 11.72 + red[i] = (byte) rnd.nextInt(256); 11.73 + green[i] = (byte) rnd.nextInt(256); 11.74 + blue[i] = (byte) rnd.nextInt(256); 11.75 + } 11.76 + rnd.setSeed(0); // set back to 0 for reproducable output 11.77 + img = new BufferedImage(320, 160, BufferedImage.TYPE_BYTE_INDEXED, new IndexColorModel(8, 256, red, green, blue)); 11.78 + break; 11.79 + } 11.80 + case 4: { 11.81 + byte[] red = new byte[16]; 11.82 + byte[] green = new byte[16]; 11.83 + byte[] blue = new byte[16]; 11.84 + for (int i = 0; i < 15; i++) { 11.85 + red[i] = (byte) rnd.nextInt(16); 11.86 + green[i] = (byte) rnd.nextInt(16); 11.87 + blue[i] = (byte) rnd.nextInt(16); 11.88 + } 11.89 + rnd.setSeed(0); // set back to 0 for reproducable output 11.90 + img = new BufferedImage(320, 160, BufferedImage.TYPE_BYTE_BINARY, new IndexColorModel(4, 16, red, green, blue)); 11.91 + break; 11.92 + } 11.93 + } 11.94 + g = img.createGraphics(); 11.95 + g.setBackground(Color.WHITE); 11.96 + g.clearRect(0, 0, img.getWidth(), img.getHeight()); 11.97 + 11.98 + for (int i = 0; i < 100; i++) { 11.99 + g.setColor(new Color(rnd.nextInt())); 11.100 + g.fillRect(rnd.nextInt(img.getWidth() - 30), rnd.nextInt(img.getHeight() - 30), 30, 30); 11.101 + out.writeFrame(img); 11.102 + } 11.103 + 11.104 + } finally { 11.105 + if (g != null) { 11.106 + g.dispose(); 11.107 + } 11.108 + if (out != null) { 11.109 + out.close(); 11.110 + } 11.111 + } 11.112 + } 11.113 +}
12.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 12.2 +++ b/src/com/aurellem/capture/MicrosoftRLEEncoder.java Tue Oct 25 11:55:55 2011 -0700 12.3 @@ -0,0 +1,400 @@ 12.4 +/* 12.5 + * @(#)AppleRLEEncoder.java 1.1.1 2011-01-17 12.6 + * 12.7 + * Copyright © 2011 Werner Randelshofer, Immensee, Switzerland. 12.8 + * All rights reserved. 12.9 + * 12.10 + * You may not use, copy or modify this file, except in compliance with the 12.11 + * license agreement you entered into with Werner Randelshofer. 12.12 + * For details see accompanying license terms. 12.13 + */ 12.14 +package com.aurellem.capture; 12.15 + 12.16 +import java.io.ByteArrayOutputStream; 12.17 +import java.io.IOException; 12.18 +import java.io.OutputStream; 12.19 +import java.util.Arrays; 12.20 + 12.21 +/** 12.22 + * Implements the run length encoding of the Microsoft RLE format. 12.23 + * <p> 12.24 + * Each line of a frame is compressed individually. A line consists of two-byte 12.25 + * op-codes optionally followed by data. The end of the line is marked with 12.26 + * the EOL op-code. 12.27 + * <p> 12.28 + * The following op-codes are supported: 12.29 + * <ul> 12.30 + * <li>{@code 0x00 0x00} 12.31 + * <br>Marks the end of a line.</li> 12.32 + * 12.33 + * <li>{@code 0x00 0x01} 12.34 + * <br>Marks the end of the bitmap.</li> 12.35 + * 12.36 + * <li>{@code 0x00 0x02 x y} 12.37 + * <br> Marks a delta (skip). {@code x} and {@code y} 12.38 + * indicate the horizontal and vertical offset from the current position. 12.39 + * {@code x} and {@code y} are unsigned 8-bit values.</li> 12.40 + * 12.41 + * <li>{@code 0x00 n data{n} 0x00?} 12.42 + * <br> Marks a literal run. {@code n} 12.43 + * gives the number of data bytes that follow. {@code n} must be between 3 and 12.44 + * 255. If n is odd, a pad byte with the value 0x00 must be added. 12.45 + * </li> 12.46 + * <li>{@code n data} 12.47 + * <br> Marks a repetition. {@code n} 12.48 + * gives the number of times the data byte is repeated. 
{@code n} must be 12.49 + * between 1 and 255. 12.50 + * </li> 12.51 + * </ul> 12.52 + * Example: 12.53 + * <pre> 12.54 + * Compressed data Expanded data 12.55 + * 12.56 + * 03 04 04 04 04 12.57 + * 05 06 06 06 06 06 06 12.58 + * 00 03 45 56 67 00 45 56 67 12.59 + * 02 78 78 78 12.60 + * 00 02 05 01 Move 5 right and 1 down 12.61 + * 02 78 78 78 12.62 + * 00 00 End of line 12.63 + * 09 1E 1E 1E 1E 1E 1E 1E 1E 1E 1E 12.64 + * 00 01 End of RLE bitmap 12.65 + * </pre> 12.66 + * 12.67 + * References:<br/> 12.68 + * <a href="http://wiki.multimedia.cx/index.php?title=Microsoft_RLE">http://wiki.multimedia.cx/index.php?title=Microsoft_RLE</a><br> 12.69 + * 12.70 + * @author Werner Randelshofer 12.71 + * @version 1.1.1 2011-01-17 Removes unused imports. 12.72 + * <br>1.1 2011-01-07 Improves performance. 12.73 + * <br>1.0 2011-01-05 Created. 12.74 + */ 12.75 +public class MicrosoftRLEEncoder { 12.76 + 12.77 + private SeekableByteArrayOutputStream tempSeek=new SeekableByteArrayOutputStream(); 12.78 + private DataChunkOutputStream temp=new DataChunkOutputStream(tempSeek); 12.79 + 12.80 + /** Encodes a 8-bit key frame. 12.81 + * 12.82 + * @param temp The output stream. Must be set to Big-Endian. 12.83 + * @param data The image data. 12.84 + * @param offset The offset to the first pixel in the data array. 12.85 + * @param length The width of the image in data elements. 12.86 + * @param step The number to add to offset to get to the next scanline. 
12.87 + */ 12.88 + public void writeKey8(OutputStream out, byte[] data, int offset, int length, int step, int height) 12.89 + throws IOException { 12.90 + tempSeek.reset(); 12.91 + int ymax = offset + height * step; 12.92 + int upsideDown = ymax-step+offset; 12.93 + 12.94 + // Encode each scanline separately 12.95 + for (int y = offset; y < ymax; y += step) { 12.96 + int xy = upsideDown-y; 12.97 + int xymax = xy + length; 12.98 + 12.99 + int literalCount = 0; 12.100 + int repeatCount = 0; 12.101 + for (; xy < xymax; ++xy) { 12.102 + // determine repeat count 12.103 + byte v = data[xy]; 12.104 + for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) { 12.105 + if (data[xy] != v) { 12.106 + break; 12.107 + } 12.108 + } 12.109 + xy -= repeatCount; 12.110 + if (repeatCount < 3) { 12.111 + literalCount++; 12.112 + if (literalCount == 254) { 12.113 + temp.write(0);temp.write(literalCount); // Literal OP-code 12.114 + temp.write(data, xy - literalCount + 1, literalCount); 12.115 + literalCount = 0; 12.116 + } 12.117 + } else { 12.118 + if (literalCount > 0) { 12.119 + if (literalCount < 3) { 12.120 + for (; literalCount > 0; --literalCount) { 12.121 + temp.write(1); // Repeat OP-code 12.122 + temp.write(data[xy - literalCount]); 12.123 + } 12.124 + } else { 12.125 + temp.write(0);temp.write(literalCount); // Literal OP-code 12.126 + temp.write(data, xy - literalCount, literalCount); 12.127 + if (literalCount % 2 == 1) { 12.128 + temp.write(0); // pad byte 12.129 + } 12.130 + literalCount = 0; 12.131 + } 12.132 + } 12.133 + temp.write(repeatCount); // Repeat OP-code 12.134 + temp.write(v); 12.135 + xy += repeatCount - 1; 12.136 + } 12.137 + } 12.138 + 12.139 + // flush literal run 12.140 + if (literalCount > 0) { 12.141 + if (literalCount < 3) { 12.142 + for (; literalCount > 0; --literalCount) { 12.143 + temp.write(1); // Repeat OP-code 12.144 + temp.write(data[xy - literalCount]); 12.145 + } 12.146 + } else { 12.147 + 
temp.write(0);temp.write(literalCount); 12.148 + temp.write(data, xy - literalCount, literalCount); 12.149 + if (literalCount % 2 == 1) { 12.150 + temp.write(0); // pad byte 12.151 + } 12.152 + } 12.153 + literalCount = 0; 12.154 + } 12.155 + 12.156 + temp.write(0);temp.write(0x0000);// End of line 12.157 + } 12.158 + temp.write(0);temp.write(0x0001);// End of bitmap 12.159 + tempSeek.toOutputStream(out); 12.160 + } 12.161 + 12.162 + /** Encodes a 8-bit delta frame. 12.163 + * 12.164 + * @param temp The output stream. Must be set to Big-Endian. 12.165 + * @param data The image data. 12.166 + * @param prev The image data of the previous frame. 12.167 + * @param offset The offset to the first pixel in the data array. 12.168 + * @param length The width of the image in data elements. 12.169 + * @param step The number to add to offset to get to the next scanline. 12.170 + */ 12.171 + public void writeDelta8(OutputStream out, byte[] data, byte[] prev, int offset, int length, int step, int height) 12.172 + throws IOException { 12.173 + 12.174 +tempSeek.reset(); 12.175 + // Determine whether we can skip lines at the beginning 12.176 + int ymin; 12.177 + int ymax = offset + height * step; 12.178 + int upsideDown = ymax-step+offset; 12.179 + scanline: 12.180 + for (ymin = offset; ymin < ymax; ymin += step) { 12.181 + int xy = upsideDown-ymin; 12.182 + int xymax = xy + length; 12.183 + for (; xy < xymax; ++xy) { 12.184 + if (data[xy] != prev[xy]) { 12.185 + break scanline; 12.186 + } 12.187 + } 12.188 + } 12.189 + 12.190 + if (ymin == ymax) { 12.191 + // => Frame is identical to previous one 12.192 + temp.write(0);temp.write(0x0001); // end of bitmap 12.193 + return; 12.194 + } 12.195 + 12.196 + if (ymin > offset) { 12.197 + int verticalOffset = ymin / step; 12.198 + while (verticalOffset > 255) { 12.199 + temp.write(0);temp.write(0x0002); // Skip OP-code 12.200 + temp.write(0); // horizontal offset 12.201 + temp.write(255); // vertical offset 12.202 + verticalOffset -= 255; 
12.203 + } 12.204 + if (verticalOffset == 1) { 12.205 + temp.write(0);temp.write(0x0000); // End of line OP-code 12.206 + } else { 12.207 + temp.write(0);temp.write(0x0002); // Skip OP-code 12.208 + temp.write(0); // horizontal offset 12.209 + temp.write(verticalOffset); // vertical offset 12.210 + } 12.211 + } 12.212 + 12.213 + 12.214 + // Determine whether we can skip lines at the end 12.215 + scanline: 12.216 + for (; ymax > ymin; ymax -= step) { 12.217 + int xy = upsideDown-ymax+step; 12.218 + int xymax = xy + length; 12.219 + for (; xy < xymax; ++xy) { 12.220 + if (data[xy] != prev[xy]) { 12.221 + break scanline; 12.222 + } 12.223 + } 12.224 + } 12.225 + //System.out.println("MicrosoftRLEEncoder ymin:" + ymin / step + " ymax" + ymax / step); 12.226 + 12.227 + 12.228 + // Encode each scanline 12.229 + int verticalOffset = 0; 12.230 + for (int y = ymin; y < ymax; y += step) { 12.231 + int xy = upsideDown-y; 12.232 + int xymax = xy + length; 12.233 + 12.234 + // determine skip count 12.235 + int skipCount = 0; 12.236 + for (; xy < xymax; ++xy, ++skipCount) { 12.237 + if (data[xy] != prev[xy]) { 12.238 + break; 12.239 + } 12.240 + } 12.241 + if (skipCount == length) { 12.242 + // => the entire line can be skipped 12.243 + ++verticalOffset; 12.244 + if (verticalOffset == 255) { 12.245 + temp.write(0);temp.write(0x0002); // Skip OP-code 12.246 + temp.write(0); // horizontal offset 12.247 + temp.write(255); // vertical offset 12.248 + verticalOffset = 0; 12.249 + } 12.250 + continue; 12.251 + } 12.252 + 12.253 + if (verticalOffset > 0 || skipCount > 0) { 12.254 + if (verticalOffset == 1 && skipCount == 0) { 12.255 + temp.write(0);temp.write(0x0000); // End of line OP-code 12.256 + } else { 12.257 + temp.write(0);temp.write(0x0002); // Skip OP-code 12.258 + temp.write(Math.min(255, skipCount)); // horizontal offset 12.259 + skipCount -= 255; 12.260 + temp.write(verticalOffset); // vertical offset 12.261 + } 12.262 + verticalOffset = 0; 12.263 + } 12.264 + while 
(skipCount > 0) { 12.265 + temp.write(0);temp.write(0x0002); // Skip OP-code 12.266 + temp.write(Math.min(255, skipCount)); // horizontal offset 12.267 + temp.write(0); // vertical offset 12.268 + skipCount -= 255; 12.269 + } 12.270 + 12.271 + int literalCount = 0; 12.272 + int repeatCount = 0; 12.273 + for (; xy < xymax; ++xy) { 12.274 + // determine skip count 12.275 + for (skipCount = 0; xy < xymax; ++xy, ++skipCount) { 12.276 + if (data[xy] != prev[xy]) { 12.277 + break; 12.278 + } 12.279 + } 12.280 + xy -= skipCount; 12.281 + 12.282 + // determine repeat count 12.283 + byte v = data[xy]; 12.284 + for (repeatCount = 0; xy < xymax && repeatCount < 255; ++xy, ++repeatCount) { 12.285 + if (data[xy] != v) { 12.286 + break; 12.287 + } 12.288 + } 12.289 + xy -= repeatCount; 12.290 + 12.291 + if (skipCount < 4 && xy + skipCount < xymax && repeatCount < 3) { 12.292 + literalCount++; 12.293 + if (literalCount == 254) { 12.294 + temp.write(0);temp.write(literalCount); // Literal OP-code 12.295 + temp.write(data, xy - literalCount + 1, literalCount); 12.296 + literalCount = 0; 12.297 + } 12.298 + } else { 12.299 + if (literalCount > 0) { 12.300 + if (literalCount < 3) { 12.301 + for (; literalCount > 0; --literalCount) { 12.302 + temp.write(1); // Repeat OP-code 12.303 + temp.write(data[xy - literalCount]); 12.304 + } 12.305 + } else { 12.306 + temp.write(0);temp.write(literalCount); 12.307 + temp.write(data, xy - literalCount, literalCount); 12.308 + if (literalCount % 2 == 1) { 12.309 + temp.write(0); // pad byte 12.310 + } 12.311 + } 12.312 + literalCount = 0; 12.313 + } 12.314 + if (xy + skipCount == xymax) { 12.315 + // => we can skip until the end of the line without 12.316 + // having to write an op-code 12.317 + xy += skipCount - 1; 12.318 + } else if (skipCount >= repeatCount) { 12.319 + while (skipCount > 255) { 12.320 + temp.write(0);temp.write(0x0002); // Skip OP-code 12.321 + temp.write(255); 12.322 + temp.write(0); 12.323 + xy += 255; 12.324 + skipCount -= 
255; 12.325 + } 12.326 + temp.write(0);temp.write(0x0002); // Skip OP-code 12.327 + temp.write(skipCount); 12.328 + temp.write(0); 12.329 + xy += skipCount - 1; 12.330 + } else { 12.331 + temp.write(repeatCount); // Repeat OP-code 12.332 + temp.write(v); 12.333 + xy += repeatCount - 1; 12.334 + } 12.335 + } 12.336 + } 12.337 + 12.338 + // flush literal run 12.339 + if (literalCount > 0) { 12.340 + if (literalCount < 3) { 12.341 + for (; literalCount > 0; --literalCount) { 12.342 + temp.write(1); // Repeat OP-code 12.343 + temp.write(data[xy - literalCount]); 12.344 + } 12.345 + } else { 12.346 + temp.write(0);temp.write(literalCount); 12.347 + temp.write(data, xy - literalCount, literalCount); 12.348 + if (literalCount % 2 == 1) { 12.349 + temp.write(0); // pad byte 12.350 + } 12.351 + } 12.352 + } 12.353 + 12.354 + temp.write(0);temp.write(0x0000); // End of line OP-code 12.355 + } 12.356 + 12.357 + temp.write(0);temp.write(0x0001);// End of bitmap 12.358 + tempSeek.toOutputStream(out); 12.359 + } 12.360 + 12.361 + public static void main(String[] args) { 12.362 + byte[] data = {// 12.363 + 8, 2, 3, 4, 4, 3,7,7,7, 8,// 12.364 + 8, 1, 1, 1, 1, 2,7,7,7, 8,// 12.365 + 8, 0, 2, 0, 0, 0,7,7,7, 8,// 12.366 + 8, 2, 2, 3, 4, 4,7,7,7, 8,// 12.367 + 8, 1, 4, 4, 4, 5,7,7,7, 8}; 12.368 + 12.369 + 12.370 + byte[] prev = {// 12.371 + 8, 3, 3, 3, 3, 3,7,7,7, 8,// 12.372 + 8, 1, 1, 1, 1, 1,7,7,7, 8, // 12.373 + 8, 5, 5, 5, 5, 0,7,7,7, 8,// 12.374 + 8, 2, 2, 0, 0, 0,7,7,7, 8,// 12.375 + 8, 2, 0, 0, 0, 5,7,7,7, 8}; 12.376 + ByteArrayOutputStream buf = new ByteArrayOutputStream(); 12.377 + DataChunkOutputStream out = new DataChunkOutputStream(buf); 12.378 + MicrosoftRLEEncoder enc = new MicrosoftRLEEncoder(); 12.379 + 12.380 + try { 12.381 + enc.writeDelta8(out, data, prev, 1, 8, 10, 5); 12.382 + //enc.writeKey8(out, data, 1, 8, 10,5); 12.383 + out.close(); 12.384 + 12.385 + byte[] result = buf.toByteArray(); 12.386 + System.out.println("size:" + result.length); 12.387 + 
System.out.println(Arrays.toString(result)); 12.388 + System.out.print("0x ["); 12.389 + 12.390 + for (int i = 0; i < result.length; i++) { 12.391 + if (i != 0) { 12.392 + System.out.print(','); 12.393 + } 12.394 + String hex = "00" + Integer.toHexString(result[i]); 12.395 + System.out.print(hex.substring(hex.length() - 2)); 12.396 + } 12.397 + System.out.println(']'); 12.398 + 12.399 + } catch (IOException ex) { 12.400 + ex.printStackTrace(); 12.401 + } 12.402 + } 12.403 +}
13.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 13.2 +++ b/src/com/aurellem/capture/MultiListener.java Tue Oct 25 11:55:55 2011 -0700 13.3 @@ -0,0 +1,11 @@ 13.4 +package com.aurellem.capture; 13.5 + 13.6 +import com.jme3.audio.Listener; 13.7 + 13.8 +public interface MultiListener { 13.9 + 13.10 + void addListener(Listener l); 13.11 + void registerSoundProcessor(Listener l, SoundProcessor sp); 13.12 + void registerSoundProcessor(SoundProcessor sp); 13.13 + 13.14 +}
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;

/**
 * A {@link ByteArrayOutputStream} whose write position can be moved with
 * {@link #seek(long)}, so previously written bytes may be overwritten in
 * place. {@code count} always reflects the high-water mark of the data, while
 * {@code pos} is where the next write lands.
 *
 * @author Werner Randelshofer
 * @version 1.0 2010-12-27 Created.
 */
public class SeekableByteArrayOutputStream extends ByteArrayOutputStream {

    /** The current stream position: the index in {@code buf} of the next write. */
    private int pos;

    /**
     * Creates a new byte array output stream. The buffer capacity is
     * initially 32 bytes, though its size increases if necessary.
     */
    public SeekableByteArrayOutputStream() {
        this(32);
    }

    /**
     * Creates a new byte array output stream, with a buffer capacity of
     * the specified size, in bytes.
     *
     * @param size the initial size.
     * @exception IllegalArgumentException if size is negative.
     */
    public SeekableByteArrayOutputStream(int size) {
        if (size < 0) {
            throw new IllegalArgumentException("Negative initial size: " + size);
        }
        buf = new byte[size];
    }

    /**
     * Writes the specified byte at the current position, growing the buffer
     * if needed. {@code count} only grows; writes before the high-water mark
     * overwrite in place.
     *
     * @param b the byte to be written.
     */
    @Override
    public synchronized void write(int b) {
        int newcount = Math.max(pos + 1, count);
        if (newcount > buf.length) {
            buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount));
        }
        buf[pos++] = (byte) b;
        count = newcount;
    }

    /**
     * Writes <code>len</code> bytes from the specified byte array
     * starting at offset <code>off</code> at the current position.
     *
     * @param b   the data.
     * @param off the start offset in the data.
     * @param len the number of bytes to write.
     */
    @Override
    public synchronized void write(byte b[], int off, int len) {
        if ((off < 0) || (off > b.length) || (len < 0)
                || ((off + len) > b.length) || ((off + len) < 0)) {
            throw new IndexOutOfBoundsException();
        } else if (len == 0) {
            return;
        }
        int newcount = Math.max(pos + len, count);
        if (newcount > buf.length) {
            buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount));
        }
        System.arraycopy(b, off, buf, pos, len);
        pos += len;
        count = newcount;
    }

    /**
     * Resets the <code>count</code> field of this byte array output
     * stream to zero and rewinds the position, so that all currently
     * accumulated output is discarded. The output stream can be used again,
     * reusing the already allocated buffer space.
     */
    @Override
    public synchronized void reset() {
        count = 0;
        pos = 0;
    }

    /**
     * Sets the current stream position to the desired location. The
     * next write will occur at this location.
     *
     * <p>It is legal to seek past the current end of the data; the gap is
     * filled when bytes are written there.
     *
     * @param pos the desired position; must fit in a non-negative {@code int}.
     * @exception IndexOutOfBoundsException if <code>pos</code> is negative or
     *            exceeds {@link Integer#MAX_VALUE}.
     * @exception IOException if any other I/O error occurs.
     */
    public void seek(long pos) throws IOException {
        // BUGFIX: the original cast unchecked, so a negative or > 2^31-1
        // position silently corrupted the write position despite the
        // documented IndexOutOfBoundsException contract.
        if (pos < 0 || pos > Integer.MAX_VALUE) {
            throw new IndexOutOfBoundsException("Invalid stream position: " + pos);
        }
        this.pos = (int) pos;
    }

    /**
     * Returns the current byte position of the stream. The next write
     * will take place starting at this offset.
     *
     * @return a long containing the position of the stream.
     * @exception IOException if an I/O error occurs.
     */
    public long getStreamPosition() throws IOException {
        return pos;
    }

    /**
     * Writes the contents of the byte array into the specified output stream.
     *
     * @param out the destination stream.
     */
    public void toOutputStream(OutputStream out) throws IOException {
        out.write(buf, 0, count);
    }
}
import java.nio.ByteBuffer;

/**
 * A consumer of raw audio data.
 */
public interface SoundProcessor {

    /**
     * Handles a chunk of audio data.
     *
     * @param audioSamples buffer holding the raw samples
     * @param numSamples   how many entries of the buffer to consume
     */
    void process(ByteBuffer audioSamples, int numSamples);

    /** Called when no further audio will be delivered; release any resources. */
    void cleanup();
}
import java.io.ByteArrayInputStream;
import java.io.File;
import java.net.URL;

import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.SourceDataLine;

/**
 * <i>Standard audio</i>. Simple library for reading, writing, and
 * manipulating .wav files.
 * <p>
 * The sample-stream API ({@code play(double)}, {@code read}, {@code save})
 * assumes 44,100 Hz, 16-bit, monaural audio (CD quality).
 * <p>
 * Limitations: does not seem to work properly when reading .wav files from a
 * .jar file.
 * <p>
 * For additional documentation, see
 * <a href="http://introcs.cs.princeton.edu/15inout">Section 1.5</a> of
 * <i>Introduction to Programming in Java</i> by Sedgewick and Wayne.
 */
public final class StdAudio {

    /** The sample rate - 44,100 Hz for CD quality audio. */
    public static final int SAMPLE_RATE = 44100;

    /** Largest 16-bit sample magnitude; used to scale [-1,+1] doubles. */
    private static final double MAX_16_BIT = Short.MAX_VALUE; // 32,767

    private static SourceDataLine line; // to play the sound
    private static byte[] buffer;       // our internal buffer
    private static int bufferSize = 0;  // number of bytes currently in internal buffer

    // not-instantiable
    private StdAudio() { }

    /**
     * Close standard audio.
     * NOTE(review): {@code line} is never initialized in this version (the
     * original {@code init()} was commented out during migration), so this
     * and {@code play(double)} throw NullPointerException — confirm whether
     * initialization should be restored.
     */
    public static void close() {
        line.drain();
        line.stop();
    }

    /**
     * Write one sample (between -1.0 and +1.0) to standard audio. If the
     * sample is outside the range, it will be clipped.
     */
    public static void play(double in) {
        // clip if outside [-1, +1]
        if (in < -1.0) in = -1.0;
        if (in > +1.0) in = +1.0;

        // convert to 16-bit little-endian bytes
        short s = (short) (MAX_16_BIT * in);
        buffer[bufferSize++] = (byte) s;
        buffer[bufferSize++] = (byte) (s >> 8); // little Endian

        // send to sound card if buffer is full
        if (bufferSize >= buffer.length) {
            line.write(buffer, 0, buffer.length);
            bufferSize = 0;
        }
    }

    /**
     * Write an array of samples (between -1.0 and +1.0) to standard audio.
     * If a sample is outside the range, it will be clipped.
     */
    public static void play(double[] input) {
        for (int i = 0; i < input.length; i++) {
            play(input[i]);
        }
    }

    /**
     * Read audio samples from a file (in .wav or .au format) and return them
     * as a double array with values between -1.0 and +1.0.
     */
    public static double[] read(String filename) {
        byte[] data = readByte(filename);
        int N = data.length;
        double[] d = new double[N / 2];
        for (int i = 0; i < N / 2; i++) {
            // reassemble little-endian 16-bit samples and normalize
            d[i] = ((short) (((data[2 * i + 1] & 0xFF) << 8) + (data[2 * i] & 0xFF))) / ((double) MAX_16_BIT);
        }
        return d;
    }

    /**
     * Play a sound file (in .wav or .au format) in a background thread.
     *
     * @throws RuntimeException if the file cannot be read or opened.
     */
    public static void play(String filename) {
        // java.applet.AudioClip was removed in JDK 17; javax.sound.sampled.Clip
        // provides the same fire-and-forget background playback.
        openClip(filename).start();
    }

    /**
     * Loop a sound file (in .wav or .au format) in a background thread.
     *
     * @throws RuntimeException if the file cannot be read or opened.
     */
    public static void loop(String filename) {
        openClip(filename).loop(Clip.LOOP_CONTINUOUSLY);
    }

    /** Opens {@code filename} as a ready-to-start {@link Clip}. */
    private static Clip openClip(String filename) {
        File file = new File(filename);
        if (!file.canRead()) throw new RuntimeException("audio " + filename + " not found");
        try {
            Clip clip = AudioSystem.getClip();
            clip.open(AudioSystem.getAudioInputStream(file));
            return clip;
        } catch (Exception e) {
            throw new RuntimeException("audio " + filename + " could not be opened", e);
        }
    }

    /** Return the raw audio data of a classpath resource as a byte array. */
    private static byte[] readByte(String filename) {
        try {
            URL url = StdAudio.class.getResource(filename);
            AudioInputStream ais = AudioSystem.getAudioInputStream(url);
            byte[] data = new byte[ais.available()];
            // read() may deliver fewer bytes than requested; loop until full or EOF
            int off = 0;
            while (off < data.length) {
                int n = ais.read(data, off, data.length - off);
                if (n < 0) break;
                off += n;
            }
            ais.close();
            return data;
        } catch (Exception e) {
            System.out.println(e.getMessage());
            // preserve the cause instead of discarding it
            throw new RuntimeException("Could not read " + filename, e);
        }
    }

    /**
     * Save the double array as a sound file (using .wav or .au format).
     */
    public static void save(String filename, double[] input) {
        // assumes 44,100 samples per second
        // use 16-bit audio, mono, signed PCM, little Endian
        AudioFormat format = new AudioFormat(SAMPLE_RATE, 16, 1, true, false);
        byte[] data = new byte[2 * input.length];
        for (int i = 0; i < input.length; i++) {
            int temp = (short) (input[i] * MAX_16_BIT);
            data[2 * i + 0] = (byte) temp;
            data[2 * i + 1] = (byte) (temp >> 8);
        }

        // now save the file
        try {
            ByteArrayInputStream bais = new ByteArrayInputStream(data);
            AudioInputStream ais = new AudioInputStream(bais, format, input.length);
            if (filename.endsWith(".wav") || filename.endsWith(".WAV")) {
                AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(filename));
            } else if (filename.endsWith(".au") || filename.endsWith(".AU")) {
                AudioSystem.write(ais, AudioFileFormat.Type.AU, new File(filename));
            } else {
                throw new RuntimeException("File format not supported: " + filename);
            }
        } catch (Exception e) {
            System.out.println(e);
            System.exit(1);
        }
    }

    /**
     * Save raw 32-bit mono PCM bytes as a sound file (using .wav or .au
     * format).
     */
    public static void save(String filename, byte[] data) {
        // 32-bit mono, signed PCM, little Endian
        AudioFormat format = new AudioFormat(SAMPLE_RATE, 32, 1, true, false);

        try {
            ByteArrayInputStream bais = new ByteArrayInputStream(data);
            // BUGFIX: the frame count for 32-bit mono is data.length/4
            // (4 bytes per frame); the original passed data.length/2,
            // inconsistent with WaveFileWriter which uses /4.
            AudioInputStream ais = new AudioInputStream(bais, format, data.length / 4);
            if (filename.endsWith(".wav") || filename.endsWith(".WAV")) {
                AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(filename));
            } else if (filename.endsWith(".au") || filename.endsWith(".AU")) {
                AudioSystem.write(ais, AudioFileFormat.Type.AU, new File(filename));
            } else {
                throw new RuntimeException("File format not supported: " + filename);
            }
        } catch (Exception e) {
            System.out.println(e);
            System.exit(1);
        }
    }

    /***********************************************************************
     * sample test client
     ***********************************************************************/

    // create a note (sine wave) of the given frequency (Hz), for the given
    // duration (seconds) scaled to the given volume (amplitude)
    private static double[] note(double hz, double duration, double amplitude) {
        int N = (int) (StdAudio.SAMPLE_RATE * duration);
        double[] a = new double[N + 1];
        for (int i = 0; i <= N; i++)
            a[i] = amplitude * Math.sin(2 * Math.PI * i * hz / StdAudio.SAMPLE_RATE);
        return a;
    }

    /**
     * Test client - play an A major scale to standard audio.
     */
    public static void main(String[] args) {
        // 440 Hz for 1 sec
        double freq = 440.0;
        for (int i = 0; i <= StdAudio.SAMPLE_RATE; i++) {
            StdAudio.play(0.5 * Math.sin(2 * Math.PI * freq * i / StdAudio.SAMPLE_RATE));
        }

        // scale increments
        int[] steps = { 0, 2, 4, 5, 7, 9, 11, 12 };
        for (int i = 0; i < steps.length; i++) {
            double hz = 440.0 * Math.pow(2, steps[i] / 12.0);
            StdAudio.play(note(hz, 1.0, 0.5));
        }

        // need to call this in non-interactive stuff so the program doesn't
        // terminate until all the sound leaves the speaker.
        StdAudio.close();

        // need to terminate a Java program with sound
        System.exit(0);
    }
}
17.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 17.2 +++ b/src/com/aurellem/capture/TestAurellem.java Tue Oct 25 11:55:55 2011 -0700 17.3 @@ -0,0 +1,9 @@ 17.4 +package com.aurellem.capture; 17.5 + 17.6 +public class TestAurellem { 17.7 + 17.8 + public static void main(String[] ignore){ 17.9 + com.aurellem.capture.hello.HelloAudio game = new com.aurellem.capture.hello.HelloAudio(); 17.10 + game.start(); 17.11 + } 17.12 +}
18.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 18.2 +++ b/src/com/aurellem/capture/WaveFileWriter.java Tue Oct 25 11:55:55 2011 -0700 18.3 @@ -0,0 +1,46 @@ 18.4 +package com.aurellem.capture; 18.5 + 18.6 +import java.io.ByteArrayInputStream; 18.7 +import java.io.File; 18.8 +import java.io.IOException; 18.9 +import java.nio.ByteBuffer; 18.10 +import java.util.Vector; 18.11 + 18.12 +import javax.sound.sampled.AudioFileFormat; 18.13 +import javax.sound.sampled.AudioFormat; 18.14 +import javax.sound.sampled.AudioInputStream; 18.15 +import javax.sound.sampled.AudioSystem; 18.16 + 18.17 +public class WaveFileWriter implements SoundProcessor { 18.18 + 18.19 + public Vector<Byte> fullWaveData = new Vector<Byte>(); 18.20 + public File targetFile; 18.21 + 18.22 + public WaveFileWriter(File targetFile){ 18.23 + this.targetFile = targetFile; 18.24 + } 18.25 + 18.26 + public void cleanup() { 18.27 + byte[] data = new byte[this.fullWaveData.size()]; 18.28 + 18.29 + for (int i = 0; i < this.fullWaveData.size(); i++){ 18.30 + data[i] = this.fullWaveData.get(i);} 18.31 + 18.32 + 18.33 + ByteArrayInputStream input = new ByteArrayInputStream(data); 18.34 + AudioFormat format = new AudioFormat(44100.0f, 32, 1, true, false); 18.35 + AudioInputStream audioInput = new AudioInputStream(input, format, data.length / 4 ); 18.36 + try {AudioSystem.write(audioInput, AudioFileFormat.Type.WAVE, targetFile);} 18.37 + catch (IOException e) {e.printStackTrace();} 18.38 + 18.39 + } 18.40 + 18.41 + 18.42 + public void process(ByteBuffer audioSamples, int numSamples) { 18.43 + for (int i = 0; i<numSamples; i++){ 18.44 + Byte b = audioSamples.get(i); 18.45 + fullWaveData.add(b); 18.46 + } 18.47 + } 18.48 + 18.49 +}
19.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 19.2 +++ b/src/com/aurellem/capture/XuggleVideoRecorder.java Tue Oct 25 11:55:55 2011 -0700 19.3 @@ -0,0 +1,53 @@ 19.4 +package com.aurellem.capture; 19.5 + 19.6 + 19.7 +/** 19.8 + * Handles writing video files using Xuggle. 19.9 + * 19.10 + * 19.11 + * @author Robert McIntyre 19.12 + * 19.13 + */ 19.14 +/* 19.15 +public class XuggleVideoRecorder extends AbstractVideoRecorder{ 19.16 + 19.17 + 19.18 + IMediaWriter writer; 19.19 + BufferedImage frame; 19.20 + int videoChannel = 0; 19.21 + long currentTimeStamp = 0; 19.22 + boolean videoReady = false; 19.23 + 19.24 + 19.25 + public XuggleVideoRecorder(File output) throws IOException {super(output);} 19.26 + 19.27 + public void initVideo(){ 19.28 + this.frame = new BufferedImage( 19.29 + width, height, 19.30 + BufferedImage.TYPE_3BYTE_BGR); 19.31 + this.writer = ToolFactory.makeWriter(this.targetFileName); 19.32 + writer.addVideoStream(videoChannel, 19.33 + 0, IRational.make(fps), 19.34 + width, height); 19.35 + this.videoReady = true; 19.36 + } 19.37 + 19.38 + 19.39 + public void record(BufferedImage rawFrame) { 19.40 + if (!this.videoReady){initVideo();} 19.41 + // convert the Image into the form that Xuggle likes. 19.42 + this.frame.getGraphics().drawImage(rawFrame, 0, 0, null); 19.43 + writer.encodeVideo(videoChannel, 19.44 + frame, 19.45 + currentTimeStamp, TimeUnit.NANOSECONDS); 19.46 + 19.47 + currentTimeStamp += (long) (1000000000.0 / fps); 19.48 + } 19.49 + 19.50 + public void finish() { 19.51 + writer.close(); 19.52 + } 19.53 + 19.54 +} 19.55 + 19.56 +*/
20.1 --- a/src/com/aurellem/capture/hello/HelloAudio.java Tue Oct 25 11:18:59 2011 -0700 20.2 +++ b/src/com/aurellem/capture/hello/HelloAudio.java Tue Oct 25 11:55:55 2011 -0700 20.3 @@ -4,11 +4,11 @@ 20.4 import java.util.logging.Level; 20.5 import java.util.logging.Logger; 20.6 20.7 +import com.aurellem.capture.MultiListener; 20.8 +import com.aurellem.capture.WaveFileWriter; 20.9 import com.jme3.app.SimpleApplication; 20.10 import com.jme3.audio.AudioNode; 20.11 import com.jme3.audio.Listener; 20.12 -import com.jme3.capture.MultiListener; 20.13 -import com.jme3.capture.WaveFileWriter; 20.14 import com.jme3.input.controls.ActionListener; 20.15 import com.jme3.input.controls.MouseButtonTrigger; 20.16 import com.jme3.material.Material;
21.1 --- a/src/com/aurellem/capture/hello/HelloVideo.java Tue Oct 25 11:18:59 2011 -0700 21.2 +++ b/src/com/aurellem/capture/hello/HelloVideo.java Tue Oct 25 11:55:55 2011 -0700 21.3 @@ -3,10 +3,10 @@ 21.4 import java.io.File; 21.5 import java.io.IOException; 21.6 21.7 +import com.aurellem.capture.AVIVideoRecorder; 21.8 +import com.aurellem.capture.AbstractVideoRecorder; 21.9 +import com.aurellem.capture.Capture; 21.10 import com.jme3.app.SimpleApplication; 21.11 -import com.jme3.capture.AVIVideoRecorder; 21.12 -import com.jme3.capture.AbstractVideoRecorder; 21.13 -import com.jme3.capture.Capture; 21.14 import com.jme3.material.Material; 21.15 import com.jme3.math.ColorRGBA; 21.16 import com.jme3.math.Vector3f;