src/ca/randelshofer/AVIOutputStream.java @ 20:bc6fbfbbadd9 (branch: spazz)

commit:   retarded spazz dancing achieved
author:   Robert McIntyre <rlm@mit.edu>
date:     Sat, 29 Oct 2011 16:22:57 -0700
parents:  4c5fc53778c1
children: 784a3f4e6202
/**
 * @(#)AVIOutputStream.java  1.5.1  2011-01-17
 *
 * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland.
 * All rights reserved.
 *
 * You may not use, copy or modify this file, except in compliance with the
 * license agreement you entered into with Werner Randelshofer.
 * For details see accompanying license terms.
 */
package ca.randelshofer;

import java.awt.Dimension;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.awt.image.IndexColorModel;
import java.awt.image.WritableRaster;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.LinkedList;

import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageWriteParam;
import javax.imageio.ImageWriter;
import javax.imageio.stream.FileImageOutputStream;
import javax.imageio.stream.ImageOutputStream;
import javax.imageio.stream.MemoryCacheImageOutputStream;

/**
 * This class supports writing of images into an AVI 1.0 video file.
 * <p>
 * The images are written as video frames.
 * <p>
 * Video frames can be encoded with one of the following formats:
 * <ul>
 * <li>JPEG</li>
 * <li>PNG</li>
 * <li>RAW</li>
 * <li>RLE</li>
 * </ul>
 * All frames must have the same format.
 * When JPG is used, each frame can have an individual encoding quality.
 * <p>
 * All frames in an AVI file must have the same duration. The duration can
 * be set by setting an appropriate pair of values using methods
 * {@link #setFrameRate} and {@link #setTimeScale}.
 * <p>
 * The length of an AVI 1.0 file is limited to 1 GB.
 * This class supports lengths of up to 4 GB, but such files may not work on
 * all players.
 * <p>
 * For detailed information about the AVI RIFF file format see:<br>
 * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br>
 * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br>
 * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br>
 *
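 * A minimal usage sketch (the file name is hypothetical, and {@code frames}
 * stands for any {@code Iterable<BufferedImage>} you provide):
 * <pre>{@code
 * AVIOutputStream out = new AVIOutputStream(
 *         new File("video.avi"), AVIOutputStream.VideoFormat.JPG);
 * out.setFrameRate(30); // with the default timeScale of 1: 30 frames per second
 * for (BufferedImage frame : frames) {
 *     out.writeFrame(frame); // the dimension is taken from the first frame
 * }
 * out.close();
 * }</pre>
 *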
 * @author Werner Randelshofer
 * @version 1.5.1 2011-01-17 Fixes unintended closing of the output stream.
 * <br>1.5 2011-01-06 Adds support for the RLE 8-bit video format.
 * <br>1.4 2011-01-04 Adds support for the RAW 4-bit and 8-bit video formats.
 * Fixes offsets in the "idx1" chunk.
 * <br>1.3.2 2010-12-27 File size limit is 1 GB.
 * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets.
 * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream.
 * Added method getVideoDimension().
 * <br>1.2 2009-08-29 Adds support for the RAW video format.
 * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in the avih
 * chunk. Changed the API to reflect that AVI works with frame rates instead of
 * with frame durations.
 * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG
 * encoded video.
 * <br>1.0 2008-08-11 Created.
 */
public class AVIOutputStream {

    /**
     * Underlying output stream.
     */
    private ImageOutputStream out;
    /** The offset of the AVI stream in the underlying ImageOutputStream.
     * Normally this is 0 unless the underlying stream already contained data
     * when it was passed to the constructor.
     */
    private long streamOffset;

    /**
     * Supported video encodings.
     */
    public static enum VideoFormat {

        RAW, RLE, JPG, PNG;
    }
    /**
     * Current video format.
     */
    private VideoFormat videoFormat;
    /**
     * Quality of JPEG encoded video frames.
     */
    private float quality = 0.9f;
    /**
     * Width of the video frames. All frames must have the same width.
     * The value -1 is used to mark unspecified width.
     */
    private int imgWidth = -1;
    /**
     * Height of the video frames. All frames must have the same height.
     * The value -1 is used to mark unspecified height.
     */
    private int imgHeight = -1;
    /** Number of bits per pixel. */
    private int imgDepth = 24;
    /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */
    private IndexColorModel palette;
    private IndexColorModel previousPalette;

    /**
     * The timeScale of the movie.
     * <p>
     * Used with frameRate to specify the time scale that this stream will use.
     * Dividing frameRate by timeScale gives the number of samples per second.
     * For video streams, this is the frame rate. For audio streams, this rate
     * corresponds to the time needed to play nBlockAlign bytes of audio, which
     * for PCM audio is just the sample rate.
     */
    private int timeScale = 1;
    /**
     * The frameRate of the movie in timeScale units.
     * <p>
     * @see #timeScale
     */
    private int frameRate = 30;
    /**
     * The states of the movie output stream.
     */
    private static enum States {

        STARTED, FINISHED, CLOSED;
    }
    /**
     * The current state of the movie output stream.
     */
    private States state = States.FINISHED;

    /**
     * AVI stores media data in samples.
     * A sample is a single element in a sequence of time-ordered data.
     */
    private static class Sample {

        String chunkType;
        /** Offset of the sample relative to the start of the AVI file.
         */
        long offset;
        /** Data length of the sample. */
        long length;
        /** Whether the sample is a sync-sample. */
        boolean isSync;

        /**
         * Creates a new sample.
         * @param chunkId the type of the chunk that holds the sample
         * @param duration the sample duration (currently unused)
         * @param offset the offset relative to the start of the AVI file
         * @param length the data length of the sample
         * @param isSync whether the sample is a sync-sample
         */
        public Sample(String chunkId, int duration, long offset, long length, boolean isSync) {
            this.chunkType = chunkId;
            this.offset = offset;
            this.length = length;
            this.isSync = isSync;
        }
    }
    /**
     * List of video frames.
     */
    private LinkedList<Sample> videoFrames;
    /**
     * This chunk holds the whole AVI content.
     */
    private CompositeChunk aviChunk;
    /**
     * This chunk holds the movie frames.
     */
    private CompositeChunk moviChunk;
    /**
     * This chunk holds the AVI Main Header.
     */
    FixedSizeDataChunk avihChunk;
    /**
     * This chunk holds the AVI Stream Header.
     */
    FixedSizeDataChunk strhChunk;
    /**
     * This chunk holds the AVI Stream Format Header.
     */
    FixedSizeDataChunk strfChunk;

    /**
     * Chunk base class.
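     * <p>
     * On disk, a RIFF chunk is laid out as a 4-character type, a 4-byte
     * little-endian content size, the content itself, and a pad byte if the
     * content size is odd; composite (LIST/RIFF) chunks additionally carry a
     * 4-character list type. The subclasses below write exactly this layout.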
     */
    private abstract class Chunk {

        /**
         * The chunkType of the chunk. A String with a length of 4 characters.
         */
        protected String chunkType;
        /**
         * The offset of the chunk relative to the start of the
         * ImageOutputStream.
         */
        protected long offset;

        /**
         * Creates a new Chunk at the current position of the ImageOutputStream.
         * @param chunkType The chunkType of the chunk. A string with a length of 4 characters.
         */
        public Chunk(String chunkType) throws IOException {
            this.chunkType = chunkType;
            offset = getRelativeStreamPosition();
        }

        /**
         * Writes the chunk to the ImageOutputStream and disposes it.
         */
        public abstract void finish() throws IOException;

        /**
         * Returns the size of the chunk including the size of the chunk header.
         * @return The size of the chunk.
         */
        public abstract long size();
    }
    /**
     * A CompositeChunk contains an ordered list of Chunks.
     */
    private class CompositeChunk extends Chunk {

        /**
         * The type of the composite. A String with a length of 4 characters.
         */
        protected String compositeType;
        private LinkedList<Chunk> children;
        private boolean finished;

        /**
         * Creates a new CompositeChunk at the current position of the
         * ImageOutputStream.
         * @param compositeType The type of the composite.
         * @param chunkType The type of the chunk.
         */
        public CompositeChunk(String compositeType, String chunkType) throws IOException {
            super(chunkType);
            this.compositeType = compositeType;
            out.writeLong(0); // make room for the chunk header
            out.writeInt(0); // make room for the chunk header
            children = new LinkedList<Chunk>();
        }

        public void add(Chunk child) throws IOException {
            if (children.size() > 0) {
                children.getLast().finish();
            }
            children.add(child);
        }

        /**
         * Writes the chunk and all its children to the ImageOutputStream
         * and disposes of all resources held by the chunk.
         * @throws java.io.IOException
         */
        @Override
        public void finish() throws IOException {
            if (!finished) {
                if (size() > 0xffffffffL) {
                    throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size());
                }

                long pointer = getRelativeStreamPosition();
                seekRelative(offset);

                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
                headerData.writeType(compositeType);
                headerData.writeUInt(size() - 8);
                headerData.writeType(chunkType);
                for (Chunk child : children) {
                    child.finish();
                }
                seekRelative(pointer);
                if (size() % 2 == 1) {
                    out.writeByte(0); // write pad byte
                }
                finished = true;
            }
        }

        @Override
        public long size() {
            long length = 12;
            for (Chunk child : children) {
                length += child.size() + child.size() % 2;
            }
            return length;
        }
    }
    /**
     * Data Chunk.
     */
    private class DataChunk extends Chunk {

        private DataChunkOutputStream data;
        private boolean finished;

        /**
         * Creates a new DataChunk at the current position of the
         * ImageOutputStream.
         * @param name The chunkType of the chunk.
         */
        public DataChunk(String name) throws IOException {
            super(name);
            out.writeLong(0); // make room for the chunk header
            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
        }

        public DataChunkOutputStream getOutputStream() {
            if (finished) {
                throw new IllegalStateException("DataChunk is finished");
            }
            return data;
        }

        @Override
        public void finish() throws IOException {
            if (!finished) {
                long sizeBefore = size();

                if (size() > 0xffffffffL) {
                    throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size());
                }

                long pointer = getRelativeStreamPosition();
                seekRelative(offset);

                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
                headerData.writeType(chunkType);
                headerData.writeUInt(size() - 8);
                seekRelative(pointer);
                if (size() % 2 == 1) {
                    out.writeByte(0); // write pad byte
                }
                finished = true;
                long sizeAfter = size();
                if (sizeBefore != sizeAfter) {
                    System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter);
                }
            }
        }

        @Override
        public long size() {
            return 8 + data.size();
        }
    }
    /**
     * A DataChunk with a fixed size.
     */
    private class FixedSizeDataChunk extends Chunk {

        private DataChunkOutputStream data;
        private boolean finished;
        private long fixedSize;

        /**
         * Creates a new DataChunk at the current position of the
         * ImageOutputStream.
         * @param chunkType The chunkType of the chunk.
         */
        public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException {
            super(chunkType);
            this.fixedSize = fixedSize;
            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
            data.writeType(chunkType);
            data.writeUInt(fixedSize);
            data.clearCount();

            // Fill fixed size with nulls
            byte[] buf = new byte[(int) Math.min(512, fixedSize)];
            long written = 0;
            while (written < fixedSize) {
                data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written));
                written += Math.min(buf.length, fixedSize - written);
            }
            if (fixedSize % 2 == 1) {
                out.writeByte(0); // write pad byte
            }
            seekToStartOfData();
        }

        public DataChunkOutputStream getOutputStream() {
            /*if (finished) {
                throw new IllegalStateException("DataChunk is finished");
            }*/
            return data;
        }

        public void seekToStartOfData() throws IOException {
            seekRelative(offset + 8);
            data.clearCount();
        }

        public void seekToEndOfChunk() throws IOException {
            seekRelative(offset + 8 + fixedSize + fixedSize % 2);
        }

        @Override
        public void finish() throws IOException {
            if (!finished) {
                finished = true;
            }
        }

        @Override
        public long size() {
            return 8 + fixedSize;
        }
    }
    /**
     * Creates a new AVI file with the specified video format and
     * frame rate. The video has 24 bits per pixel.
     *
     * @param file the output file
     * @param format Selects an encoder for the video format.
     * @exception IllegalArgumentException if videoFormat is null or if
     * frame rate is <= 0
     */
    public AVIOutputStream(File file, VideoFormat format) throws IOException {
        this(file, format, 24);
    }
    /**
     * Creates a new AVI file with the specified video format and
     * frame rate.
     *
     * @param file the output file
     * @param format Selects an encoder for the video format.
     * @param bitsPerPixel the number of bits per pixel.
     * @exception IllegalArgumentException if videoFormat is null or if
     * frame rate is <= 0
     */
    public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException {
        if (format == null) {
            throw new IllegalArgumentException("format must not be null");
        }

        if (file.exists()) {
            file.delete();
        }
        this.out = new FileImageOutputStream(file);
        this.streamOffset = 0;
        this.videoFormat = format;
        this.videoFrames = new LinkedList<Sample>();
        this.imgDepth = bitsPerPixel;
        if (imgDepth == 4) {
            byte[] gray = new byte[16];
            for (int i = 0; i < gray.length; i++) {
                gray[i] = (byte) ((i << 4) | i);
            }
            palette = new IndexColorModel(4, 16, gray, gray, gray);
        } else if (imgDepth == 8) {
            byte[] gray = new byte[256];
            for (int i = 0; i < gray.length; i++) {
                gray[i] = (byte) i;
            }
            palette = new IndexColorModel(8, 256, gray, gray, gray);
        }
    }
    /**
     * Creates a new AVI output stream with the specified video format and
     * frame rate.
     *
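     * A sketch of this constructor's intended use (the output file name is
     * hypothetical); the stream offset is taken from the stream's current
     * position:
     * <pre>{@code
     * ImageOutputStream ios = new FileImageOutputStream(new File("out.avi"));
     * AVIOutputStream avi = new AVIOutputStream(ios, AVIOutputStream.VideoFormat.PNG);
     * }</pre>
     *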
     * @param out the underlying output stream
     * @param format Selects an encoder for the video format.
     * @exception IllegalArgumentException if videoFormat is null or if
     * frame rate is <= 0
     */
    public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException {
        if (format == null) {
            throw new IllegalArgumentException("format must not be null");
        }
        this.out = out;
        this.streamOffset = out.getStreamPosition();
        this.videoFormat = format;
        this.videoFrames = new LinkedList<Sample>();
    }
    /**
     * Used with frameRate to specify the time scale that this stream will use.
     * Dividing frameRate by timeScale gives the number of samples per second.
     * For video streams, this is the frame rate. For audio streams, this rate
     * corresponds to the time needed to play nBlockAlign bytes of audio, which
     * for PCM audio is just the sample rate.
     * <p>
     * The default value is 1.
     *
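     * For example, an NTSC-style rate just under 30 fps can be expressed as a
     * frameRate/timeScale pair (the values below are illustrative):
     * <pre>{@code
     * out.setFrameRate(30000);
     * out.setTimeScale(1001); // 30000 / 1001 = 29.97 frames per second
     * }</pre>
     *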
     * @param newValue
     */
    public void setTimeScale(int newValue) {
        if (newValue <= 0) {
            throw new IllegalArgumentException("timeScale must be greater than 0");
        }
        this.timeScale = newValue;
    }

    /**
     * Returns the time scale of this media.
     *
     * @return time scale
     */
    public int getTimeScale() {
        return timeScale;
    }
    /**
     * Sets the rate of video frames in time scale units.
     * <p>
     * The default value is 30. Together with the default value 1 of timeScale
     * this results in 30 frames per second.
     *
     * @param newValue
     */
    public void setFrameRate(int newValue) {
        if (newValue <= 0) {
            throw new IllegalArgumentException("frameRate must be greater than 0");
        }
        if (state == States.STARTED) {
            throw new IllegalStateException("frameRate must be set before the first frame is written");
        }
        this.frameRate = newValue;
    }

    /**
     * Returns the frame rate of this media.
     *
     * @return frame rate
     */
    public int getFrameRate() {
        return frameRate;
    }
    /** Sets the global color palette. */
    public void setPalette(IndexColorModel palette) {
        this.palette = palette;
    }
    /**
     * Sets the compression quality of the video track.
     * A value of 0 stands for "high compression is important", a value of
     * 1 for "high image quality is important".
     * <p>
     * Changing this value affects frames which are subsequently written
     * to the AVIOutputStream. Frames which have already been written
     * are not changed.
     * <p>
     * This value only has an effect on videos encoded with the JPG format.
     * <p>
     * The default value is 0.9.
     *
     * @param newValue
     */
    public void setVideoCompressionQuality(float newValue) {
        this.quality = newValue;
    }

    /**
     * Returns the video compression quality.
     *
     * @return video compression quality
     */
    public float getVideoCompressionQuality() {
        return quality;
    }
    /**
     * Sets the dimension of the video track.
     * <p>
     * You need to explicitly set the dimension if you add frames from
     * files or input streams.
     * <p>
     * If you add frames from buffered images, then AVIOutputStream
     * can determine the video dimension from the image width and height.
     *
     * @param width Must be greater than 0.
     * @param height Must be greater than 0.
     */
    public void setVideoDimension(int width, int height) {
        if (width < 1 || height < 1) {
            throw new IllegalArgumentException("width and height must be greater than zero.");
        }
        this.imgWidth = width;
        this.imgHeight = height;
    }

    /**
     * Gets the dimension of the video track.
     * <p>
     * Returns null if the dimension is not known.
     */
    public Dimension getVideoDimension() {
        if (imgWidth < 1 || imgHeight < 1) {
            return null;
        }
        return new Dimension(imgWidth, imgHeight);
    }
    /**
     * Sets the state of the AVIOutputStream to started.
     * <p>
     * If the state is changed by this method, the prolog is
     * written.
     */
    private void ensureStarted() throws IOException {
        if (state != States.STARTED) {
            writeProlog();
            state = States.STARTED;
        }
    }
    /**
     * Writes a frame to the video track.
     * <p>
     * If the dimension of the video track has not been specified yet, it
     * is derived from the first buffered image added to the AVIOutputStream.
     *
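     * Note: for RAW video with 4 or 8 bits per pixel, the image is expected
     * to use an IndexColorModel with a byte-backed raster; the writer casts
     * the image's data buffer to DataBufferByte.
     *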
     * @param image The frame image.
     *
     * @throws IllegalArgumentException if the dimension of the frame does
     * not match the dimension of the video track.
     * @throws IOException if writing the image failed.
     */
    public void writeFrame(BufferedImage image) throws IOException {
        ensureOpen();
        ensureStarted();

        // Get the dimensions of the first image
        if (imgWidth == -1) {
            imgWidth = image.getWidth();
            imgHeight = image.getHeight();
        } else {
            // The dimension of the image must match the dimension of the video track
            if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) {
                throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size()
                        + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
                        + ") differs from image[0] (width="
                        + imgWidth + ", height=" + imgHeight + ")");
            }
        }
        DataChunk videoFrameChunk;
        long offset = getRelativeStreamPosition();
        boolean isSync = true;
        switch (videoFormat) {
            case RAW: {
                switch (imgDepth) {
                    case 4: {
                        IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
                        int[] imgRGBs = new int[16];
                        imgPalette.getRGBs(imgRGBs);
                        int[] previousRGBs = new int[16];
                        if (previousPalette == null) {
                            previousPalette = palette;
                        }
                        previousPalette.getRGBs(previousRGBs);
                        if (!Arrays.equals(imgRGBs, previousRGBs)) {
                            previousPalette = imgPalette;
                            DataChunk paletteChangeChunk = new DataChunk("00pc");
                            /*
                            int first = imgPalette.getMapSize();
                            int last = -1;
                            for (int i = 0; i < 16; i++) {
                                if (previousRGBs[i] != imgRGBs[i] && i < first) {
                                    first = i;
                                }
                                if (previousRGBs[i] != imgRGBs[i] && i > last) {
                                    last = i;
                                }
                            }*/
                            int first = 0;
                            int last = imgPalette.getMapSize() - 1;
                            /*
                             * typedef struct {
                             *     BYTE bFirstEntry;
                             *     BYTE bNumEntries;
                             *     WORD wFlags;
                             *     PALETTEENTRY peNew[];
                             * } AVIPALCHANGE;
                             *
                             * typedef struct tagPALETTEENTRY {
                             *     BYTE peRed;
                             *     BYTE peGreen;
                             *     BYTE peBlue;
                             *     BYTE peFlags;
                             * } PALETTEENTRY;
                             */
                            DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
                            pOut.writeByte(first); // bFirstEntry
                            pOut.writeByte(last - first + 1); // bNumEntries
                            pOut.writeShort(0); // wFlags

                            for (int i = first; i <= last; i++) {
                                pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
                                pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
                                pOut.writeByte(imgRGBs[i] & 0xff); // blue
                                pOut.writeByte(0); // reserved
                            }

                            moviChunk.add(paletteChangeChunk);
                            paletteChangeChunk.finish();
                            long length = getRelativeStreamPosition() - offset;
                            videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
                            offset = getRelativeStreamPosition();
                        }

                        videoFrameChunk = new DataChunk("00db");
                        byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
                        byte[] rgb4 = new byte[imgWidth / 2];
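                        // Pack two 4-bit palette indices into each output byte
                        // (e.g. indices 0xA and 0xB become the byte 0xAB), and
                        // write the rows bottom-up, as uncompressed DIBs are
                        // stored upside down.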
                        for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
                            for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) {
                                rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf));
                            }
                            videoFrameChunk.getOutputStream().write(rgb4);
                        }
                        break;
                    }
                    case 8: {
                        IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
                        int[] imgRGBs = new int[256];
                        imgPalette.getRGBs(imgRGBs);
                        int[] previousRGBs = new int[256];
                        if (previousPalette == null) {
                            previousPalette = palette;
                        }
                        previousPalette.getRGBs(previousRGBs);
                        if (!Arrays.equals(imgRGBs, previousRGBs)) {
                            previousPalette = imgPalette;
                            DataChunk paletteChangeChunk = new DataChunk("00pc");
                            /*
                            int first = imgPalette.getMapSize();
                            int last = -1;
                            for (int i = 0; i < 16; i++) {
                                if (previousRGBs[i] != imgRGBs[i] && i < first) {
                                    first = i;
                                }
                                if (previousRGBs[i] != imgRGBs[i] && i > last) {
                                    last = i;
                                }
                            }*/
                            int first = 0;
                            int last = imgPalette.getMapSize() - 1;
                            /*
                             * typedef struct {
                             *     BYTE bFirstEntry;
                             *     BYTE bNumEntries;
                             *     WORD wFlags;
                             *     PALETTEENTRY peNew[];
                             * } AVIPALCHANGE;
                             *
                             * typedef struct tagPALETTEENTRY {
                             *     BYTE peRed;
                             *     BYTE peGreen;
                             *     BYTE peBlue;
                             *     BYTE peFlags;
                             * } PALETTEENTRY;
                             */
                            DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream();
                            pOut.writeByte(first); // bFirstEntry
                            pOut.writeByte(last - first + 1); // bNumEntries
                            pOut.writeShort(0); // wFlags

                            for (int i = first; i <= last; i++) {
                                pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
                                pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
                                pOut.writeByte(imgRGBs[i] & 0xff); // blue
                                pOut.writeByte(0); // reserved
                            }

                            moviChunk.add(paletteChangeChunk);
                            paletteChangeChunk.finish();
                            long length = getRelativeStreamPosition() - offset;
                            videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false));
                            offset = getRelativeStreamPosition();
                        }

                        videoFrameChunk = new DataChunk("00db");
                        byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
                        for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down
                            videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth);
                        }
                        break;
                    }
                    default: {
                        videoFrameChunk = new DataChunk("00db");
                        WritableRaster raster = image.getRaster();
                        int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data
                        byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data
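                        // Uncompressed DIBs store pixels bottom-up in BGR
                        // order, so each scanline is fetched from the raster
                        // starting at the last row, with the red and blue
                        // samples swapped relative to the RGB band order.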
                        for (int y = imgHeight - 1; y >= 0; --y) { // Upside down
                            raster.getPixels(0, y, imgWidth, 1, raw);
                            for (int x = 0, n = imgWidth * 3; x < n; x += 3) {
                                bytes[x + 2] = (byte) raw[x]; // Red
                                bytes[x + 1] = (byte) raw[x + 1]; // Green
                                bytes[x] = (byte) raw[x + 2]; // Blue
                            }
                            videoFrameChunk.getOutputStream().write(bytes);
                        }
                        break;
                    }
                }
                break;
            }
            case JPG: {
                videoFrameChunk = new DataChunk("00dc");
                ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next();
                ImageWriteParam iwParam = iw.getDefaultWriteParam();
                iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
                iwParam.setCompressionQuality(quality);
                MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
                iw.setOutput(imgOut);
                IIOImage img = new IIOImage(image, null, null);
                iw.write(null, img, iwParam);
                iw.dispose();
                break;
            }
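            // Note: there is no dedicated writer for the RLE format in this
            // switch, so RLE frames fall through to the PNG branch below.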
            case PNG:
            default: {
                videoFrameChunk = new DataChunk("00dc");
                ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next();
                ImageWriteParam iwParam = iw.getDefaultWriteParam();
                MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream());
                iw.setOutput(imgOut);
                IIOImage img = new IIOImage(image, null, null);
                iw.write(null, img, iwParam);
                iw.dispose();
                break;
            }
        }
        long length = getRelativeStreamPosition() - offset;
        moviChunk.add(videoFrameChunk);
        videoFrameChunk.finish();

        videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync));
        if (getRelativeStreamPosition() > 1L << 32) {
            throw new IOException("AVI file is larger than 4 GB");
        }
    }
    /**
     * Writes a frame from a file to the video track.
     * <p>
     * This method does not inspect the contents of the file.
     * For example, it's your responsibility to only add JPG files if you have
     * chosen the JPEG video format.
     * <p>
     * If you add all frames from files or from input streams, then you
     * have to explicitly set the dimension of the video track before you
     * call finish() or close().
     *
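     * Sketch (the file name is hypothetical), assuming the JPG format was
     * chosen and no dimension was derived from a BufferedImage:
     * <pre>{@code
     * out.setVideoDimension(640, 480); // required when adding frames from files
     * out.writeFrame(new File("frame-0001.jpg"));
     * }</pre>
     *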
     * @param file The file which holds the image data.
     *
     * @throws IOException if writing the image failed.
     */
    public void writeFrame(File file) throws IOException {
        FileInputStream in = null;
        try {
            in = new FileInputStream(file);
            writeFrame(in);
        } finally {
            if (in != null) {
                in.close();
            }
        }
    }
    /**
     * Writes a frame to the video track.
     * <p>
     * This method does not inspect the contents of the input stream.
     * For example, it's your responsibility to only add JPG files if you have
     * chosen the JPEG video format.
     * <p>
     * If you add all frames from files or from input streams, then you
     * have to explicitly set the dimension of the video track before you
     * call finish() or close().
     *
     * @param in The input stream which holds the image data.
     *
     * @throws IOException if writing the image failed.
     */
    public void writeFrame(InputStream in) throws IOException {
        ensureOpen();
        ensureStarted();

        DataChunk videoFrameChunk = new DataChunk(
                videoFormat == VideoFormat.RAW ? "00db" : "00dc");
        moviChunk.add(videoFrameChunk);
        OutputStream mdatOut = videoFrameChunk.getOutputStream();
        long offset = getRelativeStreamPosition();
        byte[] buf = new byte[512];
        int len;
        while ((len = in.read(buf)) != -1) {
            mdatOut.write(buf, 0, len);
        }
        long length = getRelativeStreamPosition() - offset;
        videoFrameChunk.finish();
        videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true));
        if (getRelativeStreamPosition() > 1L << 32) {
            throw new IOException("AVI file is larger than 4 GB");
        }
    }
    /**
     * Closes the movie file as well as the stream being filtered.
     *
     * @exception IOException if an I/O error has occurred
     */
    public void close() throws IOException {
        if (state == States.STARTED) {
            finish();
        }
        if (state != States.CLOSED) {
            out.close();
            state = States.CLOSED;
        }
    }
    /**
     * Finishes writing the contents of the AVI output stream without closing
     * the underlying stream. Use this method when applying multiple filters
     * in succession to the same output stream.
     *
     * @exception IllegalStateException if the dimension of the video track
     * has not been specified or determined yet.
     * @exception IOException if an I/O exception has occurred
     */
    public void finish() throws IOException {
        ensureOpen();
        if (state != States.FINISHED) {
            if (imgWidth == -1 || imgHeight == -1) {
                throw new IllegalStateException("image width and height must be specified");
            }

            moviChunk.finish();
            writeEpilog();
            state = States.FINISHED;
            imgWidth = imgHeight = -1;
        }
    }
    /**
     * Check to make sure that this stream has not been closed.
     */
    private void ensureOpen() throws IOException {
        if (state == States.CLOSED) {
            throw new IOException("Stream closed");
        }
    }
    /** Gets the position relative to the beginning of the AVI stream.
     * <p>
     * Usually this value is equal to the stream position of the underlying
     * ImageOutputStream, but it can be larger if the underlying stream already
     * contained data.
     *
     * @return The relative stream position.
     * @throws IOException
     */
    private long getRelativeStreamPosition() throws IOException {
        return out.getStreamPosition() - streamOffset;
    }

    /** Seeks relative to the beginning of the AVI stream.
     * <p>
     * Usually this is equal to seeking in the underlying ImageOutputStream,
     * but it can be different if the underlying stream already contained data.
     */
    private void seekRelative(long newPosition) throws IOException {
        out.seek(newPosition + streamOffset);
    }
    private void writeProlog() throws IOException {
        // The file has the following structure:
        //
        // .RIFF AVI
        // ..avih (AVI Header Chunk)
        // ..LIST strl
        // ...strh (Stream Header Chunk)
        // ...strf (Stream Format Chunk)
        // ..LIST movi
        // ...00dc (Compressed video data chunk in Track 00, repeated for each frame)
        // ..idx1 (List of video data chunks and their location in the file)

        // The RIFF AVI Chunk holds the complete movie
        aviChunk = new CompositeChunk("RIFF", "AVI ");
        CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl");

        // Write empty AVI Main Header Chunk - we fill the data in later
        aviChunk.add(hdrlChunk);
        avihChunk = new FixedSizeDataChunk("avih", 56);
        avihChunk.seekToEndOfChunk();
        hdrlChunk.add(avihChunk);

        CompositeChunk strlChunk = new CompositeChunk("LIST", "strl");
        hdrlChunk.add(strlChunk);

        // Write empty AVI Stream Header Chunk - we fill the data in later
        strhChunk = new FixedSizeDataChunk("strh", 56);
        strhChunk.seekToEndOfChunk();
        strlChunk.add(strhChunk);
        strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4);
        strfChunk.seekToEndOfChunk();
        strlChunk.add(strfChunk);

        moviChunk = new CompositeChunk("LIST", "movi");
        aviChunk.add(moviChunk);
    }
    private void writeEpilog() throws IOException {

        long bufferSize = 0;
        for (Sample s : videoFrames) {
            if (s.length > bufferSize) {
                bufferSize = s.length;
            }
        }

        DataChunkOutputStream d;
        /* Create Idx1 Chunk and write data
         * -------------
         typedef struct _avioldindex {
             FOURCC fcc;
             DWORD cb;
             struct _avioldindex_entry {
                 DWORD dwChunkId;
                 DWORD dwFlags;
                 DWORD dwOffset;
                 DWORD dwSize;
             } aIndex[];
         } AVIOLDINDEX;
         */
        DataChunk idx1Chunk = new DataChunk("idx1");
        aviChunk.add(idx1Chunk);
        d = idx1Chunk.getOutputStream();
        long moviListOffset = moviChunk.offset + 8;
        for (Sample f : videoFrames) {

            d.writeType(f.chunkType); // dwChunkId
            // Specifies a FOURCC that identifies a stream in the AVI file. The
            // FOURCC must have the form 'xxyy' where xx is the stream number and yy
            // is a two-character code that identifies the contents of the stream:
            //
            // Two-character code    Description
            //      db               Uncompressed video frame
            //      dc               Compressed video frame
            //      pc               Palette change
            //      wb               Audio data

            d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)
                    | (f.isSync ? 0x10 : 0x0)); // dwFlags
            // Specifies a bitwise combination of zero or more of the following
            // flags:
            //
            // Value    Name            Description
            // 0x10     AVIIF_KEYFRAME  The data chunk is a key frame.
            // 0x1      AVIIF_LIST      The data chunk is a 'rec ' list.
            // 0x100    AVIIF_NO_TIME   The data chunk does not affect the timing of the
            //                          stream. For example, this flag should be set for
            //                          palette changes.

            d.writeUInt(f.offset - moviListOffset); // dwOffset
            // Specifies the location of the data chunk in the file. The value
            // should be specified as an offset, in bytes, from the start of the
            // 'movi' list; however, in some AVI files it is given as an offset from
            // the start of the file.

            d.writeUInt(f.length); // dwSize
            // Specifies the size of the data chunk, in bytes.
        }
        idx1Chunk.finish();
        /* Write Data into AVI Main Header Chunk
         * -------------
         * The AVIMAINHEADER structure defines global information in an AVI file.
         * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx
         typedef struct _avimainheader {
             FOURCC fcc;
             DWORD cb;
             DWORD dwMicroSecPerFrame;
             DWORD dwMaxBytesPerSec;
             DWORD dwPaddingGranularity;
             DWORD dwFlags;
             DWORD dwTotalFrames;
             DWORD dwInitialFrames;
             DWORD dwStreams;
             DWORD dwSuggestedBufferSize;
             DWORD dwWidth;
             DWORD dwHeight;
             DWORD dwReserved[4];
         } AVIMAINHEADER; */
        avihChunk.seekToStartOfData();
        d = avihChunk.getOutputStream();

        d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame
        // Specifies the number of microseconds between frames.
        // This value indicates the overall timing for the file.
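        // e.g. with the defaults timeScale = 1 and frameRate = 30:
        // 1000000 * 1 / 30 = 33333 microseconds per frame.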
        d.writeUInt(0); // dwMaxBytesPerSec
        // Specifies the approximate maximum data rate of the file.
        // This value indicates the number of bytes per second the system
        // must handle to present an AVI sequence as specified by the other
        // parameters contained in the main header and stream header chunks.

        d.writeUInt(0); // dwPaddingGranularity
        // Specifies the alignment for data, in bytes. Pad the data to multiples
        // of this value.

        d.writeUInt(0x10); // dwFlags (0x10 == hasIndex)
        // Contains a bitwise combination of zero or more of the following
        // flags:
        //
        // Value    Name                Description
        // 0x10     AVIF_HASINDEX       Indicates the AVI file has an index.
        // 0x20     AVIF_MUSTUSEINDEX   Indicates that application should use the
        //                              index, rather than the physical ordering of the
        //                              chunks in the file, to determine the order of
        //                              presentation of the data. For example, this flag
        //                              could be used to create a list of frames for
        //                              editing.
        // 0x100    AVIF_ISINTERLEAVED  Indicates the AVI file is interleaved.
        // 0x1000   AVIF_WASCAPTUREFILE Indicates the AVI file is a specially
        //                              allocated file used for capturing real-time
        //                              video. Applications should warn the user before
        //                              writing over a file with this flag set because
        //                              the user probably defragmented this file.
        // 0x20000  AVIF_COPYRIGHTED    Indicates the AVI file contains copyrighted
        //                              data and software. When this flag is used,
        //                              software should not permit the data to be
        //                              duplicated.

        d.writeUInt(videoFrames.size()); // dwTotalFrames
        // Specifies the total number of frames of data in the file.

        d.writeUInt(0); // dwInitialFrames
        // Specifies the initial frame for interleaved files. Noninterleaved
        // files should specify zero. If you are creating interleaved files,
        // specify the number of frames in the file prior to the initial frame
        // of the AVI sequence in this member.
        // To give the audio driver enough audio to work with, the audio data in
        // an interleaved file must be skewed from the video data. Typically,
        // the audio data should be moved forward enough frames to allow
        // approximately 0.75 seconds of audio data to be preloaded. The
        // dwInitialRecords member should be set to the number of frames the
        // audio is skewed. Also set the same value for the dwInitialFrames
        // member of the AVISTREAMHEADER structure in the audio stream header.

        d.writeUInt(1); // dwStreams
        // Specifies the number of streams in the file. For example, a file with
        // audio and video has two streams.

        d.writeUInt(bufferSize); // dwSuggestedBufferSize
        // Specifies the suggested buffer size for reading the file. Generally,
        // this size should be large enough to contain the largest chunk in the
        // file. If set to zero, or if it is too small, the playback software
        // will have to reallocate memory during playback, which will reduce
        // performance. For an interleaved file, the buffer size should be large
        // enough to read an entire record, and not just a chunk.

        d.writeUInt(imgWidth); // dwWidth
        // Specifies the width of the AVI file in pixels.

        d.writeUInt(imgHeight); // dwHeight
        // Specifies the height of the AVI file in pixels.

        d.writeUInt(0); // dwReserved[0]
        d.writeUInt(0); // dwReserved[1]
        d.writeUInt(0); // dwReserved[2]
        d.writeUInt(0); // dwReserved[3]
        // Reserved. Set this array to zero.
        /* Write Data into AVI Stream Header Chunk
         * -------------
         * The AVISTREAMHEADER structure contains information about one stream
         * in an AVI file.
         * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx
         typedef struct _avistreamheader {
             FOURCC fcc;
             DWORD cb;
             FOURCC fccType;
             FOURCC fccHandler;
             DWORD dwFlags;
             WORD wPriority;
             WORD wLanguage;
             DWORD dwInitialFrames;
             DWORD dwScale;
             DWORD dwRate;
             DWORD dwStart;
             DWORD dwLength;
             DWORD dwSuggestedBufferSize;
             DWORD dwQuality;
             DWORD dwSampleSize;
             struct {
                 short int left;
                 short int top;
                 short int right;
                 short int bottom;
             } rcFrame;
         } AVISTREAMHEADER;
         */
        strhChunk.seekToStartOfData();
        d = strhChunk.getOutputStream();
        d.writeType("vids"); // fccType - vids for video stream
        // Contains a FOURCC that specifies the type of the data contained in
        // the stream. The following standard AVI values for video and audio are
        // defined:
        //
        // FOURCC   Description
        // 'auds'   Audio stream
        // 'mids'   MIDI stream
        // 'txts'   Text stream
        // 'vids'   Video stream

        switch (videoFormat) {
            case RAW:
                d.writeType("DIB "); // fccHandler - DIB for Raw RGB
                break;
            case RLE:
                d.writeType("RLE "); // fccHandler - Microsoft RLE
                break;
            case JPG:
                d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG
                break;
            case PNG:
            default:
                d.writeType("png "); // fccHandler - png for PNG
                break;
        }
        // Optionally, contains a FOURCC that identifies a specific data
        // handler. The data handler is the preferred handler for the stream.
        // For audio and video streams, this specifies the codec for decoding
        // the stream.

        if (imgDepth <= 8) {
            d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES
        } else {
            d.writeUInt(0); // dwFlags
        }
        // Contains any flags for the data stream. The bits in the high-order
        // word of these flags are specific to the type of data contained in the
        // stream. The following standard flags are defined:
        //
        // Value                                Name        Description
        // AVISF_DISABLED         0x00000001    Indicates this stream should not
        //                                      be enabled by default.
        // AVISF_VIDEO_PALCHANGES 0x00010000    Indicates this video stream contains
        //                                      palette changes. This flag warns the
        //                                      playback software that it will need
        //                                      to animate the palette.

        d.writeUShort(0); // wPriority
        // Specifies priority of a stream type. For example, in a file with
        // multiple audio streams, the one with the highest priority might be
        // the default stream.

        d.writeUShort(0); // wLanguage
        // Language tag.

        d.writeUInt(0); // dwInitialFrames
        // Specifies how far audio data is skewed ahead of the video frames in
        // interleaved files. Typically, this is about 0.75 seconds. If you are
        // creating interleaved files, specify the number of frames in the file
        // prior to the initial frame of the AVI sequence in this member. For
        // more information, see the remarks for the dwInitialFrames member of
        // the AVIMAINHEADER structure.

        d.writeUInt(timeScale); // dwScale
        // Used with dwRate to specify the time scale that this stream will use.
        // Dividing dwRate by dwScale gives the number of samples per second.
        // For video streams, this is the frame rate. For audio streams, this
        // rate corresponds to the time needed to play nBlockAlign bytes of
        // audio, which for PCM audio is just the sample rate.

        d.writeUInt(frameRate); // dwRate
        // See dwScale.
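        // e.g. dwRate = 30 with dwScale = 1 gives 30 frames per second.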
        d.writeUInt(0); // dwStart
        // Specifies the starting time for this stream. The units are defined by
        // the dwRate and dwScale members in the main file header. Usually, this
        // is zero, but it can specify a delay time for a stream that does not
        // start concurrently with the file.

        d.writeUInt(videoFrames.size()); // dwLength
        // Specifies the length of this stream. The units are defined by the
        // dwRate and dwScale members of the stream's header.

        d.writeUInt(bufferSize); // dwSuggestedBufferSize
        // Specifies how large a buffer should be used to read this stream.
        // Typically, this contains a value corresponding to the largest chunk
        // present in the stream. Using the correct buffer size makes playback
        // more efficient. Use zero if you do not know the correct buffer size.

        d.writeInt(-1); // dwQuality
        // Specifies an indicator of the quality of the data in the stream.
        // Quality is represented as a number between 0 and 10,000.
        // For compressed data, this typically represents the value of the
        // quality parameter passed to the compression software. If set to -1,
        // drivers use the default quality value.

        d.writeUInt(0); // dwSampleSize
        // Specifies the size of a single sample of data. This is set to zero
        // if the samples can vary in size. If this number is nonzero, then
        // multiple samples of data can be grouped into a single chunk within
        // the file. If it is zero, each sample of data (such as a video frame)
        // must be in a separate chunk. For video streams, this number is
        // typically zero, although it can be nonzero if all video frames are
        // the same size. For audio streams, this number should be the same as
        // the nBlockAlign member of the WAVEFORMATEX structure describing the
        // audio.

        d.writeUShort(0); // rcFrame.left
        d.writeUShort(0); // rcFrame.top
        d.writeUShort(imgWidth); // rcFrame.right
        d.writeUShort(imgHeight); // rcFrame.bottom
        // Specifies the destination rectangle for a text or video stream within
        // the movie rectangle specified by the dwWidth and dwHeight members of
        // the AVI main header structure. The rcFrame member is typically used
        // in support of multiple video streams. Set this rectangle to the
        // coordinates corresponding to the movie rectangle to update the whole
        // movie rectangle. Units for this member are pixels. The upper-left
        // corner of the destination rectangle is relative to the upper-left
        // corner of the movie rectangle.
        /* Write BITMAPINFOHEADER Data into AVI Stream Format Chunk
         * -------------
         * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx
         typedef struct tagBITMAPINFOHEADER {
             DWORD biSize;
             LONG  biWidth;
             LONG  biHeight;
             WORD  biPlanes;
             WORD  biBitCount;
             DWORD biCompression;
             DWORD biSizeImage;
             LONG  biXPelsPerMeter;
             LONG  biYPelsPerMeter;
             DWORD biClrUsed;
             DWORD biClrImportant;
         } BITMAPINFOHEADER;
         */
        strfChunk.seekToStartOfData();
        d = strfChunk.getOutputStream();
        d.writeUInt(40); // biSize
        // Specifies the number of bytes required by the structure. This value
        // does not include the size of the color table or the size of the color
        // masks, if they are appended to the end of structure.

        d.writeInt(imgWidth); // biWidth
        // Specifies the width of the bitmap, in pixels.

        d.writeInt(imgHeight); // biHeight
        // Specifies the height of the bitmap, in pixels.
        //
        // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is
        // a bottom-up DIB with the origin at the lower left corner. If biHeight
        // is negative, the bitmap is a top-down DIB with the origin at the
        // upper left corner.
        // For YUV bitmaps, the bitmap is always top-down, regardless of the
        // sign of biHeight. Decoders should offer YUV formats with positive
        // biHeight, but for backward compatibility they should accept YUV
        // formats with either positive or negative biHeight.
        // For compressed formats, biHeight must be positive, regardless of
        // image orientation.

        d.writeShort(1); // biPlanes
        // Specifies the number of planes for the target device. This value must
        // be set to 1.

        d.writeShort(imgDepth); // biBitCount
        // Specifies the number of bits per pixel (bpp). For uncompressed
        // formats, this value is the average number of bits per pixel. For
        // compressed formats, this value is the implied bit depth of the
        // uncompressed image, after the image has been decoded.
        switch (videoFormat) {
            case RAW:
            default:
                d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB
                break;
            case RLE:
                if (imgDepth == 8) {
                    d.writeInt(1); // biCompression - BI_RLE8
                } else if (imgDepth == 4) {
                    d.writeInt(2); // biCompression - BI_RLE4
                } else {
                    throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images");
                }
                break;
            case JPG:
                d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG
                break;
            case PNG:
                d.writeType("png "); // biCompression - png for PNG
                break;
        }
        // For compressed video and YUV formats, this member is a FOURCC code,
        // specified as a DWORD in little-endian order. For example, YUYV video
        // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC
        // Codes.
        //
        // For uncompressed RGB formats, the following values are possible:
        //
        // Value        Description
        // BI_RGB       0x00000000 Uncompressed RGB.
        // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks.
        //                         Valid for 16-bpp and 32-bpp bitmaps.
        //
        // Note that BI_JPG and BI_PNG are not valid video formats.
        //
        // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is
        // always RGB 555. If biCompression equals BI_BITFIELDS, the format is
        // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE
        // structure to determine the specific RGB type.
        switch (videoFormat) {
            case RAW:
                d.writeInt(0); // biSizeImage
                break;
            case RLE:
            case JPG:
            case PNG:
            default:
                if (imgDepth == 4) {
                    d.writeInt(imgWidth * imgHeight / 2); // biSizeImage
                } else {
                    int bytesPerPixel = Math.max(1, imgDepth / 8);
                    d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage
                }
                break;
        }
        // Specifies the size, in bytes, of the image. This can be set to 0 for
        // uncompressed RGB bitmaps.
        d.writeInt(0); // biXPelsPerMeter
        // Specifies the horizontal resolution, in pixels per meter, of the
        // target device for the bitmap.

        d.writeInt(0); // biYPelsPerMeter
        // Specifies the vertical resolution, in pixels per meter, of the target
        // device for the bitmap.

        d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed
        // Specifies the number of color indices in the color table that are
        // actually used by the bitmap.

        d.writeInt(0); // biClrImportant
        // Specifies the number of color indices that are considered important
        // for displaying the bitmap. If this value is zero, all colors are
        // important.
        if (palette != null) {
            for (int i = 0, n = palette.getMapSize(); i < n; ++i) {
                /*
                 * typedef struct tagRGBQUAD {
                 *     BYTE rgbBlue;
                 *     BYTE rgbGreen;
                 *     BYTE rgbRed;
                 *     BYTE rgbReserved; // This member is reserved and must be zero.
                 * } RGBQUAD;
                 */
                d.write(palette.getBlue(i));
                d.write(palette.getGreen(i));
                d.write(palette.getRed(i));
                d.write(0);
            }
        }

        // -----------------
        aviChunk.finish();
    }
}