Mercurial > jmeCapture
comparison src/com/aurellem/capture/AVIOutputStream.java @ 3:a92de00f0414
migrating files
author | Robert McIntyre <rlm@mit.edu> |
---|---|
date | Tue, 25 Oct 2011 11:55:55 -0700 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
2:59509c585530 | 3:a92de00f0414 |
---|---|
1 /** | |
2 * @(#)AVIOutputStream.java 1.5.1 2011-01-17 | |
3 * | |
4 * Copyright (c) 2008-2011 Werner Randelshofer, Immensee, Switzerland. | |
5 * All rights reserved. | |
6 * | |
7 * You may not use, copy or modify this file, except in compliance with the | |
8 * license agreement you entered into with Werner Randelshofer. | |
9 * For details see accompanying license terms. | |
10 */ | |
11 package com.aurellem.capture; | |
12 | |
13 import java.awt.Dimension; | |
14 import java.awt.image.BufferedImage; | |
15 import java.awt.image.DataBufferByte; | |
16 import java.awt.image.IndexColorModel; | |
17 import java.awt.image.WritableRaster; | |
18 import java.io.File; | |
19 import java.io.FileInputStream; | |
20 import java.io.IOException; | |
21 import java.io.InputStream; | |
22 import java.io.OutputStream; | |
23 import java.util.Arrays; | |
24 import java.util.Date; | |
25 import java.util.LinkedList; | |
26 | |
27 import javax.imageio.IIOImage; | |
28 import javax.imageio.ImageIO; | |
29 import javax.imageio.ImageWriteParam; | |
30 import javax.imageio.ImageWriter; | |
31 import javax.imageio.stream.FileImageOutputStream; | |
32 import javax.imageio.stream.ImageOutputStream; | |
33 import javax.imageio.stream.MemoryCacheImageOutputStream; | |
34 | |
35 /** | |
36 * This class supports writing of images into an AVI 1.0 video file. | |
37 * <p> | |
38 * The images are written as video frames. | |
39 * <p> | |
40 * Video frames can be encoded with one of the following formats: | |
41 * <ul> | |
42 * <li>JPEG</li> | |
43 * <li>PNG</li> | |
44 * <li>RAW</li> | |
45 * <li>RLE</li> | |
46 * </ul> | |
47 * All frames must have the same format. | |
48 * When JPG is used each frame can have an individual encoding quality. | |
49 * <p> | |
50 * All frames in an AVI file must have the same duration. The duration can | |
51 * be set by setting an appropriate pair of values using methods | |
52 * {@link #setFrameRate} and {@link #setTimeScale}. | |
53 * <p> | |
54 * The length of an AVI 1.0 file is limited to 1 GB. | |
55 * This class supports lengths of up to 4 GB, but such files may not work on | |
56 * all players. | |
57 * <p> | |
58 * For detailed information about the AVI RIFF file format see:<br> | |
59 * <a href="http://msdn.microsoft.com/en-us/library/ms779636.aspx">msdn.microsoft.com AVI RIFF</a><br> | |
60 * <a href="http://www.microsoft.com/whdc/archive/fourcc.mspx">www.microsoft.com FOURCC for Video Compression</a><br> | |
61 * <a href="http://www.saettler.com/RIFFMCI/riffmci.html">www.saettler.com RIFF</a><br> | |
62 * | |
63 * @author Werner Randelshofer | |
 * @version 1.5.1 2011-01-17 Fixes unintended closing of output stream.
65 * <br>1.5 2011-01-06 Adds support for RLE 8-bit video format. | |
66 * <br>1.4 2011-01-04 Adds support for RAW 4-bit and 8-bit video format. Fixes offsets | |
67 * in "idx1" chunk. | |
68 * <br>1.3.2 2010-12-27 File size limit is 1 GB. | |
69 * <br>1.3.1 2010-07-19 Fixes seeking and calculation of offsets. | |
70 * <br>1.3 2010-07-08 Adds constructor with ImageOutputStream. | |
71 * Added method getVideoDimension(). | |
72 * <br>1.2 2009-08-29 Adds support for RAW video format. | |
73 * <br>1.1 2008-08-27 Fixes computation of dwMicroSecPerFrame in avih | |
74 * chunk. Changed the API to reflect that AVI works with frame rates instead of | |
75 * with frame durations. | |
76 * <br>1.0.1 2008-08-13 Uses FourCC "MJPG" instead of "jpg " for JPG | |
77 * encoded video. | |
78 * <br>1.0 2008-08-11 Created. | |
79 */ | |
80 public class AVIOutputStream { | |
81 | |
    /**
     * Underlying output stream that the AVI structure is written to.
     */
    private ImageOutputStream out;
    /** The offset of the AVI stream in the underlying ImageOutputStream.
     * Normally this is 0 unless the underlying stream already contained data
     * when it was passed to the constructor.
     */
    private long streamOffset;
    /** Previous frame for delta compression.
     * NOTE(review): never referenced in this part of the file — presumably
     * reserved for the RLE encoder; confirm before removing. */
    private Object previousData;
    /**
     * Supported video encodings.
     * <p>
     * NOTE(review): writeFrame(BufferedImage) in this file has dedicated
     * branches only for RAW and JPG; RLE falls through to the PNG writer —
     * confirm whether RLE encoding is handled elsewhere.
     */
    public static enum VideoFormat {

        RAW, RLE, JPG, PNG;
    }
    /**
     * Current video format. All frames are encoded with this format.
     */
    private VideoFormat videoFormat;
    /**
     * Quality of JPEG encoded video frames.
     * Range 0 ("high compression") to 1 ("high quality"); only used by
     * the JPG encoder.
     */
    private float quality = 0.9f;
    /**
     * Creation time of the movie output stream (set when the first frame
     * is written).
     */
    private Date creationTime;
    /**
     * Width of the video frames. All frames must have the same width.
     * The value -1 is used to mark unspecified width.
     */
    private int imgWidth = -1;
    /**
     * Height of the video frames. All frames must have the same height.
     * The value -1 is used to mark unspecified height.
     */
    private int imgHeight = -1;
    /** Number of bits per pixel. Depths 4 and 8 get indexed (palette)
     * handling in the RAW encoder; any other depth is written through the
     * 24-bit scanline writer. */
    private int imgDepth = 24;
    /** Index color model for RAW_RGB4 and RAW_RGB8 formats. */
    private IndexColorModel palette;
    /** Palette of the previously written frame; compared against the
     * current frame's palette to decide whether a "00pc" palette change
     * chunk must be emitted. */
    private IndexColorModel previousPalette;
    // NOTE(review): a "Video encoder." javadoc stub stood here with no
    // declaration following it — apparently a leftover from a removed field.

    /**
     * The timeScale of the movie.
     * <p>
     * Used with frameRate to specify the time scale that this stream will use.
     * Dividing frameRate by timeScale gives the number of samples per second.
     * For video streams, this is the frame rate. For audio streams, this rate
     * corresponds to the time needed to play nBlockAlign bytes of audio, which
     * for PCM audio is the just the sample rate.
     */
    private int timeScale = 1;
    /**
     * The frameRate of the movie in timeScale units.
     * <p>
     * @see #timeScale
     */
    private int frameRate = 30;
    /** Interval between keyframes. */
    private int syncInterval = 30;
148 | |
    /**
     * The states of the movie output stream.
     */
    private static enum States {

        STARTED, FINISHED, CLOSED;
    }
    /**
     * The current state of the movie output stream.
     * Starts out as FINISHED; ensureStarted() moves it to STARTED when the
     * first frame is written.
     */
    private States state = States.FINISHED;
160 | |
161 /** | |
162 * AVI stores media data in samples. | |
163 * A sample is a single element in a sequence of time-ordered data. | |
164 */ | |
165 private static class Sample { | |
166 | |
167 String chunkType; | |
168 /** Offset of the sample relative to the start of the AVI file. | |
169 */ | |
170 long offset; | |
171 /** Data length of the sample. */ | |
172 long length; | |
173 /** | |
174 * The duration of the sample in time scale units. | |
175 */ | |
176 int duration; | |
177 /** Whether the sample is a sync-sample. */ | |
178 boolean isSync; | |
179 | |
180 /** | |
181 * Creates a new sample. | |
182 * @param duration | |
183 * @param offset | |
184 * @param length | |
185 */ | |
186 public Sample(String chunkId, int duration, long offset, long length, boolean isSync) { | |
187 this.chunkType = chunkId; | |
188 this.duration = duration; | |
189 this.offset = offset; | |
190 this.length = length; | |
191 this.isSync = isSync; | |
192 } | |
193 } | |
    /**
     * List of video frames in the order they were written.
     * Presumably used to build the "idx1" index chunk when the movie is
     * finished (index writing happens outside this view — confirm).
     */
    private LinkedList<Sample> videoFrames;
    /**
     * This chunk holds the whole AVI content (the outermost RIFF list).
     */
    private CompositeChunk aviChunk;
    /**
     * This chunk holds the movie frames.
     */
    private CompositeChunk moviChunk;
    /**
     * This chunk holds the AVI Main Header.
     */
    FixedSizeDataChunk avihChunk;
    /**
     * This chunk holds the AVI Stream Header.
     */
    FixedSizeDataChunk strhChunk;
    /**
     * This chunk holds the AVI Stream Format Header.
     */
    FixedSizeDataChunk strfChunk;
218 | |
    /**
     * Chunk base class.
     * <p>
     * A Chunk records the stream position at which it was created; concrete
     * subclasses reserve space for the chunk header there and patch it in
     * later from {@link #finish}.
     */
    private abstract class Chunk {

        /**
         * The chunkType of the chunk. A String with the length of 4 characters.
         */
        protected String chunkType;
        /**
         * The offset of the chunk relative to the start of the
         * ImageOutputStream.
         */
        protected long offset;

        /**
         * Creates a new Chunk at the current position of the ImageOutputStream.
         * @param chunkType The chunkType of the chunk. A string with a length of 4 characters.
         * @throws IOException if the current stream position cannot be read.
         */
        public Chunk(String chunkType) throws IOException {
            this.chunkType = chunkType;
            offset = getRelativeStreamPosition();
        }

        /**
         * Writes the chunk to the ImageOutputStream and disposes it.
         */
        public abstract void finish() throws IOException;

        /**
         * Returns the size of the chunk including the size of the chunk header.
         * @return The size of the chunk.
         */
        public abstract long size();
    }
254 | |
    /**
     * A CompositeChunk contains an ordered list of Chunks (a RIFF/LIST
     * structure). The 12-byte header (compositeType, size, chunkType) is
     * reserved on creation and patched in by {@link #finish}.
     */
    private class CompositeChunk extends Chunk {

        /**
         * The type of the composite. A String with the length of 4 characters.
         */
        protected String compositeType;
        /** Child chunks in write order. */
        private LinkedList<Chunk> children;
        /** Guards against finishing (and re-writing the header) twice. */
        private boolean finished;

        /**
         * Creates a new CompositeChunk at the current position of the
         * ImageOutputStream.
         * @param compositeType The type of the composite.
         * @param chunkType The type of the chunk.
         */
        public CompositeChunk(String compositeType, String chunkType) throws IOException {
            super(chunkType);
            this.compositeType = compositeType;
            // Reserve 12 bytes for the header; filled in by finish().
            out.writeLong(0); // make room for the chunk header
            out.writeInt(0); // make room for the chunk header
            children = new LinkedList<Chunk>();
        }

        /**
         * Adds a child chunk. The previously added child (if any) is
         * finished first, since children must be laid out sequentially.
         */
        public void add(Chunk child) throws IOException {
            if (children.size() > 0) {
                children.getLast().finish();
            }
            children.add(child);
        }

        /**
         * Writes the chunk and all its children to the ImageOutputStream
         * and disposes of all resources held by the chunk.
         * Seeks back to the reserved header, writes it, finishes all
         * children, then restores the stream position and writes a pad
         * byte if the total size is odd (RIFF chunks are word-aligned).
         * @throws java.io.IOException
         */
        @Override
        public void finish() throws IOException {
            if (!finished) {
                if (size() > 0xffffffffL) {
                    throw new IOException("CompositeChunk \"" + chunkType + "\" is too large: " + size());
                }

                long pointer = getRelativeStreamPosition();
                seekRelative(offset);

                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
                headerData.writeType(compositeType);
                // RIFF size field excludes the 8-byte (type + size) prefix.
                headerData.writeUInt(size() - 8);
                headerData.writeType(chunkType);
                for (Chunk child : children) {
                    child.finish();
                }
                seekRelative(pointer);
                if (size() % 2 == 1) {
                    out.writeByte(0); // write pad byte
                }
                finished = true;
            }
        }

        @Override
        public long size() {
            // 12-byte header plus each child padded to an even length.
            long length = 12;
            for (Chunk child : children) {
                length += child.size() + child.size() % 2;
            }
            return length;
        }
    }
328 | |
    /**
     * Data Chunk: a leaf chunk of arbitrary length. An 8-byte header
     * (chunkType, size) is reserved on creation and patched in by
     * {@link #finish} once the data length is known.
     */
    private class DataChunk extends Chunk {

        /** Counts the bytes written so size() can be computed. */
        private DataChunkOutputStream data;
        /** Guards against finishing (and re-writing the header) twice. */
        private boolean finished;

        /**
         * Creates a new DataChunk at the current position of the
         * ImageOutputStream.
         * @param name The chunkType of the chunk.
         */
        public DataChunk(String name) throws IOException {
            super(name);
            out.writeLong(0); // make room for the chunk header
            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out), false);
        }

        /**
         * Returns the stream that chunk payload must be written to.
         * @throws IllegalStateException once the chunk has been finished.
         */
        public DataChunkOutputStream getOutputStream() {
            if (finished) {
                throw new IllegalStateException("DataChunk is finished");
            }
            return data;
        }

        /**
         * Returns the offset of this chunk to the beginning of the random access file
         * @return
         */
        public long getOffset() {
            return offset;
        }

        @Override
        public void finish() throws IOException {
            if (!finished) {
                long sizeBefore = size();

                if (size() > 0xffffffffL) {
                    throw new IOException("DataChunk \"" + chunkType + "\" is too large: " + size());
                }

                // Seek back to the reserved header and fill it in, then
                // restore the position and word-align with a pad byte.
                long pointer = getRelativeStreamPosition();
                seekRelative(offset);

                DataChunkOutputStream headerData = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
                headerData.writeType(chunkType);
                headerData.writeUInt(size() - 8);
                seekRelative(pointer);
                if (size() % 2 == 1) {
                    out.writeByte(0); // write pad byte
                }
                finished = true;
                // Sanity check: writing the header must not have altered the
                // counted payload size. Diagnostic only; goes to stderr.
                long sizeAfter = size();
                if (sizeBefore != sizeAfter) {
                    System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter);
                }
            }
        }

        @Override
        public long size() {
            return 8 + data.size();
        }
    }
395 | |
    /**
     * A DataChunk with a fixed size. The header and a zero-filled body are
     * written immediately on creation; callers then seek back and overwrite
     * the body in place (used for the avih/strh/strf header chunks).
     */
    private class FixedSizeDataChunk extends Chunk {

        /** Counts bytes written into the fixed-size body. */
        private DataChunkOutputStream data;
        private boolean finished;
        /** Payload size in bytes, fixed at construction time. */
        private long fixedSize;

        /**
         * Creates a new FixedSizeDataChunk at the current position of the
         * ImageOutputStream. Writes the header and a zero-filled body, then
         * seeks back to the start of the body.
         * @param chunkType The chunkType of the chunk.
         * @param fixedSize The payload size in bytes.
         */
        public FixedSizeDataChunk(String chunkType, long fixedSize) throws IOException {
            super(chunkType);
            this.fixedSize = fixedSize;
            data = new DataChunkOutputStream(new ImageOutputStreamAdapter(out),false);
            data.writeType(chunkType);
            data.writeUInt(fixedSize);
            data.clearCount();

            // Fill fixed size with nulls, 512 bytes at a time.
            byte[] buf = new byte[(int) Math.min(512, fixedSize)];
            long written = 0;
            while (written < fixedSize) {
                data.write(buf, 0, (int) Math.min(buf.length, fixedSize - written));
                written += Math.min(buf.length, fixedSize - written);
            }
            if (fixedSize % 2 == 1) {
                out.writeByte(0); // write pad byte
            }
            seekToStartOfData();
        }

        /**
         * Returns the stream for overwriting the chunk body. Unlike
         * DataChunk, this intentionally remains usable after finish()
         * (the "finished" guard is deliberately disabled below).
         */
        public DataChunkOutputStream getOutputStream() {
            /*if (finished) {
            throw new IllegalStateException("DataChunk is finished");
            }*/
            return data;
        }

        /**
         * Returns the offset of this chunk to the beginning of the random access file
         * @return
         */
        public long getOffset() {
            return offset;
        }

        /** Positions the stream at the first body byte (past the 8-byte header). */
        public void seekToStartOfData() throws IOException {
            seekRelative(offset + 8);
            data.clearCount();
        }

        /** Positions the stream just past this chunk, including the pad byte. */
        public void seekToEndOfChunk() throws IOException {
            seekRelative(offset + 8 + fixedSize + fixedSize % 2);
        }

        @Override
        public void finish() throws IOException {
            // Header and body were already written in the constructor;
            // nothing to patch here.
            if (!finished) {
                finished = true;
            }
        }

        @Override
        public long size() {
            return 8 + fixedSize;
        }
    }
467 | |
    /**
     * Creates a new AVI file with the specified video format and a default
     * depth of 24 bits per pixel.
     *
     * @param file the output file; deleted first if it already exists
     * @param format Selects an encoder for the video format.
     * @exception IllegalArgumentException if format is null
     * @exception IOException if the file cannot be opened for writing
     */
    public AVIOutputStream(File file, VideoFormat format) throws IOException {
        this(file,format,24);
    }
    /**
     * Creates a new AVI file with the specified video format and number of
     * bits per pixel.
     *
     * @param file the output file; deleted first if it already exists
     * @param format Selects an encoder for the video format.
     * @param bitsPerPixel the number of bits per pixel. For depths 4 and 8 a
     * grayscale palette is installed as the default palette; other depths
     * are passed through unchanged.
     * @exception IllegalArgumentException if format is null
     * @exception IOException if the file cannot be opened for writing
     */
    public AVIOutputStream(File file, VideoFormat format, int bitsPerPixel) throws IOException {
        if (format == null) {
            throw new IllegalArgumentException("format must not be null");
        }

        // NOTE(review): the result of delete() is ignored — if a stale file
        // cannot be removed, the failure surfaces when opening the stream
        // below; confirm this is the intended behavior.
        if (file.exists()) {
            file.delete();
        }
        this.out = new FileImageOutputStream(file);
        this.streamOffset = 0;
        this.videoFormat = format;
        this.videoFrames = new LinkedList<Sample>();
        this.imgDepth = bitsPerPixel;
        if (imgDepth == 4) {
            // Default 16-entry grayscale palette: each nibble duplicated
            // into both halves of the byte (0x00, 0x11, ... 0xff).
            byte[] gray = new byte[16];
            for (int i = 0; i < gray.length; i++) {
                gray[i] = (byte) ((i << 4) | i);
            }
            palette = new IndexColorModel(4, 16, gray, gray, gray);
        } else if (imgDepth == 8) {
            // Default 256-entry linear grayscale palette.
            byte[] gray = new byte[256];
            for (int i = 0; i < gray.length; i++) {
                gray[i] = (byte) i;
            }
            palette = new IndexColorModel(8, 256, gray, gray, gray);
        }

    }
519 | |
520 /** | |
521 * Creates a new AVI output stream with the specified video format and | |
522 * framerate. | |
523 * | |
524 * @param out the underlying output stream | |
525 * @param format Selects an encoder for the video format. | |
526 * @exception IllegalArgumentException if videoFormat is null or if | |
527 * framerate is <= 0 | |
528 */ | |
529 public AVIOutputStream(ImageOutputStream out, VideoFormat format) throws IOException { | |
530 if (format == null) { | |
531 throw new IllegalArgumentException("format must not be null"); | |
532 } | |
533 this.out = out; | |
534 this.streamOffset = out.getStreamPosition(); | |
535 this.videoFormat = format; | |
536 this.videoFrames = new LinkedList<Sample>(); | |
537 } | |
538 | |
539 /** | |
540 * Used with frameRate to specify the time scale that this stream will use. | |
541 * Dividing frameRate by timeScale gives the number of samples per second. | |
542 * For video streams, this is the frame rate. For audio streams, this rate | |
543 * corresponds to the time needed to play nBlockAlign bytes of audio, which | |
544 * for PCM audio is the just the sample rate. | |
545 * <p> | |
546 * The default value is 1. | |
547 * | |
548 * @param newValue | |
549 */ | |
550 public void setTimeScale(int newValue) { | |
551 if (newValue <= 0) { | |
552 throw new IllegalArgumentException("timeScale must be greater 0"); | |
553 } | |
554 this.timeScale = newValue; | |
555 } | |
556 | |
    /**
     * Returns the time scale of this media.
     *
     * @return time scale (default is 1)
     */
    public int getTimeScale() {
        return timeScale;
    }
565 | |
566 /** | |
567 * Sets the rate of video frames in time scale units. | |
568 * <p> | |
569 * The default value is 30. Together with the default value 1 of timeScale | |
570 * this results in 30 frames pers second. | |
571 * | |
572 * @param newValue | |
573 */ | |
574 public void setFrameRate(int newValue) { | |
575 if (newValue <= 0) { | |
576 throw new IllegalArgumentException("frameDuration must be greater 0"); | |
577 } | |
578 if (state == States.STARTED) { | |
579 throw new IllegalStateException("frameDuration must be set before the first frame is written"); | |
580 } | |
581 this.frameRate = newValue; | |
582 } | |
583 | |
    /**
     * Returns the frame rate of this media in time scale units.
     *
     * @return frame rate (default is 30)
     */
    public int getFrameRate() {
        return frameRate;
    }
592 | |
    /** Sets the global color palette, used by the RAW encoder for the
     * 4-bit and 8-bit indexed formats. */
    public void setPalette(IndexColorModel palette) {
        this.palette = palette;
    }
597 | |
    /**
     * Sets the compression quality of the video track.
     * A value of 0 stands for "high compression is important" a value of
     * 1 for "high image quality is important".
     * <p>
     * Changing this value affects frames which are subsequently written
     * to the AVIOutputStream. Frames which have already been written
     * are not changed.
     * <p>
     * This value has only effect on videos encoded with JPG format.
     * <p>
     * The default value is 0.9.
     *
     * @param newValue the new quality, nominally in the range [0, 1]
     */
    public void setVideoCompressionQuality(float newValue) {
        // Note: the value is not range-checked here; it is passed straight
        // to ImageWriteParam.setCompressionQuality when a frame is encoded.
        this.quality = newValue;
    }
616 | |
    /**
     * Returns the video compression quality.
     *
     * @return video compression quality (default is 0.9)
     */
    public float getVideoCompressionQuality() {
        return quality;
    }
625 | |
626 /** | |
627 * Sets the dimension of the video track. | |
628 * <p> | |
629 * You need to explicitly set the dimension, if you add all frames from | |
630 * files or input streams. | |
631 * <p> | |
632 * If you add frames from buffered images, then AVIOutputStream | |
633 * can determine the video dimension from the image width and height. | |
634 * | |
635 * @param width Must be greater than 0. | |
636 * @param height Must be greater than 0. | |
637 */ | |
638 public void setVideoDimension(int width, int height) { | |
639 if (width < 1 || height < 1) { | |
640 throw new IllegalArgumentException("width and height must be greater zero."); | |
641 } | |
642 this.imgWidth = width; | |
643 this.imgHeight = height; | |
644 } | |
645 | |
646 /** | |
647 * Gets the dimension of the video track. | |
648 * <p> | |
649 * Returns null if the dimension is not known. | |
650 */ | |
651 public Dimension getVideoDimension() { | |
652 if (imgWidth < 1 || imgHeight < 1) { | |
653 return null; | |
654 } | |
655 return new Dimension(imgWidth, imgHeight); | |
656 } | |
657 | |
658 /** | |
659 * Sets the state of the QuickTimeOutpuStream to started. | |
660 * <p> | |
661 * If the state is changed by this method, the prolog is | |
662 * written. | |
663 */ | |
664 private void ensureStarted() throws IOException { | |
665 if (state != States.STARTED) { | |
666 creationTime = new Date(); | |
667 writeProlog(); | |
668 state = States.STARTED; | |
669 } | |
670 } | |
671 | |
672 /** | |
673 * Writes a frame to the video track. | |
674 * <p> | |
675 * If the dimension of the video track has not been specified yet, it | |
676 * is derived from the first buffered image added to the AVIOutputStream. | |
677 * | |
678 * @param image The frame image. | |
679 * | |
680 * @throws IllegalArgumentException if the duration is less than 1, or | |
681 * if the dimension of the frame does not match the dimension of the video | |
682 * track. | |
683 * @throws IOException if writing the image failed. | |
684 */ | |
685 public void writeFrame(BufferedImage image) throws IOException { | |
686 ensureOpen(); | |
687 ensureStarted(); | |
688 | |
689 // Get the dimensions of the first image | |
690 if (imgWidth == -1) { | |
691 imgWidth = image.getWidth(); | |
692 imgHeight = image.getHeight(); | |
693 } else { | |
694 // The dimension of the image must match the dimension of the video track | |
695 if (imgWidth != image.getWidth() || imgHeight != image.getHeight()) { | |
696 throw new IllegalArgumentException("Dimensions of image[" + videoFrames.size() | |
697 + "] (width=" + image.getWidth() + ", height=" + image.getHeight() | |
698 + ") differs from image[0] (width=" | |
699 + imgWidth + ", height=" + imgHeight); | |
700 } | |
701 } | |
702 | |
703 DataChunk videoFrameChunk; | |
704 long offset = getRelativeStreamPosition(); | |
705 boolean isSync = true; | |
706 switch (videoFormat) { | |
707 case RAW: { | |
708 switch (imgDepth) { | |
709 case 4: { | |
710 IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); | |
711 int[] imgRGBs = new int[16]; | |
712 imgPalette.getRGBs(imgRGBs); | |
713 int[] previousRGBs = new int[16]; | |
714 if (previousPalette == null) { | |
715 previousPalette = palette; | |
716 } | |
717 previousPalette.getRGBs(previousRGBs); | |
718 if (!Arrays.equals(imgRGBs, previousRGBs)) { | |
719 previousPalette = imgPalette; | |
720 DataChunk paletteChangeChunk = new DataChunk("00pc"); | |
721 /* | |
722 int first = imgPalette.getMapSize(); | |
723 int last = -1; | |
724 for (int i = 0; i < 16; i++) { | |
725 if (previousRGBs[i] != imgRGBs[i] && i < first) { | |
726 first = i; | |
727 } | |
728 if (previousRGBs[i] != imgRGBs[i] && i > last) { | |
729 last = i; | |
730 } | |
731 }*/ | |
732 int first = 0; | |
733 int last = imgPalette.getMapSize() - 1; | |
734 /* | |
735 * typedef struct { | |
736 BYTE bFirstEntry; | |
737 BYTE bNumEntries; | |
738 WORD wFlags; | |
739 PALETTEENTRY peNew[]; | |
740 } AVIPALCHANGE; | |
741 * | |
742 * typedef struct tagPALETTEENTRY { | |
743 BYTE peRed; | |
744 BYTE peGreen; | |
745 BYTE peBlue; | |
746 BYTE peFlags; | |
747 } PALETTEENTRY; | |
748 */ | |
749 DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); | |
750 pOut.writeByte(first);//bFirstEntry | |
751 pOut.writeByte(last - first + 1);//bNumEntries | |
752 pOut.writeShort(0);//wFlags | |
753 | |
754 for (int i = first; i <= last; i++) { | |
755 pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red | |
756 pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green | |
757 pOut.writeByte(imgRGBs[i] & 0xff); // blue | |
758 pOut.writeByte(0); // reserved*/ | |
759 } | |
760 | |
761 moviChunk.add(paletteChangeChunk); | |
762 paletteChangeChunk.finish(); | |
763 long length = getRelativeStreamPosition() - offset; | |
764 videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); | |
765 offset = getRelativeStreamPosition(); | |
766 } | |
767 | |
768 videoFrameChunk = new DataChunk("00db"); | |
769 byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); | |
770 byte[] rgb4 = new byte[imgWidth / 2]; | |
771 for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down | |
772 for (int x = 0, xx = 0, n = imgWidth; x < n; x += 2, ++xx) { | |
773 rgb4[xx] = (byte) (((rgb8[y + x] & 0xf) << 4) | (rgb8[y + x + 1] & 0xf)); | |
774 } | |
775 videoFrameChunk.getOutputStream().write(rgb4); | |
776 } | |
777 break; | |
778 } | |
779 case 8: { | |
780 IndexColorModel imgPalette = (IndexColorModel) image.getColorModel(); | |
781 int[] imgRGBs = new int[256]; | |
782 imgPalette.getRGBs(imgRGBs); | |
783 int[] previousRGBs = new int[256]; | |
784 if (previousPalette == null) { | |
785 previousPalette = palette; | |
786 } | |
787 previousPalette.getRGBs(previousRGBs); | |
788 if (!Arrays.equals(imgRGBs, previousRGBs)) { | |
789 previousPalette = imgPalette; | |
790 DataChunk paletteChangeChunk = new DataChunk("00pc"); | |
791 /* | |
792 int first = imgPalette.getMapSize(); | |
793 int last = -1; | |
794 for (int i = 0; i < 16; i++) { | |
795 if (previousRGBs[i] != imgRGBs[i] && i < first) { | |
796 first = i; | |
797 } | |
798 if (previousRGBs[i] != imgRGBs[i] && i > last) { | |
799 last = i; | |
800 } | |
801 }*/ | |
802 int first = 0; | |
803 int last = imgPalette.getMapSize() - 1; | |
804 /* | |
805 * typedef struct { | |
806 BYTE bFirstEntry; | |
807 BYTE bNumEntries; | |
808 WORD wFlags; | |
809 PALETTEENTRY peNew[]; | |
810 } AVIPALCHANGE; | |
811 * | |
812 * typedef struct tagPALETTEENTRY { | |
813 BYTE peRed; | |
814 BYTE peGreen; | |
815 BYTE peBlue; | |
816 BYTE peFlags; | |
817 } PALETTEENTRY; | |
818 */ | |
819 DataChunkOutputStream pOut = paletteChangeChunk.getOutputStream(); | |
820 pOut.writeByte(first);//bFirstEntry | |
821 pOut.writeByte(last - first + 1);//bNumEntries | |
822 pOut.writeShort(0);//wFlags | |
823 | |
824 for (int i = first; i <= last; i++) { | |
825 pOut.writeByte((imgRGBs[i] >>> 16) & 0xff); // red | |
826 pOut.writeByte((imgRGBs[i] >>> 8) & 0xff); // green | |
827 pOut.writeByte(imgRGBs[i] & 0xff); // blue | |
828 pOut.writeByte(0); // reserved*/ | |
829 } | |
830 | |
831 moviChunk.add(paletteChangeChunk); | |
832 paletteChangeChunk.finish(); | |
833 long length = getRelativeStreamPosition() - offset; | |
834 videoFrames.add(new Sample(paletteChangeChunk.chunkType, 0, offset, length - 8, false)); | |
835 offset = getRelativeStreamPosition(); | |
836 } | |
837 | |
838 videoFrameChunk = new DataChunk("00db"); | |
839 byte[] rgb8 = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); | |
840 for (int y = (imgHeight - 1) * imgWidth; y >= 0; y -= imgWidth) { // Upside down | |
841 videoFrameChunk.getOutputStream().write(rgb8, y, imgWidth); | |
842 } | |
843 break; | |
844 } | |
845 default: { | |
846 videoFrameChunk = new DataChunk("00db"); | |
847 WritableRaster raster = image.getRaster(); | |
848 int[] raw = new int[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 32 bit data | |
849 byte[] bytes = new byte[imgWidth * 3]; // holds a scanline of raw image data with 3 channels of 8 bit data | |
850 for (int y = imgHeight - 1; y >= 0; --y) { // Upside down | |
851 raster.getPixels(0, y, imgWidth, 1, raw); | |
852 for (int x = 0, n = imgWidth * 3; x < n; x += 3) { | |
853 bytes[x + 2] = (byte) raw[x]; // Blue | |
854 bytes[x + 1] = (byte) raw[x + 1]; // Green | |
855 bytes[x] = (byte) raw[x + 2]; // Red | |
856 } | |
857 videoFrameChunk.getOutputStream().write(bytes); | |
858 } | |
859 break; | |
860 } | |
861 } | |
862 break; | |
863 } | |
864 | |
865 case JPG: { | |
866 videoFrameChunk = new DataChunk("00dc"); | |
867 ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/jpeg").next(); | |
868 ImageWriteParam iwParam = iw.getDefaultWriteParam(); | |
869 iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); | |
870 iwParam.setCompressionQuality(quality); | |
871 MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); | |
872 iw.setOutput(imgOut); | |
873 IIOImage img = new IIOImage(image, null, null); | |
874 iw.write(null, img, iwParam); | |
875 iw.dispose(); | |
876 break; | |
877 } | |
878 case PNG: | |
879 default: { | |
880 videoFrameChunk = new DataChunk("00dc"); | |
881 ImageWriter iw = (ImageWriter) ImageIO.getImageWritersByMIMEType("image/png").next(); | |
882 ImageWriteParam iwParam = iw.getDefaultWriteParam(); | |
883 MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(videoFrameChunk.getOutputStream()); | |
884 iw.setOutput(imgOut); | |
885 IIOImage img = new IIOImage(image, null, null); | |
886 iw.write(null, img, iwParam); | |
887 iw.dispose(); | |
888 break; | |
889 } | |
890 } | |
891 long length = getRelativeStreamPosition() - offset; | |
892 moviChunk.add(videoFrameChunk); | |
893 videoFrameChunk.finish(); | |
894 | |
895 videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, isSync)); | |
896 if (getRelativeStreamPosition() > 1L << 32) { | |
897 throw new IOException("AVI file is larger than 4 GB"); | |
898 } | |
899 } | |
900 | |
901 /** | |
902 * Writes a frame from a file to the video track. | |
903 * <p> | |
904 * This method does not inspect the contents of the file. | |
905 * For example, Its your responsibility to only add JPG files if you have | |
906 * chosen the JPEG video format. | |
907 * <p> | |
908 * If you add all frames from files or from input streams, then you | |
909 * have to explicitly set the dimension of the video track before you | |
910 * call finish() or close(). | |
911 * | |
912 * @param file The file which holds the image data. | |
913 * | |
914 * @throws IllegalStateException if the duration is less than 1. | |
915 * @throws IOException if writing the image failed. | |
916 */ | |
917 public void writeFrame(File file) throws IOException { | |
918 FileInputStream in = null; | |
919 try { | |
920 in = new FileInputStream(file); | |
921 writeFrame(in); | |
922 } finally { | |
923 if (in != null) { | |
924 in.close(); | |
925 } | |
926 } | |
927 } | |
928 | |
929 /** | |
930 * Writes a frame to the video track. | |
931 * <p> | |
932 * This method does not inspect the contents of the file. | |
933 * For example, its your responsibility to only add JPG files if you have | |
934 * chosen the JPEG video format. | |
935 * <p> | |
936 * If you add all frames from files or from input streams, then you | |
937 * have to explicitly set the dimension of the video track before you | |
938 * call finish() or close(). | |
939 * | |
940 * @param in The input stream which holds the image data. | |
941 * | |
942 * @throws IllegalArgumentException if the duration is less than 1. | |
943 * @throws IOException if writing the image failed. | |
944 */ | |
945 public void writeFrame(InputStream in) throws IOException { | |
946 ensureOpen(); | |
947 ensureStarted(); | |
948 | |
949 DataChunk videoFrameChunk = new DataChunk( | |
950 videoFormat == VideoFormat.RAW ? "00db" : "00dc"); | |
951 moviChunk.add(videoFrameChunk); | |
952 OutputStream mdatOut = videoFrameChunk.getOutputStream(); | |
953 long offset = getRelativeStreamPosition(); | |
954 byte[] buf = new byte[512]; | |
955 int len; | |
956 while ((len = in.read(buf)) != -1) { | |
957 mdatOut.write(buf, 0, len); | |
958 } | |
959 long length = getRelativeStreamPosition() - offset; | |
960 videoFrameChunk.finish(); | |
961 videoFrames.add(new Sample(videoFrameChunk.chunkType, frameRate, offset, length - 8, true)); | |
962 if (getRelativeStreamPosition() > 1L << 32) { | |
963 throw new IOException("AVI file is larger than 4 GB"); | |
964 } | |
965 } | |
966 | |
967 /** | |
968 * Closes the movie file as well as the stream being filtered. | |
969 * | |
970 * @exception IOException if an I/O error has occurred | |
971 */ | |
972 public void close() throws IOException { | |
973 if (state == States.STARTED) { | |
974 finish(); | |
975 } | |
976 if (state != States.CLOSED) { | |
977 out.close(); | |
978 state = States.CLOSED; | |
979 } | |
980 } | |
981 | |
982 /** | |
983 * Finishes writing the contents of the AVI output stream without closing | |
984 * the underlying stream. Use this method when applying multiple filters | |
985 * in succession to the same output stream. | |
986 * | |
987 * @exception IllegalStateException if the dimension of the video track | |
988 * has not been specified or determined yet. | |
989 * @exception IOException if an I/O exception has occurred | |
990 */ | |
991 public void finish() throws IOException { | |
992 ensureOpen(); | |
993 if (state != States.FINISHED) { | |
994 if (imgWidth == -1 || imgHeight == -1) { | |
995 throw new IllegalStateException("image width and height must be specified"); | |
996 } | |
997 | |
998 moviChunk.finish(); | |
999 writeEpilog(); | |
1000 state = States.FINISHED; | |
1001 imgWidth = imgHeight = -1; | |
1002 } | |
1003 } | |
1004 | |
1005 /** | |
1006 * Check to make sure that this stream has not been closed | |
1007 */ | |
1008 private void ensureOpen() throws IOException { | |
1009 if (state == States.CLOSED) { | |
1010 throw new IOException("Stream closed"); | |
1011 } | |
1012 } | |
1013 | |
1014 /** Gets the position relative to the beginning of the QuickTime stream. | |
1015 * <p> | |
1016 * Usually this value is equal to the stream position of the underlying | |
1017 * ImageOutputStream, but can be larger if the underlying stream already | |
1018 * contained data. | |
1019 * | |
1020 * @return The relative stream position. | |
1021 * @throws IOException | |
1022 */ | |
1023 private long getRelativeStreamPosition() throws IOException { | |
1024 return out.getStreamPosition() - streamOffset; | |
1025 } | |
1026 | |
1027 /** Seeks relative to the beginning of the QuickTime stream. | |
1028 * <p> | |
1029 * Usually this equal to seeking in the underlying ImageOutputStream, but | |
1030 * can be different if the underlying stream already contained data. | |
1031 * | |
1032 */ | |
1033 private void seekRelative(long newPosition) throws IOException { | |
1034 out.seek(newPosition + streamOffset); | |
1035 } | |
1036 | |
1037 private void writeProlog() throws IOException { | |
1038 // The file has the following structure: | |
1039 // | |
1040 // .RIFF AVI | |
1041 // ..avih (AVI Header Chunk) | |
1042 // ..LIST strl | |
1043 // ...strh (Stream Header Chunk) | |
1044 // ...strf (Stream Format Chunk) | |
1045 // ..LIST movi | |
1046 // ...00dc (Compressed video data chunk in Track 00, repeated for each frame) | |
1047 // ..idx1 (List of video data chunks and their location in the file) | |
1048 | |
1049 // The RIFF AVI Chunk holds the complete movie | |
1050 aviChunk = new CompositeChunk("RIFF", "AVI "); | |
1051 CompositeChunk hdrlChunk = new CompositeChunk("LIST", "hdrl"); | |
1052 | |
1053 // Write empty AVI Main Header Chunk - we fill the data in later | |
1054 aviChunk.add(hdrlChunk); | |
1055 avihChunk = new FixedSizeDataChunk("avih", 56); | |
1056 avihChunk.seekToEndOfChunk(); | |
1057 hdrlChunk.add(avihChunk); | |
1058 | |
1059 CompositeChunk strlChunk = new CompositeChunk("LIST", "strl"); | |
1060 hdrlChunk.add(strlChunk); | |
1061 | |
1062 // Write empty AVI Stream Header Chunk - we fill the data in later | |
1063 strhChunk = new FixedSizeDataChunk("strh", 56); | |
1064 strhChunk.seekToEndOfChunk(); | |
1065 strlChunk.add(strhChunk); | |
1066 strfChunk = new FixedSizeDataChunk("strf", palette == null ? 40 : 40 + palette.getMapSize() * 4); | |
1067 strfChunk.seekToEndOfChunk(); | |
1068 strlChunk.add(strfChunk); | |
1069 | |
1070 moviChunk = new CompositeChunk("LIST", "movi"); | |
1071 aviChunk.add(moviChunk); | |
1072 | |
1073 | |
1074 } | |
1075 | |
1076 private void writeEpilog() throws IOException { | |
1077 // Compute values | |
1078 int duration = 0; | |
1079 for (Sample s : videoFrames) { | |
1080 duration += s.duration; | |
1081 } | |
1082 long bufferSize = 0; | |
1083 for (Sample s : videoFrames) { | |
1084 if (s.length > bufferSize) { | |
1085 bufferSize = s.length; | |
1086 } | |
1087 } | |
1088 | |
1089 | |
1090 DataChunkOutputStream d; | |
1091 | |
1092 /* Create Idx1 Chunk and write data | |
1093 * ------------- | |
1094 typedef struct _avioldindex { | |
1095 FOURCC fcc; | |
1096 DWORD cb; | |
1097 struct _avioldindex_entry { | |
1098 DWORD dwChunkId; | |
1099 DWORD dwFlags; | |
1100 DWORD dwOffset; | |
1101 DWORD dwSize; | |
1102 } aIndex[]; | |
1103 } AVIOLDINDEX; | |
1104 */ | |
1105 DataChunk idx1Chunk = new DataChunk("idx1"); | |
1106 aviChunk.add(idx1Chunk); | |
1107 d = idx1Chunk.getOutputStream(); | |
1108 long moviListOffset = moviChunk.offset + 8; | |
1109 //moviListOffset = 0; | |
1110 for (Sample f : videoFrames) { | |
1111 | |
1112 d.writeType(f.chunkType); // dwChunkId | |
1113 // Specifies a FOURCC that identifies a stream in the AVI file. The | |
1114 // FOURCC must have the form 'xxyy' where xx is the stream number and yy | |
1115 // is a two-character code that identifies the contents of the stream: | |
1116 // | |
1117 // Two-character code Description | |
1118 // db Uncompressed video frame | |
1119 // dc Compressed video frame | |
1120 // pc Palette change | |
1121 // wb Audio data | |
1122 | |
1123 d.writeUInt((f.chunkType.endsWith("pc") ? 0x100 : 0x0)// | |
1124 | (f.isSync ? 0x10 : 0x0)); // dwFlags | |
1125 // Specifies a bitwise combination of zero or more of the following | |
1126 // flags: | |
1127 // | |
1128 // Value Name Description | |
1129 // 0x10 AVIIF_KEYFRAME The data chunk is a key frame. | |
1130 // 0x1 AVIIF_LIST The data chunk is a 'rec ' list. | |
1131 // 0x100 AVIIF_NO_TIME The data chunk does not affect the timing of the | |
1132 // stream. For example, this flag should be set for | |
1133 // palette changes. | |
1134 | |
1135 d.writeUInt(f.offset - moviListOffset); // dwOffset | |
1136 // Specifies the location of the data chunk in the file. The value | |
1137 // should be specified as an offset, in bytes, from the start of the | |
1138 // 'movi' list; however, in some AVI files it is given as an offset from | |
1139 // the start of the file. | |
1140 | |
1141 d.writeUInt(f.length); // dwSize | |
1142 // Specifies the size of the data chunk, in bytes. | |
1143 } | |
1144 idx1Chunk.finish(); | |
1145 | |
1146 /* Write Data into AVI Main Header Chunk | |
1147 * ------------- | |
1148 * The AVIMAINHEADER structure defines global information in an AVI file. | |
1149 * see http://msdn.microsoft.com/en-us/library/ms779632(VS.85).aspx | |
1150 typedef struct _avimainheader { | |
1151 FOURCC fcc; | |
1152 DWORD cb; | |
1153 DWORD dwMicroSecPerFrame; | |
1154 DWORD dwMaxBytesPerSec; | |
1155 DWORD dwPaddingGranularity; | |
1156 DWORD dwFlags; | |
1157 DWORD dwTotalFrames; | |
1158 DWORD dwInitialFrames; | |
1159 DWORD dwStreams; | |
1160 DWORD dwSuggestedBufferSize; | |
1161 DWORD dwWidth; | |
1162 DWORD dwHeight; | |
1163 DWORD dwReserved[4]; | |
1164 } AVIMAINHEADER; */ | |
1165 avihChunk.seekToStartOfData(); | |
1166 d = avihChunk.getOutputStream(); | |
1167 | |
1168 d.writeUInt((1000000L * (long) timeScale) / (long) frameRate); // dwMicroSecPerFrame | |
1169 // Specifies the number of microseconds between frames. | |
1170 // This value indicates the overall timing for the file. | |
1171 | |
1172 d.writeUInt(0); // dwMaxBytesPerSec | |
1173 // Specifies the approximate maximum data rate of the file. | |
1174 // This value indicates the number of bytes per second the system | |
1175 // must handle to present an AVI sequence as specified by the other | |
1176 // parameters contained in the main header and stream header chunks. | |
1177 | |
1178 d.writeUInt(0); // dwPaddingGranularity | |
1179 // Specifies the alignment for data, in bytes. Pad the data to multiples | |
1180 // of this value. | |
1181 | |
1182 d.writeUInt(0x10); // dwFlags (0x10 == hasIndex) | |
1183 // Contains a bitwise combination of zero or more of the following | |
1184 // flags: | |
1185 // | |
1186 // Value Name Description | |
1187 // 0x10 AVIF_HASINDEX Indicates the AVI file has an index. | |
1188 // 0x20 AVIF_MUSTUSEINDEX Indicates that application should use the | |
1189 // index, rather than the physical ordering of the | |
1190 // chunks in the file, to determine the order of | |
1191 // presentation of the data. For example, this flag | |
1192 // could be used to create a list of frames for | |
1193 // editing. | |
1194 // 0x100 AVIF_ISINTERLEAVED Indicates the AVI file is interleaved. | |
1195 // 0x1000 AVIF_WASCAPTUREFILE Indicates the AVI file is a specially | |
1196 // allocated file used for capturing real-time | |
1197 // video. Applications should warn the user before | |
1198 // writing over a file with this flag set because | |
1199 // the user probably defragmented this file. | |
1200 // 0x20000 AVIF_COPYRIGHTED Indicates the AVI file contains copyrighted | |
1201 // data and software. When this flag is used, | |
1202 // software should not permit the data to be | |
1203 // duplicated. | |
1204 | |
1205 d.writeUInt(videoFrames.size()); // dwTotalFrames | |
1206 // Specifies the total number of frames of data in the file. | |
1207 | |
1208 d.writeUInt(0); // dwInitialFrames | |
1209 // Specifies the initial frame for interleaved files. Noninterleaved | |
1210 // files should specify zero. If you are creating interleaved files, | |
1211 // specify the number of frames in the file prior to the initial frame | |
1212 // of the AVI sequence in this member. | |
1213 // To give the audio driver enough audio to work with, the audio data in | |
1214 // an interleaved file must be skewed from the video data. Typically, | |
1215 // the audio data should be moved forward enough frames to allow | |
1216 // approximately 0.75 seconds of audio data to be preloaded. The | |
1217 // dwInitialRecords member should be set to the number of frames the | |
1218 // audio is skewed. Also set the same value for the dwInitialFrames | |
1219 // member of the AVISTREAMHEADER structure in the audio stream header | |
1220 | |
1221 d.writeUInt(1); // dwStreams | |
1222 // Specifies the number of streams in the file. For example, a file with | |
1223 // audio and video has two streams. | |
1224 | |
1225 d.writeUInt(bufferSize); // dwSuggestedBufferSize | |
1226 // Specifies the suggested buffer size for reading the file. Generally, | |
1227 // this size should be large enough to contain the largest chunk in the | |
1228 // file. If set to zero, or if it is too small, the playback software | |
1229 // will have to reallocate memory during playback, which will reduce | |
1230 // performance. For an interleaved file, the buffer size should be large | |
1231 // enough to read an entire record, and not just a chunk. | |
1232 | |
1233 | |
1234 d.writeUInt(imgWidth); // dwWidth | |
1235 // Specifies the width of the AVI file in pixels. | |
1236 | |
1237 d.writeUInt(imgHeight); // dwHeight | |
1238 // Specifies the height of the AVI file in pixels. | |
1239 | |
1240 d.writeUInt(0); // dwReserved[0] | |
1241 d.writeUInt(0); // dwReserved[1] | |
1242 d.writeUInt(0); // dwReserved[2] | |
1243 d.writeUInt(0); // dwReserved[3] | |
1244 // Reserved. Set this array to zero. | |
1245 | |
1246 /* Write Data into AVI Stream Header Chunk | |
1247 * ------------- | |
1248 * The AVISTREAMHEADER structure contains information about one stream | |
1249 * in an AVI file. | |
1250 * see http://msdn.microsoft.com/en-us/library/ms779638(VS.85).aspx | |
1251 typedef struct _avistreamheader { | |
1252 FOURCC fcc; | |
1253 DWORD cb; | |
1254 FOURCC fccType; | |
1255 FOURCC fccHandler; | |
1256 DWORD dwFlags; | |
1257 WORD wPriority; | |
1258 WORD wLanguage; | |
1259 DWORD dwInitialFrames; | |
1260 DWORD dwScale; | |
1261 DWORD dwRate; | |
1262 DWORD dwStart; | |
1263 DWORD dwLength; | |
1264 DWORD dwSuggestedBufferSize; | |
1265 DWORD dwQuality; | |
1266 DWORD dwSampleSize; | |
1267 struct { | |
1268 short int left; | |
1269 short int top; | |
1270 short int right; | |
1271 short int bottom; | |
1272 } rcFrame; | |
1273 } AVISTREAMHEADER; | |
1274 */ | |
1275 strhChunk.seekToStartOfData(); | |
1276 d = strhChunk.getOutputStream(); | |
1277 d.writeType("vids"); // fccType - vids for video stream | |
1278 // Contains a FOURCC that specifies the type of the data contained in | |
1279 // the stream. The following standard AVI values for video and audio are | |
1280 // defined: | |
1281 // | |
1282 // FOURCC Description | |
1283 // 'auds' Audio stream | |
1284 // 'mids' MIDI stream | |
1285 // 'txts' Text stream | |
1286 // 'vids' Video stream | |
1287 | |
1288 switch (videoFormat) { | |
1289 case RAW: | |
1290 d.writeType("DIB "); // fccHandler - DIB for Raw RGB | |
1291 break; | |
1292 case RLE: | |
1293 d.writeType("RLE "); // fccHandler - Microsoft RLE | |
1294 break; | |
1295 case JPG: | |
1296 d.writeType("MJPG"); // fccHandler - MJPG for Motion JPEG | |
1297 break; | |
1298 case PNG: | |
1299 default: | |
1300 d.writeType("png "); // fccHandler - png for PNG | |
1301 break; | |
1302 } | |
1303 // Optionally, contains a FOURCC that identifies a specific data | |
1304 // handler. The data handler is the preferred handler for the stream. | |
1305 // For audio and video streams, this specifies the codec for decoding | |
1306 // the stream. | |
1307 | |
1308 if (imgDepth <= 8) { | |
1309 d.writeUInt(0x00010000); // dwFlags - AVISF_VIDEO_PALCHANGES | |
1310 } else { | |
1311 d.writeUInt(0); // dwFlags | |
1312 } | |
1313 | |
1314 // Contains any flags for the data stream. The bits in the high-order | |
1315 // word of these flags are specific to the type of data contained in the | |
1316 // stream. The following standard flags are defined: | |
1317 // | |
1318 // Value Name Description | |
1319 // AVISF_DISABLED 0x00000001 Indicates this stream should not | |
1320 // be enabled by default. | |
1321 // AVISF_VIDEO_PALCHANGES 0x00010000 | |
1322 // Indicates this video stream contains | |
1323 // palette changes. This flag warns the playback | |
1324 // software that it will need to animate the | |
1325 // palette. | |
1326 | |
1327 d.writeUShort(0); // wPriority | |
1328 // Specifies priority of a stream type. For example, in a file with | |
1329 // multiple audio streams, the one with the highest priority might be | |
1330 // the default stream. | |
1331 | |
1332 d.writeUShort(0); // wLanguage | |
1333 // Language tag. | |
1334 | |
1335 d.writeUInt(0); // dwInitialFrames | |
1336 // Specifies how far audio data is skewed ahead of the video frames in | |
1337 // interleaved files. Typically, this is about 0.75 seconds. If you are | |
1338 // creating interleaved files, specify the number of frames in the file | |
1339 // prior to the initial frame of the AVI sequence in this member. For | |
1340 // more information, see the remarks for the dwInitialFrames member of | |
1341 // the AVIMAINHEADER structure. | |
1342 | |
1343 d.writeUInt(timeScale); // dwScale | |
1344 // Used with dwRate to specify the time scale that this stream will use. | |
1345 // Dividing dwRate by dwScale gives the number of samples per second. | |
1346 // For video streams, this is the frame rate. For audio streams, this | |
1347 // rate corresponds to the time needed to play nBlockAlign bytes of | |
1348 // audio, which for PCM audio is the just the sample rate. | |
1349 | |
1350 d.writeUInt(frameRate); // dwRate | |
1351 // See dwScale. | |
1352 | |
1353 d.writeUInt(0); // dwStart | |
1354 // Specifies the starting time for this stream. The units are defined by | |
1355 // the dwRate and dwScale members in the main file header. Usually, this | |
1356 // is zero, but it can specify a delay time for a stream that does not | |
1357 // start concurrently with the file. | |
1358 | |
1359 d.writeUInt(videoFrames.size()); // dwLength | |
1360 // Specifies the length of this stream. The units are defined by the | |
1361 // dwRate and dwScale members of the stream's header. | |
1362 | |
1363 d.writeUInt(bufferSize); // dwSuggestedBufferSize | |
1364 // Specifies how large a buffer should be used to read this stream. | |
1365 // Typically, this contains a value corresponding to the largest chunk | |
1366 // present in the stream. Using the correct buffer size makes playback | |
1367 // more efficient. Use zero if you do not know the correct buffer size. | |
1368 | |
1369 d.writeInt(-1); // dwQuality | |
1370 // Specifies an indicator of the quality of the data in the stream. | |
1371 // Quality is represented as a number between 0 and 10,000. | |
1372 // For compressed data, this typically represents the value of the | |
1373 // quality parameter passed to the compression software. If set to –1, | |
1374 // drivers use the default quality value. | |
1375 | |
1376 d.writeUInt(0); // dwSampleSize | |
1377 // Specifies the size of a single sample of data. This is set to zero | |
1378 // if the samples can vary in size. If this number is nonzero, then | |
1379 // multiple samples of data can be grouped into a single chunk within | |
1380 // the file. If it is zero, each sample of data (such as a video frame) | |
1381 // must be in a separate chunk. For video streams, this number is | |
1382 // typically zero, although it can be nonzero if all video frames are | |
1383 // the same size. For audio streams, this number should be the same as | |
1384 // the nBlockAlign member of the WAVEFORMATEX structure describing the | |
1385 // audio. | |
1386 | |
1387 d.writeUShort(0); // rcFrame.left | |
1388 d.writeUShort(0); // rcFrame.top | |
1389 d.writeUShort(imgWidth); // rcFrame.right | |
1390 d.writeUShort(imgHeight); // rcFrame.bottom | |
1391 // Specifies the destination rectangle for a text or video stream within | |
1392 // the movie rectangle specified by the dwWidth and dwHeight members of | |
1393 // the AVI main header structure. The rcFrame member is typically used | |
1394 // in support of multiple video streams. Set this rectangle to the | |
1395 // coordinates corresponding to the movie rectangle to update the whole | |
1396 // movie rectangle. Units for this member are pixels. The upper-left | |
1397 // corner of the destination rectangle is relative to the upper-left | |
1398 // corner of the movie rectangle. | |
1399 | |
1400 /* Write BITMAPINFOHEADR Data into AVI Stream Format Chunk | |
1401 /* ------------- | |
1402 * see http://msdn.microsoft.com/en-us/library/ms779712(VS.85).aspx | |
1403 typedef struct tagBITMAPINFOHEADER { | |
1404 DWORD biSize; | |
1405 LONG biWidth; | |
1406 LONG biHeight; | |
1407 WORD biPlanes; | |
1408 WORD biBitCount; | |
1409 DWORD biCompression; | |
1410 DWORD biSizeImage; | |
1411 LONG biXPelsPerMeter; | |
1412 LONG biYPelsPerMeter; | |
1413 DWORD biClrUsed; | |
1414 DWORD biClrImportant; | |
1415 } BITMAPINFOHEADER; | |
1416 */ | |
1417 strfChunk.seekToStartOfData(); | |
1418 d = strfChunk.getOutputStream(); | |
1419 d.writeUInt(40); // biSize | |
1420 // Specifies the number of bytes required by the structure. This value | |
1421 // does not include the size of the color table or the size of the color | |
1422 // masks, if they are appended to the end of structure. | |
1423 | |
1424 d.writeInt(imgWidth); // biWidth | |
1425 // Specifies the width of the bitmap, in pixels. | |
1426 | |
1427 d.writeInt(imgHeight); // biHeight | |
1428 // Specifies the height of the bitmap, in pixels. | |
1429 // | |
1430 // For uncompressed RGB bitmaps, if biHeight is positive, the bitmap is | |
1431 // a bottom-up DIB with the origin at the lower left corner. If biHeight | |
1432 // is negative, the bitmap is a top-down DIB with the origin at the | |
1433 // upper left corner. | |
1434 // For YUV bitmaps, the bitmap is always top-down, regardless of the | |
1435 // sign of biHeight. Decoders should offer YUV formats with postive | |
1436 // biHeight, but for backward compatibility they should accept YUV | |
1437 // formats with either positive or negative biHeight. | |
1438 // For compressed formats, biHeight must be positive, regardless of | |
1439 // image orientation. | |
1440 | |
1441 d.writeShort(1); // biPlanes | |
1442 // Specifies the number of planes for the target device. This value must | |
1443 // be set to 1. | |
1444 | |
1445 d.writeShort(imgDepth); // biBitCount | |
1446 // Specifies the number of bits per pixel (bpp). For uncompressed | |
1447 // formats, this value is the average number of bits per pixel. For | |
1448 // compressed formats, this value is the implied bit depth of the | |
1449 // uncompressed image, after the image has been decoded. | |
1450 | |
1451 switch (videoFormat) { | |
1452 case RAW: | |
1453 default: | |
1454 d.writeInt(0); // biCompression - BI_RGB for uncompressed RGB | |
1455 break; | |
1456 case RLE: | |
1457 if (imgDepth == 8) { | |
1458 d.writeInt(1); // biCompression - BI_RLE8 | |
1459 } else if (imgDepth == 4) { | |
1460 d.writeInt(2); // biCompression - BI_RLE4 | |
1461 } else { | |
1462 throw new UnsupportedOperationException("RLE only supports 4-bit and 8-bit images"); | |
1463 } | |
1464 break; | |
1465 case JPG: | |
1466 d.writeType("MJPG"); // biCompression - MJPG for Motion JPEG | |
1467 break; | |
1468 case PNG: | |
1469 d.writeType("png "); // biCompression - png for PNG | |
1470 break; | |
1471 } | |
1472 // For compressed video and YUV formats, this member is a FOURCC code, | |
1473 // specified as a DWORD in little-endian order. For example, YUYV video | |
1474 // has the FOURCC 'VYUY' or 0x56595559. For more information, see FOURCC | |
1475 // Codes. | |
1476 // | |
1477 // For uncompressed RGB formats, the following values are possible: | |
1478 // | |
1479 // Value Description | |
1480 // BI_RGB 0x00000000 Uncompressed RGB. | |
1481 // BI_BITFIELDS 0x00000003 Uncompressed RGB with color masks. | |
1482 // Valid for 16-bpp and 32-bpp bitmaps. | |
1483 // | |
1484 // Note that BI_JPG and BI_PNG are not valid video formats. | |
1485 // | |
1486 // For 16-bpp bitmaps, if biCompression equals BI_RGB, the format is | |
1487 // always RGB 555. If biCompression equals BI_BITFIELDS, the format is | |
1488 // either RGB 555 or RGB 565. Use the subtype GUID in the AM_MEDIA_TYPE | |
1489 // structure to determine the specific RGB type. | |
1490 | |
1491 switch (videoFormat) { | |
1492 case RAW: | |
1493 d.writeInt(0); // biSizeImage | |
1494 break; | |
1495 case RLE: | |
1496 case JPG: | |
1497 case PNG: | |
1498 default: | |
1499 if (imgDepth == 4) { | |
1500 d.writeInt(imgWidth * imgHeight / 2); // biSizeImage | |
1501 } else { | |
1502 int bytesPerPixel = Math.max(1, imgDepth / 8); | |
1503 d.writeInt(imgWidth * imgHeight * bytesPerPixel); // biSizeImage | |
1504 } | |
1505 break; | |
1506 } | |
1507 // Specifies the size, in bytes, of the image. This can be set to 0 for | |
1508 // uncompressed RGB bitmaps. | |
1509 | |
1510 d.writeInt(0); // biXPelsPerMeter | |
1511 // Specifies the horizontal resolution, in pixels per meter, of the | |
1512 // target device for the bitmap. | |
1513 | |
1514 d.writeInt(0); // biYPelsPerMeter | |
1515 // Specifies the vertical resolution, in pixels per meter, of the target | |
1516 // device for the bitmap. | |
1517 | |
1518 d.writeInt(palette == null ? 0 : palette.getMapSize()); // biClrUsed | |
1519 // Specifies the number of color indices in the color table that are | |
1520 // actually used by the bitmap. | |
1521 | |
1522 d.writeInt(0); // biClrImportant | |
1523 // Specifies the number of color indices that are considered important | |
1524 // for displaying the bitmap. If this value is zero, all colors are | |
1525 // important. | |
1526 | |
1527 if (palette != null) { | |
1528 for (int i = 0, n = palette.getMapSize(); i < n; ++i) { | |
1529 /* | |
1530 * typedef struct tagRGBQUAD { | |
1531 BYTE rgbBlue; | |
1532 BYTE rgbGreen; | |
1533 BYTE rgbRed; | |
1534 BYTE rgbReserved; // This member is reserved and must be zero. | |
1535 } RGBQUAD; | |
1536 */ | |
1537 d.write(palette.getBlue(i)); | |
1538 d.write(palette.getGreen(i)); | |
1539 d.write(palette.getRed(i)); | |
1540 d.write(0); | |
1541 } | |
1542 } | |
1543 | |
1544 | |
1545 // ----------------- | |
1546 aviChunk.finish(); | |
1547 } | |
1548 } |