From 1070c8f24e005c2a22cf85ca992ed96ce9c88d40 Mon Sep 17 00:00:00 2001 From: wrandelshofer Date: Thu, 3 Oct 2024 16:48:20 +0200 Subject: [PATCH] Implement MP4Writer. --- .idea/compiler.xml | 3 +- .idea/misc.xml | 2 +- .../org/monte/demo/audiorecorder/Main.java | 13 +- .../org/monte/demo/io/ReadAVIDemo.java | 2 +- .../org/monte/demo/io/TrimDemoMain.java | 6 +- .../javafx/movieplayer/DropFileHandler.java | 7 +- .../movieplayer/MainWindowController.java | 4 +- .../movieplayer/PlayerControlsController.java | 4 +- .../javafx/movieplayer/fxplayer/FXMedia.java | 34 +- .../movieplayer/monteplayer/MonteMedia.java | 3 +- .../monteplayer/MonteMediaView.java | 6 +- .../movieplayer/monteplayer/PlayerEngine.java | 52 +- .../monteplayer/WritableImageCodec.java | 5 +- .../org/monte/demo/movieconverter/Main.java | 2 +- org.monte.demo.moviewriter/pom.xml | 7 +- .../org/monte/demo/moviewriter/Main.java | 146 +- .../demo/moviewriter/ImageComparisonForm.form | 160 ++ .../demo/moviewriter/ImageComparisonForm.java | 268 ++ .../demo/moviewriter/TestMovieWriters.java | 399 +++ .../org/monte/demo/screenrecorder/Main.form | 247 +- .../org/monte/demo/screenrecorder/Main.java | 311 ++- .../codec/video/AmigaBitmapCodec.java | 8 +- .../monte/media/anim/ANIMMovieResources.java | 2 +- .../monte/media/anim/ANIMOutputStream.java | 8 +- .../media/anim/AmigaVideoFormatKeys.java | 8 +- .../monte/media/iff/MC68000InputStream.java | 8 +- .../monte/media/iff/MC68000OutputStream.java | 11 +- .../org/monte/media/pbm/PBMPlayer.java | 2 +- .../org/monte/media/seq/SEQMovieTrack.java | 2 +- .../services/org.monte.media.av.CodecSpi | 4 +- .../org.monte.media.av.MovieWriterSpi | 2 +- .../org.monte.media.jcodec/module-info.java | 8 +- .../{H264Codec.java => JCodecH264Codec.java} | 60 +- ...4CodecSpi.java => JCodecH264CodecSpi.java} | 6 +- ...tureCodec.java => JCodecPictureCodec.java} | 9 +- ...decSpi.java => JCodecPictureCodecSpi.java} | 6 +- .../media/jcodec/impl/SequenceEncoderEx.java | 3 +- .../{MP4Writer.java 
=> JCodecMP4Writer.java} | 13 +- ...WriterSpi.java => JCodecMP4WriterSpi.java} | 10 +- .../media/jmf/codec/video/TSCCCodec.java | 56 +- .../media/screenrecorder/ScreenRecorder.java | 18 +- .../datatransfer/DropFileTransferHandler.java | 2 +- .../FileTextFieldTransferHandler.java | 2 +- org.monte.media/pom.xml | 4 +- .../java/org.monte.media/module-info.java | 2 + .../org/monte/media/av/CodecChain.java | 2 +- .../org/monte/media/av/DefaultMovie.java | 6 +- .../org/monte/media/av/DefaultRegistry.java | 30 +- .../org/monte/media/av/Format.java | 3 +- .../org/monte/media/av/FormatKeys.java | 5 + .../org/monte/media/av/MovieReader.java | 48 +- .../org/monte/media/av/Registry.java | 145 +- .../media/av/codec/audio/AudioFormatKeys.java | 2 +- .../monte/media/av/codec/video/AVIBMPDIB.java | 3 +- .../av/codec/video/ColorAdjustCodec.java | 2 +- .../media/av/codec/video/TechSmithCodec.java | 8 +- .../av/codec/video/TechSmithCodecCore.java | 29 +- .../media/av/codec/video/VideoFormatKeys.java | 5 - .../org/monte/media/avi/AVIInputStream.java | 2 +- .../org/monte/media/avi/AVIOutputStream.java | 17 +- .../org/monte/media/avi/AVIReader.java | 57 +- .../org/monte/media/avi/AVIWriter.java | 60 +- .../monte/media/avi/AbstractAVIStream.java | 21 +- .../media/avi/DataChunkOutputStream.java | 12 +- .../monte/media/avi/codec/video/DIBCodec.java | 12 +- .../org/monte/media/color/ColorModels.java | 8 +- .../org/monte/media/color/WhiteBalance.java | 2 +- .../org/monte/media/color/YccConverters.java | 3 +- .../org/monte/media/exif/EXIFReader.java | 30 +- .../org/monte/media/exif/EXIFTagSet.java | 2 +- .../org/monte/media/image/CMYKImages.java | 2 +- .../media/io/ByteArrayImageInputStream.java | 15 +- .../media/io/ByteArrayImageOutputStream.java | 18 +- .../media/io/FilterImageInputStream.java | 93 +- ...ream.java => FilterImageOutputStream.java} | 49 +- .../monte/media/io/ImageInputStreamImpl2.java | 10 +- .../io/SeekableByteArrayOutputStream.java | 159 -- 
.../monte/media/io/SubImageInputStream.java | 108 - .../monte/media/jpeg/CMYKJPEGImageReader.java | 7 +- .../org/monte/media/mp4/MP4OutputStream.java | 2421 +++++++++++++++++ .../org/monte/media/mp4/MP4Writer.java | 492 ++++ .../org/monte/media/mp4/MP4WriterSpi.java | 45 + .../org/monte/media/mpo/MPOImageReader.java | 12 +- .../AbstractQTFFMovieStream.java} | 1592 +++++------ .../qtff/AvcDecoderConfigurationRecord.java | 58 + .../QTFFImageInputStream.java | 18 +- .../QTFFImageOutputStream.java} | 223 +- .../media/quicktime/DataAtomInputStream.java | 184 -- .../quicktime/QuickTimeDeserializer.java | 10 +- .../media/quicktime/QuickTimeInputStream.java | 48 +- .../monte/media/quicktime/QuickTimeMeta.java | 56 +- .../quicktime/QuickTimeOutputStream.java | 726 ++++- .../media/quicktime/QuickTimeReader.java | 20 +- .../media/quicktime/QuickTimeSerializer.java | 19 - .../media/quicktime/QuickTimeWriter.java | 235 +- .../media/quicktime/QuickTimeWriterSpi.java | 4 +- .../codec/text/AppleClosedCaptionCodec.java | 2 +- .../codec/text/cta608/Cta608Parser.java | 170 +- .../codec/text/cta608/Cta608Screen.java | 12 +- .../media/quicktime/codec/video/RawCodec.java | 11 +- .../media/riff/RIFFPrimitivesInputStream.java | 8 +- .../org/monte/media/tiff/IFDEntry.java | 3 +- .../org/monte/media/util/ArrayUtil.java | 29 +- .../org/monte/media/util/ByteArray.java | 39 + .../ByteArray.java => util/ByteArrays.java} | 10 +- .../org/monte/media/util/MathUtil.java | 33 + .../monte/media/zipmovie/ZipMovieWriter.java | 5 +- .../av/codec/video/TechSmithCodecTest.java | 14 +- pom.xml | 4 +- 109 files changed, 6697 insertions(+), 2716 deletions(-) create mode 100644 org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/ImageComparisonForm.form create mode 100644 org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/ImageComparisonForm.java create mode 100755 
org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/TestMovieWriters.java rename org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/{H264Codec.java => JCodecH264Codec.java} (74%) rename org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/{H264CodecSpi.java => JCodecH264CodecSpi.java} (66%) rename org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/{PictureCodec.java => JCodecPictureCodec.java} (88%) rename org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/{PictureCodecSpi.java => JCodecPictureCodecSpi.java} (65%) rename org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/{MP4Writer.java => JCodecMP4Writer.java} (97%) rename org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/{MP4WriterSpi.java => JCodecMP4WriterSpi.java} (82%) rename org.monte.media/src/main/java/org.monte.media/org/monte/media/io/{SubImageOutputStream.java => FilterImageOutputStream.java} (60%) delete mode 100755 org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SeekableByteArrayOutputStream.java delete mode 100755 org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SubImageInputStream.java create mode 100755 org.monte.media/src/main/java/org.monte.media/org/monte/media/mp4/MP4OutputStream.java create mode 100755 org.monte.media/src/main/java/org.monte.media/org/monte/media/mp4/MP4Writer.java create mode 100755 org.monte.media/src/main/java/org.monte.media/org/monte/media/mp4/MP4WriterSpi.java rename org.monte.media/src/main/java/org.monte.media/org/monte/media/{quicktime/AbstractQuickTimeStream.java => qtff/AbstractQTFFMovieStream.java} (59%) create mode 100644 org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/AvcDecoderConfigurationRecord.java rename 
org.monte.media/src/main/java/org.monte.media/org/monte/media/{quicktime => qtff}/QTFFImageInputStream.java (88%) rename org.monte.media/src/main/java/org.monte.media/org/monte/media/{quicktime/DataAtomOutputStream.java => qtff/QTFFImageOutputStream.java} (53%) delete mode 100755 org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/DataAtomInputStream.java delete mode 100755 org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeSerializer.java create mode 100644 org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ByteArray.java rename org.monte.media/src/main/java/org.monte.media/org/monte/media/{io/ByteArray.java => util/ByteArrays.java} (97%) create mode 100644 org.monte.media/src/main/java/org.monte.media/org/monte/media/util/MathUtil.java diff --git a/.idea/compiler.xml b/.idea/compiler.xml index 9b37709..a624838 100644 --- a/.idea/compiler.xml +++ b/.idea/compiler.xml @@ -20,11 +20,11 @@ + - @@ -34,7 +34,6 @@ - diff --git a/.idea/misc.xml b/.idea/misc.xml index 71892c9..c2bb51b 100644 --- a/.idea/misc.xml +++ b/.idea/misc.xml @@ -13,7 +13,7 @@ - + \ No newline at end of file diff --git a/org.monte.demo.audiorecorder/src/main/java/org.monte.demo.audiorecorder/org/monte/demo/audiorecorder/Main.java b/org.monte.demo.audiorecorder/src/main/java/org.monte.demo.audiorecorder/org/monte/demo/audiorecorder/Main.java index 31cd6a1..8b9589f 100755 --- a/org.monte.demo.audiorecorder/src/main/java/org.monte.demo.audiorecorder/org/monte/demo/audiorecorder/Main.java +++ b/org.monte.demo.audiorecorder/src/main/java/org.monte.demo.audiorecorder/org/monte/demo/audiorecorder/Main.java @@ -19,9 +19,10 @@ import javax.sound.sampled.TargetDataLine; import java.io.File; import java.io.IOException; -import java.text.SimpleDateFormat; +import java.time.Instant; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; import java.util.ArrayList; -import java.util.Date; import java.util.List; /** @@ -115,8 +116,9 
@@ record AudioTargetInfo(Mixer mixer, Line.Info info, AudioFormat format) { * @param args the command line arguments */ public static void main(String[] args) throws IOException, LineUnavailableException { - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd 'at' HH.mm.ss"); - File file = new File(System.getProperty("user.home"), "Movies/AudioRecording " + dateFormat.format(new Date()) + ".avi"); + DateTimeFormatter dateFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd 'at' HH.mm.ss").withZone(ZoneId.systemDefault()); + ; + File file = new File(System.getProperty("user.home"), "Movies/AudioRecording " + dateFormat.format(Instant.now()) + ".avi"); if (!file.getParentFile().isDirectory()) { file.getParentFile().mkdirs(); } @@ -128,7 +130,8 @@ public static void main(String[] args) throws IOException, LineUnavailableExcept Mixer mixer = AudioSystem.getMixer(info); for (Line.Info info1 : mixer.getTargetLineInfo()) { System.out.println(" " + info1); - if (info1 instanceof DataLine.Info dlInfo) { + if (info1 instanceof DataLine.Info) { + DataLine.Info dlInfo = (DataLine.Info) info1; for (AudioFormat format : dlInfo.getFormats()) { if (format.getEncoding() == AudioFormat.Encoding.PCM_SIGNED && format.getSampleRate() != AudioSystem.NOT_SPECIFIED) { System.out.println((targetLines.size() + 1) + ". 
" + format); diff --git a/org.monte.demo.io/src/main/java/org.monte.demo.io/org/monte/demo/io/ReadAVIDemo.java b/org.monte.demo.io/src/main/java/org.monte.demo.io/org/monte/demo/io/ReadAVIDemo.java index 8a0d7f5..6481c99 100755 --- a/org.monte.demo.io/src/main/java/org.monte.demo.io/org/monte/demo/io/ReadAVIDemo.java +++ b/org.monte.demo.io/src/main/java/org.monte.demo.io/org/monte/demo/io/ReadAVIDemo.java @@ -72,7 +72,7 @@ public static void main(String[] args) throws IOException { public void run() { JFrame fr = new JFrame(f.getName()); fr.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); - final JLabel label = new JLabel(new ImageIcon(frames.getFirst())); + final JLabel label = new JLabel(new ImageIcon(frames.get(0))); final JSlider slider = new JSlider(JSlider.HORIZONTAL, 0, frames.size() - 1, 0); slider.addChangeListener(new ChangeListener() { diff --git a/org.monte.demo.io/src/main/java/org.monte.demo.io/org/monte/demo/io/TrimDemoMain.java b/org.monte.demo.io/src/main/java/org.monte.demo.io/org/monte/demo/io/TrimDemoMain.java index 1e335a3..8385904 100755 --- a/org.monte.demo.io/src/main/java/org.monte.demo.io/org/monte/demo/io/TrimDemoMain.java +++ b/org.monte.demo.io/src/main/java/org.monte.demo.io/org/monte/demo/io/TrimDemoMain.java @@ -197,11 +197,11 @@ private static void info(MovieWriter w) throws IOException { */ private static void info(MovieReader in) throws IOException { System.out.println(" Format: " + FormatFormatter.toString(in.getFileFormat())); - System.out.println(" Duration: " + in.getDuration().toDescriptiveString() + " seconds"); + System.out.println(" Duration: " + in.getMovieDuration().toDescriptiveString() + " seconds"); for (int t = 0; t < in.getTrackCount(); t++) { System.out.println(" Track " + t); System.out.println(" Format: " + FormatFormatter.toString(in.getFormat(t))); - System.out.println(" Duration: " + in.getDuration(t).toDescriptiveString() + " seconds"); + System.out.println(" Duration: " + 
in.getTrackDuration(t).toDescriptiveString() + " seconds"); System.out.println(" Chunk Count: " + in.getChunkCount(t)); } } @@ -349,7 +349,7 @@ private static Rational parseTime(String str, ArrayList infiles, MovieRead return Rational.valueOf(str); } catch (NumberFormatException e) { if (r[0] == null && !infiles.isEmpty()) { - r[0] = Registry.getInstance().getReader(infiles.getFirst()); + r[0] = Registry.getInstance().getReader(infiles.get(0)); } if (r[0] != null) { int t = r[0].findTrack(0, new Format(MediaTypeKey, MediaType.VIDEO)); diff --git a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/DropFileHandler.java b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/DropFileHandler.java index bc9ee81..254c85d 100644 --- a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/DropFileHandler.java +++ b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/DropFileHandler.java @@ -49,9 +49,12 @@ private void dragExited(DragEvent event) { private void dragDropped(DragEvent event) { Dragboard db = event.getDragboard(); boolean success = false; - if (db.hasFiles() && db.getFiles() instanceof List fileList && !fileList.isEmpty()) { - fileDroppedConsumer.accept(fileList.getFirst()); + if (db.hasFiles()) { + List files = db.getFiles(); + if (files != null && !files.isEmpty()) { + fileDroppedConsumer.accept(files.get(0)); success = true; + } } event.setDropCompleted(success); event.consume(); diff --git a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/MainWindowController.java b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/MainWindowController.java index 7ec08f6..8b3176a 100644 --- 
a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/MainWindowController.java +++ b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/MainWindowController.java @@ -120,7 +120,7 @@ void zoomToActualSize(ActionEvent event) { } void zoomTo(double power) { - MediaInterface media = getPlayer() instanceof MediaPlayerInterface p ? p.getMedia() : null; + MediaInterface media = getPlayer() instanceof MediaPlayerInterface ? getPlayer().getMedia() : null; if (media == null) { return; } @@ -135,7 +135,7 @@ private MediaPlayerInterface getPlayer() { } private double getZoomPower() { - MediaInterface media = getPlayer() instanceof MediaPlayerInterface p ? p.getMedia() : null; + MediaInterface media = getPlayer() instanceof MediaPlayerInterface ? getPlayer().getMedia() : null; if (media == null) { return 1; } diff --git a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/PlayerControlsController.java b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/PlayerControlsController.java index 111851b..7e5ea22 100644 --- a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/PlayerControlsController.java +++ b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/PlayerControlsController.java @@ -393,8 +393,8 @@ private void mouseDragged(MouseEvent event) { double parentHeight = rootPane.getHeight(); int minimumAmountVisible = 10; - double newX = Math.clamp(controllerPane.getLayoutX() + dx, 0 - width + minimumAmountVisible, parentWidth - minimumAmountVisible); - double newY = Math.clamp(controllerPane.getLayoutY() + dy, 0 - height + minimumAmountVisible, parentHeight - minimumAmountVisible); + double newX 
= MathUtil.clamp(controllerPane.getLayoutX() + dx, 0 - width + minimumAmountVisible, parentWidth - minimumAmountVisible); + double newY = MathUtil.clamp(controllerPane.getLayoutY() + dy, 0 - height + minimumAmountVisible, parentHeight - minimumAmountVisible); AnchorPane.setLeftAnchor(controllerPane, null); AnchorPane.setRightAnchor(controllerPane, null); diff --git a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/fxplayer/FXMedia.java b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/fxplayer/FXMedia.java index ee2c641..8fb06ee 100644 --- a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/fxplayer/FXMedia.java +++ b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/fxplayer/FXMedia.java @@ -21,6 +21,8 @@ import org.monte.demo.javafx.movieplayer.model.MediaInterface; import org.monte.demo.javafx.movieplayer.model.TrackInterface; +import java.util.Objects; + /** * Adapter for JavaFX {@link Media}. 
*/ @@ -32,11 +34,17 @@ public class FXMedia implements MediaInterface { public FXMedia(Media media) { this.media = media; for (Track track : media.getTracks()) { - switch (track) { - case VideoTrack t -> tracks.add(new FXVideoTrack(t)); - case AudioTrack t -> tracks.add(new FXAudioTrack(t)); - case SubtitleTrack t -> tracks.add(new FXSubtitleTrack(t)); - default -> tracks.add(new FXTrack(track)); + if (Objects.requireNonNull(track) instanceof VideoTrack) { + VideoTrack t = (VideoTrack) Objects.requireNonNull(track); + tracks.add(new FXVideoTrack(t)); + } else if (track instanceof AudioTrack) { + AudioTrack t = (AudioTrack) track; + tracks.add(new FXAudioTrack(t)); + } else if (track instanceof SubtitleTrack) { + SubtitleTrack t = (SubtitleTrack) track; + tracks.add(new FXSubtitleTrack(t)); + } else { + tracks.add(new FXTrack(track)); } } @@ -51,11 +59,17 @@ public void onChanged(Change c) { if (c.wasAdded()) { int i = c.getFrom(); for (Track track : c.getAddedSubList()) { - switch (track) { - case VideoTrack t -> tracks.add(i, new FXVideoTrack(t)); - case AudioTrack t -> tracks.add(i, new FXAudioTrack(t)); - case SubtitleTrack t -> tracks.add(i, new FXSubtitleTrack(t)); - default -> tracks.add(i, new FXTrack(track)); + if (Objects.requireNonNull(track) instanceof VideoTrack) { + VideoTrack t = (VideoTrack) Objects.requireNonNull(track); + tracks.add(i, new FXVideoTrack(t)); + } else if (track instanceof AudioTrack) { + AudioTrack t = (AudioTrack) track; + tracks.add(i, new FXAudioTrack(t)); + } else if (track instanceof SubtitleTrack) { + SubtitleTrack t = (SubtitleTrack) track; + tracks.add(i, new FXSubtitleTrack(t)); + } else { + tracks.add(i, new FXTrack(track)); } i++; } diff --git a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/MonteMedia.java 
b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/MonteMedia.java index 58bfbc6..bb5bc14 100644 --- a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/MonteMedia.java +++ b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/MonteMedia.java @@ -24,7 +24,8 @@ public MonteMedia(File source) { public void dispose() { for (var tr : tracks) { - if (tr instanceof MonteTrackInterface mtr) { + if (tr instanceof MonteTrackInterface) { + MonteTrackInterface mtr = (MonteTrackInterface) tr; mtr.dispose(); } } diff --git a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/MonteMediaView.java b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/MonteMediaView.java index b853a79..9714fc9 100644 --- a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/MonteMediaView.java +++ b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/MonteMediaView.java @@ -82,7 +82,8 @@ void initialize() { } private void addTrack(TrackInterface tr) { - if (tr instanceof MonteVideoTrack vt) { + if (tr instanceof MonteVideoTrack) { + MonteVideoTrack vt = (MonteVideoTrack) tr; ImageView imageView = new ImageView(); imageView.imageProperty().bind(vt.videoImageProperty()); Format format = vt.getFormat(); @@ -116,7 +117,8 @@ public void onChanged(Change c) { private void removeTrack(TrackInterface remitem) { Node remove = trackMap.remove(remitem); group.getChildren().remove(remove); - if (remove instanceof ImageView imageView) { + if (remove instanceof ImageView) { + 
ImageView imageView = (ImageView) remove; imageView.imageProperty().unbind(); } } diff --git a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/PlayerEngine.java b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/PlayerEngine.java index c27ac83..a4dbae7 100644 --- a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/PlayerEngine.java +++ b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/PlayerEngine.java @@ -48,6 +48,7 @@ import static org.monte.media.av.FormatKeys.MediaTypeKey; import static org.monte.media.av.FormatKeys.MimeTypeKey; import static org.monte.media.av.codec.audio.AudioFormatKeys.ENCODING_PCM_SIGNED; +import static org.monte.media.av.codec.audio.AudioFormatKeys.SampleRateKey; import static org.monte.media.av.codec.audio.AudioFormatKeys.SignedKey; import static org.monte.media.av.codec.video.VideoFormatKeys.DataClassKey; @@ -145,7 +146,7 @@ protected void doRealizing() throws Exception { runAndWait(() -> { media.setFormat(fileFormat); media.getTracks().addAll(tracks); - media.setDuration(Duration.millis(reader.getDuration().multiply(1000).doubleValue())); + media.setDuration(Duration.millis(reader.getMovieDuration().multiply(1000).doubleValue())); media.setWidth(finalWidth); media.setHeight(finalHeight); player.setCurrentTime(Duration.millis(renderedTime.multiply(1000).doubleValue())); @@ -166,6 +167,7 @@ private MonteAudioTrack realizeAudioTrack(int i, Map metadata, F Format desiredOutputFormat = new Format(MediaTypeKey, FormatKeys.MediaType.AUDIO,// EncodingKey, ENCODING_PCM_SIGNED,// MimeTypeKey, MIME_JAVA,// + SampleRateKey, trackFormat.get(SampleRateKey), SignedKey, true); Codec codec1 = Registry.getInstance().getCodec(format, 
desiredOutputFormat); if (codec1 != null) { @@ -221,7 +223,8 @@ private MonteVideoTrack realizeVideoTrack(int i, Map metadata, F do { status = codecChain.process(inBuf, outBuf); } while (status == Codec.CODEC_OUTPUT_NOT_FILLED); - if (!outBuf.isFlag(BufferFlag.DISCARD) && outBuf.data instanceof WritableImage wImg) { + if (!outBuf.isFlag(BufferFlag.DISCARD) && outBuf.data instanceof WritableImage) { + WritableImage wImg = (WritableImage) outBuf.data; vTrack.setVideoImage(wImg); vTrack.setRenderedStartTime(outBuf.timeStamp); vTrack.setRenderedEndTime(outBuf.timeStamp.add(outBuf.sampleDuration)); @@ -235,7 +238,8 @@ private MonteVideoTrack realizeVideoTrack(int i, Map metadata, F public Rational getFrameAfter(Rational seconds) { VideoTrackInterface vTrack = null; for (TrackInterface track : media.getTracks()) { - if (track instanceof VideoTrackInterface v) { + if (track instanceof VideoTrackInterface) { + VideoTrackInterface v = (VideoTrackInterface) track; vTrack = v; break; } @@ -245,12 +249,12 @@ public Rational getFrameAfter(Rational seconds) { } int trackID = (int) vTrack.getTrackID(); try { - long sample = reader.timeToSample(trackID, seconds); - Rational time = reader.sampleToTime(trackID, sample); - Rational duration = reader.getDuration(trackID, sample); + long sample = reader.findSampleAtTime(trackID, seconds); + Rational time = reader.getSampleTime(trackID, sample); + Rational duration = reader.getSampleDuration(trackID, sample); Rational sampleEndTime = time.add(duration); if (sampleEndTime.compareTo(seconds) <= 0 && sample == reader.getSampleCount(trackID) - 1) { - return reader.getDuration(); + return reader.getMovieDuration(); } return sampleEndTime; } catch (IOException e) { @@ -261,7 +265,8 @@ public Rational getFrameAfter(Rational seconds) { public Rational getFrameBefore(Rational seconds) { VideoTrackInterface vTrack = null; for (TrackInterface track : media.getTracks()) { - if (track instanceof VideoTrackInterface v) { + if (track instanceof 
VideoTrackInterface) { + VideoTrackInterface v = (VideoTrackInterface) track; vTrack = v; break; } @@ -271,11 +276,11 @@ public Rational getFrameBefore(Rational seconds) { } int trackID = (int) vTrack.getTrackID(); try { - long sample = reader.timeToSample(trackID, seconds); - Rational time = reader.sampleToTime(trackID, sample); + long sample = reader.findSampleAtTime(trackID, seconds); + Rational time = reader.getSampleTime(trackID, sample); if (sample > 0 && time.compareTo(seconds) >= 0) { sample--; - time = reader.sampleToTime(trackID, sample); + time = reader.getSampleTime(trackID, sample); } return time; } catch (IOException e) { @@ -327,7 +332,7 @@ protected void doStarted() throws Exception { playTime = renderedTime; } // Start from beginning if we are at the end of the movie - Rational playEndTime = reader.getDuration(); + Rational playEndTime = reader.getMovieDuration(); if (playTime.compareTo(Rational.ZERO) < 0 || playTime.compareTo(playEndTime) >= 0) { playTime = Rational.ZERO; } @@ -368,7 +373,8 @@ protected void doStarted() throws Exception { private void stopAudio() { for (var t : media.getTracks()) { - if (t instanceof MonteAudioTrack mat) { + if (t instanceof MonteAudioTrack) { + MonteAudioTrack mat = (MonteAudioTrack) t; mat.interruptWorker(); SourceDataLine sourceDataLine = mat.getSourceDataLine(); if (sourceDataLine != null) { @@ -381,9 +387,10 @@ private void stopAudio() { private void updateBuffers(Rational playTime) throws IOException { for (var track : media.getTracks()) { - if (!(track instanceof MonteTrackInterface tr) || tr.getCodec() == null) { + if (!(track instanceof MonteTrackInterface) || ((MonteTrackInterface) track).getCodec() == null) { continue; } + MonteTrackInterface tr = (MonteTrackInterface) track; Buffer outBuf = tr.getOutBufferA(); if (outBuf.timeStamp.compareTo(playTime) <= 0 && playTime.compareTo(outBuf.getBufferEndTimestamp()) < 0) { @@ -416,16 +423,18 @@ private void renderBuffers(Rational renderTime, boolean playAudio, 
long currentN private void renderAudioBuffers(Rational renderTime, long currentNanoTime) { for (var track : media.getTracks()) { - if (!(track instanceof MonteAudioTrack tr) || tr.getCodec() == null) { + if (!(track instanceof MonteAudioTrack) || ((MonteAudioTrack) track).getCodec() == null) { continue; } + MonteAudioTrack tr = (MonteAudioTrack) track; Buffer outBuf = tr.getOutBufferA(); if (!outBuf.isFlag(BufferFlag.DISCARD)) { Rational bufferStartTime = outBuf.timeStamp; Rational bufferEndTime = outBuf.getBufferEndTimestamp(); boolean bufferTimeIntersectsPlayTime = renderTime.isInRange(bufferStartTime, bufferEndTime); - if (bufferTimeIntersectsPlayTime && tr.getSourceDataLine() != null && outBuf.data instanceof byte[] byteArray) { + if (bufferTimeIntersectsPlayTime && tr.getSourceDataLine() != null && outBuf.data instanceof byte[]) { + byte[] byteArray = (byte[]) outBuf.data; boolean isRenderTimeValid = tr.renderedUntilNanoTime + (1_000_000_000L / PLAYER_RATE) > currentNanoTime; int skipSamples; @@ -466,10 +475,11 @@ private void renderVideoBuffers(Rational renderTime) { Platform.runLater(() -> { player.setCurrentTime(Duration.seconds(renderTime.doubleValue())); for (var track : media.getTracks()) { - if (!(track instanceof MonteVideoTrack tr) - || tr.getCodec() == null) { + if (!(track instanceof MonteVideoTrack) + || ((MonteVideoTrack) track).getCodec() == null) { continue; } + MonteVideoTrack tr = (MonteVideoTrack) track; Buffer outBuf = tr.getOutBufferA(); if (!outBuf.isFlag(BufferFlag.DISCARD)) { Rational bufferStartTime = outBuf.timeStamp; @@ -477,8 +487,10 @@ private void renderVideoBuffers(Rational renderTime) { boolean bufferTimeIntersectsPlayTime = bufferStartTime.compareTo(renderTime) <= 0 && renderTime.compareTo(bufferEndTime) < 0; if (bufferTimeIntersectsPlayTime - && tr instanceof MonteVideoTrack mvt - && outBuf.data instanceof WritableImage img) { + && tr instanceof MonteVideoTrack + && outBuf.data instanceof WritableImage) { + MonteVideoTrack mvt 
= tr; + WritableImage img = (WritableImage) outBuf.data; mvt.setVideoImage(img); } tr.setRenderedStartTime(bufferStartTime); diff --git a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/WritableImageCodec.java b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/WritableImageCodec.java index 8760594..c5f5d4f 100644 --- a/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/WritableImageCodec.java +++ b/org.monte.demo.javafx.movieplayer/src/main/java/org.monte.demo.javafx.movieplayer/org/monte/demo/javafx/movieplayer/monteplayer/WritableImageCodec.java @@ -42,10 +42,11 @@ public int process(Buffer in, Buffer out) { out.format = this.outputFormat; - if (!(in.data instanceof BufferedImage b)) { + if (!(in.data instanceof BufferedImage)) { out.setFlag(BufferFlag.DISCARD); return WritableImageCodec.CODEC_FAILED; } + BufferedImage b = (BufferedImage) in.data; // SwingFXUtils is slow if the image has an indexed color model /* @@ -70,7 +71,7 @@ public int process(Buffer in, Buffer out) { }*/ - out.data = SwingFXUtils.toFXImage(b, (out.data instanceof WritableImage w) ? w : null); + out.data = SwingFXUtils.toFXImage(b, (out.data instanceof WritableImage) ? 
(WritableImage) out.data : null); return WritableImageCodec.CODEC_OK; } } diff --git a/org.monte.demo.movieconverter/src/main/java/org.monte.demo.movieconverter/org/monte/demo/movieconverter/Main.java b/org.monte.demo.movieconverter/src/main/java/org.monte.demo.movieconverter/org/monte/demo/movieconverter/Main.java index 191d79a..9f4f10f 100755 --- a/org.monte.demo.movieconverter/src/main/java/org.monte.demo.movieconverter/org/monte/demo/movieconverter/Main.java +++ b/org.monte.demo.movieconverter/src/main/java/org.monte.demo.movieconverter/org/monte/demo/movieconverter/Main.java @@ -86,7 +86,7 @@ protected Movie construct() throws Exception { MovieReader r = Registry.getInstance().getReader(newFile); if (r == null) throw new IOException("no reader"); DefaultMovie m = new DefaultMovie(); - r.getDuration();// this ensures that we realize the reader! + r.getMovieDuration();// this ensures that we realize the reader! m.setReader(r); return m; } diff --git a/org.monte.demo.moviewriter/pom.xml b/org.monte.demo.moviewriter/pom.xml index 1878995..3fdd077 100644 --- a/org.monte.demo.moviewriter/pom.xml +++ b/org.monte.demo.moviewriter/pom.xml @@ -19,7 +19,7 @@ src/main/java/org.monte.demo.moviewriter - ${basedir}/src/test/java/org.monte.demo.aviwriter + ${basedir}/src/test/java/org.monte.demo.moviewriter src/main/resources/org.monte.demo.moviewriter @@ -50,6 +50,11 @@ + + org.junit.jupiter + junit-jupiter + test + ch.randelshofer org.monte.media diff --git a/org.monte.demo.moviewriter/src/main/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/Main.java b/org.monte.demo.moviewriter/src/main/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/Main.java index 7d09592..0b3fb73 100755 --- a/org.monte.demo.moviewriter/src/main/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/Main.java +++ b/org.monte.demo.moviewriter/src/main/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/Main.java @@ -1,15 +1,13 @@ /* - * @(#)Main.java + * 
@(#)TestMovieWriters.java * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. */ package org.monte.demo.moviewriter; import org.monte.media.av.Format; import org.monte.media.av.FormatKeys.MediaType; -import org.monte.media.av.MovieReader; import org.monte.media.av.MovieWriter; import org.monte.media.av.Registry; -import org.monte.media.av.codec.video.VideoFormatKeys; import org.monte.media.av.codec.video.VideoFormatKeys.PixelFormat; import org.monte.media.avi.AVIReader; import org.monte.media.avi.AVIWriter; @@ -24,9 +22,11 @@ import java.awt.Stroke; import java.awt.geom.AffineTransform; import java.awt.image.BufferedImage; +import java.awt.image.DataBufferInt; import java.awt.image.IndexColorModel; import java.io.File; import java.io.IOException; +import java.util.Arrays; import static org.monte.media.av.FormatKeys.EncodingKey; import static org.monte.media.av.FormatKeys.FrameRateKey; @@ -85,7 +85,7 @@ private static BufferedImage createImage(Format format) { return img; } - private static void drawAnimationFrame(BufferedImage img, Graphics2D g, double second, int frameIndex, int frameCount) { + private static void drawAnimationFrame(Graphics2D g, double second, int frameIndex, int frameCount) { drawClock(g, 232, 240, 150, second); g.setPaint(Color.WHITE); @@ -128,7 +128,7 @@ private static void drawClockHand(Graphics2D g, int cx, int cy, int radius1, int * @param args the command line arguments */ public static void main(String[] args) { - System.out.println("AVIDemo " + Main.class.getPackage().getImplementationVersion()); + System.out.println("MovieWriterDemo " + Main.class.getPackage().getImplementationVersion()); System.out.println("This is a demo of the Monte Media library."); System.out.println("Copyright © Werner Randelshofer. 
All Rights Reserved."); System.out.println("License: MIT License"); @@ -136,89 +136,40 @@ public static void main(String[] args) { try { var m = new Main(); - m.test(new File("moviewriterdemo-tscc24.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 24), true); - m.test(new File("moviewriterdemo-h264-motion16.mov"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, MotionSearchRangeKey, 16), false); - m.test(new File("moviewriterdemo-h264-motion0.mov"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, MotionSearchRangeKey, 0), false); - if (true) return; - m.test(new File("moviewriterdemo-h264-motion16.m4v"), new Format(DepthKey, 24, QualityKey, 0.75f, MotionSearchRangeKey, 16), true); - m.test(new File("moviewriterdemo-h264-motion0.m4v"), new Format(DepthKey, 24, QualityKey, 0.75f, MotionSearchRangeKey, 0), true); - m.test(new File("moviewriterdemo-h264-motion0.avi"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, MotionSearchRangeKey, 0), true); - m.test(new File("moviewriterdemo-jpg-q0.75.avi"), new Format(EncodingKey, ENCODING_AVI_MJPG, DepthKey, 24, QualityKey, 0.75f), true); - m.test(new File("moviewriterdemo-png.avi"), new Format(EncodingKey, ENCODING_AVI_PNG, DepthKey, 24), true); - m.test(new File("moviewriterdemo-raw24.avi"), new Format(EncodingKey, ENCODING_AVI_DIB, DepthKey, 24), true); - m.test(new File("moviewriterdemo-raw8.avi"), new Format(EncodingKey, ENCODING_AVI_DIB, DepthKey, 8), true); - m.test(new File("moviewriterdemo-rle8.avi"), new Format(EncodingKey, ENCODING_AVI_RLE8, DepthKey, 8), true); - m.test(new File("moviewriterdemo-tscc8.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8), true); - m.test(new File("moviewriterdemo-tscc16.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 16), true); - m.test(new File("moviewriterdemo-tscc24.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 24), true); - m.test(new 
File("moviewriterdemo-raw8gray.avi"), new Format(EncodingKey, ENCODING_AVI_DIB, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY), true); - m.test(new File("moviewriterdemo-rle8gray.avi"), new Format(EncodingKey, ENCODING_AVI_RLE8, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY), false); - m.test(new File("moviewriterdemo-tscc8gray.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY), true); - //test(new File("avidemo-rle4.avi"), AVIOutputStreamOLD.AVIVideoFormat.RLE, 4, 1f); - m.test(new File("moviewriterdemo-jpg-q0.75.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_JPEG, DepthKey, 24, QualityKey, 0.75f), true); - m.test(new File("moviewriterdemo-png.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_PNG, DepthKey, 24), true); - m.test(new File("moviewriterdemo-raw24.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_RAW, DepthKey, 24), false); - m.test(new File("moviewriterdemo-raw8.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_RAW, DepthKey, 8), false); - m.test(new File("moviewriterdemo-tscc8.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8), true); - m.test(new File("moviewriterdemo-tscc8gray.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8, - PixelFormatKey, VideoFormatKeys.PixelFormat.GRAY), true); - m.test(new File("moviewriterdemo-tscc16.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 16), true); - m.test(new File("moviewriterdemo-rle8.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_ANIMATION, DepthKey, 8), false); - m.test(new File("moviewriterdemo-rle16.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_ANIMATION, DepthKey, 16), false); - m.test(new File("moviewriterdemo-rle24.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_ANIMATION, DepthKey, 24), false); - m.test(new File("moviewriterdemo-png.zip"), new Format(EncodingKey, ENCODING_AVI_PNG, DepthKey, 24), true); + m.test(new 
File("moviewriterdemo-h264-motion0.mp4"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, QualityKey, 0.75f, MotionSearchRangeKey, 0)); + m.test(new File("moviewriterdemo-h264-motion0.mov"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, MotionSearchRangeKey, 0)); + m.test(new File("moviewriterdemo-h264-motion0.avi"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, MotionSearchRangeKey, 0)); + m.test(new File("moviewriterdemo-h264-motion16.mp4"), new Format(DepthKey, 24, QualityKey, 0.75f, MotionSearchRangeKey, 16)); + m.test(new File("moviewriterdemo-h264-motion16.mov"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, MotionSearchRangeKey, 16)); + m.test(new File("moviewriterdemo-jpg-q0.75.avi"), new Format(EncodingKey, ENCODING_AVI_MJPG, DepthKey, 24, QualityKey, 0.75f)); + m.test(new File("moviewriterdemo-jpg-q0.75.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_JPEG, DepthKey, 24, QualityKey, 0.75f)); + m.test(new File("moviewriterdemo-png.avi"), new Format(EncodingKey, ENCODING_AVI_PNG, DepthKey, 24)); + m.test(new File("moviewriterdemo-png.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_PNG, DepthKey, 24)); + m.test(new File("moviewriterdemo-png.zip"), new Format(EncodingKey, ENCODING_AVI_PNG, DepthKey, 24)); + m.test(new File("moviewriterdemo-raw24.avi"), new Format(EncodingKey, ENCODING_AVI_DIB, DepthKey, 24)); + m.test(new File("moviewriterdemo-raw24.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_RAW, DepthKey, 24)); + m.test(new File("moviewriterdemo-raw8.avi"), new Format(EncodingKey, ENCODING_AVI_DIB, DepthKey, 8)); + m.test(new File("moviewriterdemo-raw8.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_RAW, DepthKey, 8)); + m.test(new File("moviewriterdemo-raw8gray.avi"), new Format(EncodingKey, ENCODING_AVI_DIB, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY)); + m.test(new File("moviewriterdemo-rle16.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_ANIMATION, DepthKey, 16)); + m.test(new File("moviewriterdemo-rle24.mov"), 
new Format(EncodingKey, ENCODING_QUICKTIME_ANIMATION, DepthKey, 24)); + m.test(new File("moviewriterdemo-rle8.avi"), new Format(EncodingKey, ENCODING_AVI_RLE8, DepthKey, 8)); + m.test(new File("moviewriterdemo-rle8.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_ANIMATION, DepthKey, 8)); + m.test(new File("moviewriterdemo-rle8gray.avi"), new Format(EncodingKey, ENCODING_AVI_RLE8, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY)); + m.test(new File("moviewriterdemo-tscc16.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 16)); + m.test(new File("moviewriterdemo-tscc16.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 16)); + m.test(new File("moviewriterdemo-tscc24.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 24)); + m.test(new File("moviewriterdemo-tscc24.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 24)); + m.test(new File("moviewriterdemo-tscc8.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8)); + m.test(new File("moviewriterdemo-tscc8.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8)); + m.test(new File("moviewriterdemo-tscc8gray.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY)); + m.test(new File("moviewriterdemo-tscc8gray.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY)); } catch (IOException ex) { ex.printStackTrace(); } } - private void test(File file, Format format, boolean tryToReadTheFile) throws IOException { - testWriting(file, format); - if (tryToReadTheFile) { - try { - testReading(file); - } catch (UnsupportedOperationException e) { - e.printStackTrace(); - } - } - } - - private static void testReading(File file) throws IOException { - System.out.print("Reading " + file.getAbsolutePath()); - long startTime = System.nanoTime(); 
- MovieReader in = null; - - try { - // Create the reader - in = Registry.getInstance().getReader(file); - - // Look for the first video track - int track = 0; - while (track < in.getTrackCount() - && in.getFormat(track).get(MediaTypeKey) != MediaType.VIDEO) { - track++; - } - - // Read images from the track - BufferedImage img = null; - do { - img = in.read(track, img); - - //...to do: do something with the image... - } while (img != null); - } catch (IOException e) { - System.out.println("Reading failed " + file + " " + e.getMessage()); - //throw e; - } finally { - // Close the rader - if (in != null) { - in.close(); - } - } - System.out.println(" elapsed " + (System.nanoTime() - startTime) / 1e9 + " seconds"); - } - - private static void testWriting(File file, Format format) throws IOException { + private void test(File file, Format format) throws IOException { System.out.print("Writing " + file.getAbsolutePath()); long startTime = System.nanoTime(); @@ -233,10 +184,7 @@ private static void testWriting(File file, Format format) throws IOException { BufferedImage img = createImage(format); Graphics2D g = img.createGraphics(); g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); - var backgroundImage = ImageIO.read(Main.class.getResource("BackgroundImage.png")); - if (backgroundImage != null) { - g.drawImage(backgroundImage, 0, 0, null); - } + drawBackgroundImage(g); MovieWriter out = null; int n = frameRate.multiply(60).intValue(); @@ -250,7 +198,7 @@ private static void testWriting(File file, Format format) throws IOException { // Draw the animation for (int i = 0; i < n; i++) { double t = frameRate.divide(i).doubleValue() + 8 * 3600 + 25 * 60; - drawAnimationFrame(img, g, t, i, n); + drawAnimationFrame(g, t, i, n); // write image to the writer out.write(0, img, 1); @@ -265,6 +213,26 @@ private static void testWriting(File file, Format format) throws IOException { // Dispose the graphics object g.dispose(); } - 
System.out.println(" elapsed " + (int) ((n * 1e9) / (System.nanoTime() - startTime)) + " fps"); + System.out.println(", " + (int) ((n * 1e9) / (System.nanoTime() - startTime)) + " fps"); + } + + private static int compareImages(BufferedImage expectedImage, BufferedImage actualImage) { + if (expectedImage.getRaster().getDataBuffer() instanceof DataBufferInt && actualImage.getRaster().getDataBuffer() instanceof DataBufferInt) { + DataBufferInt expectedBuffer = (DataBufferInt) expectedImage.getRaster().getDataBuffer(); + DataBufferInt actualBuffer = (DataBufferInt) actualImage.getRaster().getDataBuffer(); + int[] expectedData = expectedBuffer.getData(); + int[] actualData = actualBuffer.getData(); + int mismatch = Arrays.mismatch(expectedData, 0, expectedData.length, actualData, 0, actualData.length); + return mismatch; + } else { + return -2; + } + } + + private static void drawBackgroundImage(Graphics2D g) throws IOException { + var backgroundImage = ImageIO.read(Main.class.getResource("BackgroundImage.png")); + if (backgroundImage != null) { + g.drawImage(backgroundImage, 0, 0, null); + } } } diff --git a/org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/ImageComparisonForm.form b/org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/ImageComparisonForm.form new file mode 100644 index 0000000..a06f280 --- /dev/null +++ b/org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/ImageComparisonForm.form @@ -0,0 +1,160 @@ + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/ImageComparisonForm.java b/org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/ImageComparisonForm.java new file mode 100644 index 0000000..6d549e5 --- /dev/null +++ b/org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/ImageComparisonForm.java @@ -0,0 +1,268 @@ +/* + * The MIT License + * + * Copyright 2024 wr. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. 
+ */ +package org.monte.demo.moviewriter; + +import javax.swing.ImageIcon; +import java.awt.image.BufferedImage; +import java.io.File; + +/** + * @author wr + */ +public class ImageComparisonForm extends javax.swing.JPanel { + private Runnable onNext; + private Runnable onPrevious; + private Runnable onClose; + + /** + * Creates new form ImageComparisonForm + */ + public ImageComparisonForm() { + initComponents(); + } + + public void setActualImage(BufferedImage image) { + actualImageView.setIcon(new ImageIcon(image)); + } + + public void setExpectedImage(BufferedImage image) { + expectedImageView.setIcon(new ImageIcon(image)); + } + + public void setDiffImage(BufferedImage image) { + diffImageView.setIcon(new ImageIcon(image)); + } + + public void setFile(File file) { + fileField.setText(file.getName()); + } + + public void setFrame(long frame) { + frameField.setText(Long.toString(frame)); + } + + public void setOnNext(Runnable onNext) { + this.onNext = onNext; + } + + public void setOnPrevious(Runnable onPrevious) { + this.onPrevious = onPrevious; + } + + public void setOnClose(Runnable onClose) { + this.onClose = onClose; + } + + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. 
+ */ + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + java.awt.GridBagConstraints gridBagConstraints; + + fileLabel = new javax.swing.JLabel(); + frameLabel = new javax.swing.JLabel(); + fileField = new javax.swing.JTextField(); + frameField = new javax.swing.JTextField(); + imagesPanel = new javax.swing.JPanel(); + expectedImageLabel = new javax.swing.JLabel(); + actualImageLabel = new javax.swing.JLabel(); + diffImageLabel = new javax.swing.JLabel(); + expectedImageView = new javax.swing.JLabel(); + actualImageView = new javax.swing.JLabel(); + diffImageView = new javax.swing.JLabel(); + buttonsPanel = new javax.swing.JPanel(); + previousButton = new javax.swing.JButton(); + nextButton = new javax.swing.JButton(); + closeButton = new javax.swing.JButton(); + + setLayout(new java.awt.GridBagLayout()); + + fileLabel.setText("File:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(3, 6, 0, 0); + add(fileLabel, gridBagConstraints); + + frameLabel.setText("Frame:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 1; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(9, 6, 0, 0); + add(frameLabel, gridBagConstraints); + + fileField.setEditable(false); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(0, 6, 0, 0); + add(fileField, gridBagConstraints); + + frameField.setEditable(false); + frameField.setColumns(10); + gridBagConstraints = 
new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 1; + gridBagConstraints.ipadx = 100; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(6, 6, 0, 0); + add(frameField, gridBagConstraints); + + imagesPanel.setLayout(new java.awt.GridBagLayout()); + + expectedImageLabel.setText("Expected Image:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + imagesPanel.add(expectedImageLabel, gridBagConstraints); + + actualImageLabel.setText("Actual Image:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 0; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 6, 0, 0); + imagesPanel.add(actualImageLabel, gridBagConstraints); + + diffImageLabel.setText("Diff Image:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 2; + gridBagConstraints.gridy = 0; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 6, 0, 0); + imagesPanel.add(diffImageLabel, gridBagConstraints); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 1; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(6, 0, 0, 0); + imagesPanel.add(expectedImageView, gridBagConstraints); + gridBagConstraints = new java.awt.GridBagConstraints(); + 
gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 1; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(6, 6, 0, 0); + imagesPanel.add(actualImageView, gridBagConstraints); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 2; + gridBagConstraints.gridy = 1; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(6, 6, 0, 0); + imagesPanel.add(diffImageView, gridBagConstraints); + + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 2; + gridBagConstraints.gridwidth = 2; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(12, 6, 6, 6); + add(imagesPanel, gridBagConstraints); + + buttonsPanel.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.RIGHT)); + + previousButton.setText("Previous"); + previousButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + previousButtonActionPerformed(evt); + } + }); + buttonsPanel.add(previousButton); + + nextButton.setText("Next"); + nextButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + nextButtonActionPerformed(evt); + } + }); + buttonsPanel.add(nextButton); + + closeButton.setText("Close"); + closeButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + 
closeButtonActionPerformed(evt); + } + }); + buttonsPanel.add(closeButton); + + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 3; + gridBagConstraints.gridwidth = java.awt.GridBagConstraints.REMAINDER; + gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL; + add(buttonsPanel, gridBagConstraints); + }// //GEN-END:initComponents + + private void previousButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_previousButtonActionPerformed + if (onPrevious != null) onPrevious.run(); + }//GEN-LAST:event_previousButtonActionPerformed + + private void nextButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_nextButtonActionPerformed + if (onNext != null) onNext.run(); + }//GEN-LAST:event_nextButtonActionPerformed + + private void closeButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_closeButtonActionPerformed + if (onClose != null) onClose.run(); + }//GEN-LAST:event_closeButtonActionPerformed + + // Variables declaration - do not modify//GEN-BEGIN:variables + private javax.swing.JLabel actualImageLabel; + private javax.swing.JLabel actualImageView; + private javax.swing.JPanel buttonsPanel; + private javax.swing.JButton closeButton; + private javax.swing.JLabel diffImageLabel; + private javax.swing.JLabel diffImageView; + private javax.swing.JLabel expectedImageLabel; + private javax.swing.JLabel expectedImageView; + private javax.swing.JTextField fileField; + private javax.swing.JLabel fileLabel; + private javax.swing.JTextField frameField; + private javax.swing.JLabel frameLabel; + private javax.swing.JPanel imagesPanel; + private javax.swing.JButton nextButton; + private javax.swing.JButton previousButton; + // End of variables declaration//GEN-END:variables +} diff --git a/org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/TestMovieWriters.java 
b/org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/TestMovieWriters.java new file mode 100755 index 0000000..f453603 --- /dev/null +++ b/org.monte.demo.moviewriter/src/test/java/org.monte.demo.moviewriter/org/monte/demo/moviewriter/TestMovieWriters.java @@ -0,0 +1,399 @@ +/* + * @(#)TestMovieWriters.java + * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. + */ +package org.monte.demo.moviewriter; + +import org.junit.jupiter.api.Test; +import org.monte.media.av.Buffer; +import org.monte.media.av.BufferFlag; +import org.monte.media.av.Codec; +import org.monte.media.av.Format; +import org.monte.media.av.FormatKeys.MediaType; +import org.monte.media.av.MovieReader; +import org.monte.media.av.MovieWriter; +import org.monte.media.av.Registry; +import org.monte.media.av.codec.video.VideoFormatKeys.PixelFormat; +import org.monte.media.avi.AVIReader; +import org.monte.media.avi.AVIWriter; +import org.monte.media.color.Colors; +import org.monte.media.math.Rational; + +import javax.imageio.ImageIO; +import javax.swing.JFrame; +import javax.swing.SwingUtilities; +import java.awt.BasicStroke; +import java.awt.Color; +import java.awt.Graphics2D; +import java.awt.RenderingHints; +import java.awt.Stroke; +import java.awt.event.WindowAdapter; +import java.awt.event.WindowEvent; +import java.awt.geom.AffineTransform; +import java.awt.image.BufferedImage; +import java.awt.image.DataBufferByte; +import java.awt.image.DataBufferInt; +import java.awt.image.DataBufferShort; +import java.awt.image.IndexColorModel; +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; + +import static org.monte.media.av.FormatKeys.EncodingKey; +import static org.monte.media.av.FormatKeys.FrameRateKey; +import static org.monte.media.av.FormatKeys.MediaTypeKey; +import static 
org.monte.media.av.codec.video.VideoFormatKeys.DepthKey; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_AVC1; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_AVI_DIB; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_AVI_MJPG; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_AVI_PNG; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_AVI_RLE8; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_BUFFERED_IMAGE; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_QUICKTIME_ANIMATION; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_QUICKTIME_JPEG; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_QUICKTIME_PNG; +import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_QUICKTIME_RAW; +import static org.monte.media.av.codec.video.VideoFormatKeys.HeightKey; +import static org.monte.media.av.codec.video.VideoFormatKeys.MotionSearchRangeKey; +import static org.monte.media.av.codec.video.VideoFormatKeys.PixelFormatKey; +import static org.monte.media.av.codec.video.VideoFormatKeys.QualityKey; +import static org.monte.media.av.codec.video.VideoFormatKeys.WidthKey; + +/** + * Demonstrates the use of {@link AVIReader} and {@link AVIWriter}. + * + * @author Werner Randelshofer + */ +public class TestMovieWriters { + + private final static boolean INTERACTIVE = false; + private static final int CLOCK_START_TIME = 8 * 3600 + 25 * 60; + + /** + * Creates a buffered image of the specified format. 
+ */ + private static BufferedImage createImage(Format format) { + int depth = format.get(DepthKey); + int width = format.get(WidthKey); + int height = format.get(HeightKey); + PixelFormat pixelFormat = format.get(PixelFormatKey); + + BufferedImage img; + switch (depth) { + case 24: + default: { + img = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); + break; + } + case 8: + if (pixelFormat == PixelFormat.GRAY) { + img = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY); + break; + } else { + IndexColorModel palette = Colors.createMacColors(); + img = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_INDEXED, palette); + break; + } + } + return img; + } + + private static void drawAnimationFrame(Graphics2D g, double second, int frameIndex, int frameCount) { + drawClock(g, 232, 240, 150, second); + + g.setPaint(Color.WHITE); + g.fillRect(472, 10, 168, 110); + g.setPaint(Color.BLACK); + g.drawString("Frame " + (frameIndex + 1) + " of " + frameCount, 473, 24); + } + + private static void drawClock(Graphics2D g, int cx, int cy, int radius, double timeInSeconds) { + g.setPaint(Color.WHITE); + g.fillOval(cx - radius, cy - radius, radius * 2, radius * 2); + + double timeInMinutes = timeInSeconds / 60.0; + double timeInHours = timeInMinutes / 60.0; + drawClockHand(g, cx, cy, -10, radius / 2, new BasicStroke(20, BasicStroke.CAP_ROUND, BasicStroke.JOIN_BEVEL), Color.BLACK, (timeInHours) * (Math.PI * 2.0 / 12.0)); + drawClockHand(g, cx, cy, -10, radius - 20, new BasicStroke(20, BasicStroke.CAP_ROUND, BasicStroke.JOIN_BEVEL), new Color(0x1a1a1a), (timeInMinutes) * (Math.PI * 2.0 / 60.0)); + drawClockHand(g, cx, cy, -64, radius - 1, new BasicStroke(6, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL), Color.RED, (timeInSeconds) * (Math.PI * 2.0 / +60.0)); + drawClockHand(g, cx, cy, -64, -24, new BasicStroke(20, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL), Color.RED, (timeInSeconds) * (Math.PI * 2.0 / +60.0)); + // Draw plug + int plugRadius = 12; 
+ g.setPaint(Color.WHITE); + g.fillOval(cx - plugRadius, cy - plugRadius, plugRadius * 2, plugRadius * 2); + g.setStroke(new BasicStroke(10)); + g.setPaint(new Color(0x333333)); + g.drawOval(cx - plugRadius, cy - plugRadius, plugRadius * 2, plugRadius * 2); + } + + private static void drawClockHand(Graphics2D g, int cx, int cy, int radius1, int radius2, Stroke stroke, Color color, double theta) { + AffineTransform tx = new AffineTransform(); + tx.setToRotation(theta % (Math.PI * 2), cx, cy); + g.setTransform(tx); + g.setColor(color); + g.setStroke(stroke); + g.drawLine(cx, cy - radius1, cx, cy - radius2); + tx.setToIdentity(); + g.setTransform(tx); + } + + @Test + public void testMovieWriter() throws Exception { + + + try { + var m = new TestMovieWriters(); + m.test(new File("moviewriterdemo-png.avi"), new Format(EncodingKey, ENCODING_AVI_PNG, DepthKey, 24), true); + m.test(new File("moviewriterdemo-h264-motion0.avi"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, MotionSearchRangeKey, 0), true); + m.test(new File("moviewriterdemo-h264-motion0.mp4"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, QualityKey, 0.75f, MotionSearchRangeKey, 0), true); + m.test(new File("moviewriterdemo-h264-motion0.mov"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, MotionSearchRangeKey, 0), false); + m.test(new File("moviewriterdemo-h264-motion16.mp4"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, QualityKey, 0.75f, MotionSearchRangeKey, 16), true); + m.test(new File("moviewriterdemo-h264-motion16.mov"), new Format(EncodingKey, ENCODING_AVC1, DepthKey, 24, MotionSearchRangeKey, 16), false); + m.test(new File("moviewriterdemo-jpg-q0.75.avi"), new Format(EncodingKey, ENCODING_AVI_MJPG, DepthKey, 24, QualityKey, 0.75f), true); + m.test(new File("moviewriterdemo-jpg-q0.75.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_JPEG, DepthKey, 24, QualityKey, 0.75f), true); + m.test(new File("moviewriterdemo-png.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_PNG, 
DepthKey, 24), true); + m.test(new File("moviewriterdemo-png.zip"), new Format(EncodingKey, ENCODING_AVI_PNG, DepthKey, 24), true); + m.test(new File("moviewriterdemo-raw24.avi"), new Format(EncodingKey, ENCODING_AVI_DIB, DepthKey, 24), true); + m.test(new File("moviewriterdemo-raw24.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_RAW, DepthKey, 24), false); + m.test(new File("moviewriterdemo-raw8.avi"), new Format(EncodingKey, ENCODING_AVI_DIB, DepthKey, 8), true); + m.test(new File("moviewriterdemo-raw8.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_RAW, DepthKey, 8), false); + m.test(new File("moviewriterdemo-raw8gray.avi"), new Format(EncodingKey, ENCODING_AVI_DIB, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY), true); + m.test(new File("moviewriterdemo-rle16.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_ANIMATION, DepthKey, 16), false); + m.test(new File("moviewriterdemo-rle24.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_ANIMATION, DepthKey, 24), false); + m.test(new File("moviewriterdemo-rle8.avi"), new Format(EncodingKey, ENCODING_AVI_RLE8, DepthKey, 8), true); + m.test(new File("moviewriterdemo-rle8.mov"), new Format(EncodingKey, ENCODING_QUICKTIME_ANIMATION, DepthKey, 8), false); + m.test(new File("moviewriterdemo-rle8gray.avi"), new Format(EncodingKey, ENCODING_AVI_RLE8, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY), false); + m.test(new File("moviewriterdemo-tscc16.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 16), true); + m.test(new File("moviewriterdemo-tscc16.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 16), true); + m.test(new File("moviewriterdemo-tscc24.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 24), true); + m.test(new File("moviewriterdemo-tscc24.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 24), true); + m.test(new File("moviewriterdemo-tscc8.avi"), new Format(EncodingKey, 
ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8), true); + m.test(new File("moviewriterdemo-tscc8.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8), true); + m.test(new File("moviewriterdemo-tscc8gray.avi"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY), true); + m.test(new File("moviewriterdemo-tscc8gray.mov"), new Format(EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, DepthKey, 8, PixelFormatKey, PixelFormat.GRAY), true); + } catch (IOException ex) { + ex.printStackTrace(); + } + } + + private void test(File file, Format format, boolean tryToReadTheFile) throws Exception { + testWriting(file, format); + if (tryToReadTheFile) { + testReading(file); + } + } + + private static void testReading(File file) throws IOException, ExecutionException, InterruptedException { + System.out.println("Reading " + file.getAbsolutePath()); + long startTime = System.nanoTime(); + MovieReader in = null; + int count = 0; + + try { + // Create the reader + in = Registry.getInstance().getReader(file); + + // Look for the first video track + int track = 0; + while (track < in.getTrackCount() + && in.getFormat(track).get(MediaTypeKey) != MediaType.VIDEO) { + track++; + } + + // Read images from the track and compare with expected image + Buffer inbuf = new Buffer(); + Buffer outbuf = new Buffer(); + BufferedImage actualImage = null; + BufferedImage expectedImage = createImage(in.getFormat(track)); + Graphics2D g = expectedImage.createGraphics(); + g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); + drawBackgroundImage(g); + Codec codec = Registry.getInstance().getCodec(in.getFormat(track), new Format(EncodingKey, ENCODING_BUFFERED_IMAGE)); + + if (codec == null) { + System.out.println(" Could not find a codec"); + } + Loop: + while (true) { + in.read(track, inbuf); + if (inbuf.isFlag(BufferFlag.END_OF_MEDIA)) { + break; + } + count++; + if (codec != 
null) { + drawAnimationFrame(g, inbuf.timeStamp.doubleValue() + CLOCK_START_TIME, + (int) inbuf.sequenceNumber, in.getSampleCount(track)); + codec.process(inbuf, outbuf); + actualImage = (BufferedImage) outbuf.data; + BufferedImage diffImage = compareImages(expectedImage, actualImage); + if (diffImage != null) { + System.out.println(" Image " + inbuf.sequenceNumber + " does not match the expected image."); + var finalActualImage = actualImage; + if (INTERACTIVE) { + CompletableFuture future = new CompletableFuture<>(); + SwingUtilities.invokeLater(() -> { + JFrame f = new JFrame(); + f.addWindowListener(new WindowAdapter() { + @Override + public void windowClosing(WindowEvent e) { + f.dispose(); + future.complete(null); + } + }); + f.setTitle(file.getName()); + ImageComparisonForm form = new ImageComparisonForm(); + f.setContentPane(form); + form.setFile(file); + form.setFrame((int) outbuf.sequenceNumber); + form.setExpectedImage(expectedImage); + form.setActualImage(finalActualImage); + form.setDiffImage(diffImage); + form.setOnClose(() -> { + f.dispose(); + future.complete(null); + }); + f.setSize(640, 480); + f.setVisible(true); + }); + future.get(); + } + break Loop; + } + + } + } + } catch (IOException e) { + System.out.println(" Reading failed " + e.getMessage()); + //throw e; + } finally { + // Close the reader + if (in != null) { + in.close(); + } + } + System.out.println(" " + (System.nanoTime() - startTime) / 1e9 + " seconds for " + count + " frames"); + } + + /** + * Compares two images. + *

+ * Returns {@code null} if the two images are pixel-for-pixel equal, or a
+ * difference image in which every mismatching pixel is marked, if they differ.
+ * + * @param expectedImage expected image + * @param actualImage actual image + * @return diffImage or null if images are equal + */ + private static BufferedImage compareImages(BufferedImage expectedImage, BufferedImage actualImage) { + if (expectedImage.getRaster().getDataBuffer() instanceof DataBufferInt) { + return compareImagesInt(expectedImage, actualImage); + } else if (expectedImage.getRaster().getDataBuffer() instanceof DataBufferByte) { + return compareImagesByte(expectedImage, actualImage); + } else if (expectedImage.getRaster().getDataBuffer() instanceof DataBufferShort) { + return compareImagesShort(expectedImage, actualImage); + } else { + throw new UnsupportedOperationException("unsupported data buffer " + expectedImage.getRaster().getDataBuffer().getClass()); + } + } + + private static BufferedImage compareImagesByte(BufferedImage expectedImage, BufferedImage actualImage) { + return null; + } + + private static BufferedImage compareImagesShort(BufferedImage expectedImage, BufferedImage actualImage) { + return null; + } + + private static BufferedImage compareImagesInt(BufferedImage expectedImage, BufferedImage actualImage) { + // convert actual image to same format as expected image + BufferedImage convertedImage = new BufferedImage(actualImage.getWidth(), actualImage.getHeight(), expectedImage.getType()); + Graphics2D g = convertedImage.createGraphics(); + g.drawImage(actualImage, 0, 0, null); + g.dispose(); + + BufferedImage diffImage = new BufferedImage(actualImage.getWidth(), actualImage.getHeight(), expectedImage.getType()); + int[] dDat = ((DataBufferInt) diffImage.getRaster().getDataBuffer()).getData(); + int[] eDat = ((DataBufferInt) expectedImage.getRaster().getDataBuffer()).getData(); + int[] aDat = ((DataBufferInt) convertedImage.getRaster().getDataBuffer()).getData(); + + // Erase alpha bits in expected image and actual image + for (int i = 0; i < eDat.length; i++) { + eDat[i] = eDat[i] & 0xffffff; + aDat[i] = aDat[i] & 0xffffff; + } + 
+ // Search for mismatches + int mismatch = Arrays.mismatch(eDat, 0, eDat.length, aDat, 0, aDat.length); + boolean hasDiff = mismatch != -1; + if (hasDiff) { + System.out.println(" index=" + mismatch + " expected:" + Integer.toHexString(eDat[mismatch]) + " actual: " + Integer.toHexString(aDat[mismatch])); + } + while (mismatch != -1) { + dDat[mismatch] = 0xffff0000; + int result = Arrays.mismatch(eDat, mismatch + 1, eDat.length, aDat, mismatch + 1, aDat.length); + mismatch = result == -1 ? -1 : mismatch + 1 + result; + } + return hasDiff ? diffImage : null; + } + + private static void testWriting(File file, Format format) throws IOException { + System.out.print("Writing " + file.getAbsolutePath()); + long startTime = System.nanoTime(); + + // Make the format more specific + Rational frameRate = new Rational(10, 1); + format = format.prepend(MediaTypeKey, MediaType.VIDEO, // + FrameRateKey, frameRate,// + WidthKey, 640, // + HeightKey, 480); + + // Create a buffered image for this format + BufferedImage img = createImage(format); + Graphics2D g = img.createGraphics(); + g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); + drawBackgroundImage(g); + + MovieWriter out = null; + int n = 3; + try { + // Create the writer + out = Registry.getInstance().getWriter(file); + + // Add a track to the writer + out.addTrack(format); + + // Draw the animation + for (int i = 0; i < n; i++) { + double t = frameRate.divide(i).doubleValue() + CLOCK_START_TIME; + drawAnimationFrame(g, t, i, n); + + // write image to the writer + out.write(0, img, 1); + } + + } finally { + // Close the writer + if (out != null) { + out.close(); + } + + // Dispose the graphics object + g.dispose(); + } + System.out.println(", " + (int) ((n * 1e9) / (System.nanoTime() - startTime)) + " fps"); + } + + private static void drawBackgroundImage(Graphics2D g) throws IOException { + var backgroundImage = ImageIO.read(TestMovieWriters.class.getResource("BackgroundImage.png")); 
+ if (backgroundImage != null) { + g.drawImage(backgroundImage, 0, 0, null); + } + } +} diff --git a/org.monte.demo.screenrecorder/src/main/java/org.monte.demo.screenrecorder/org/monte/demo/screenrecorder/Main.form b/org.monte.demo.screenrecorder/src/main/java/org.monte.demo.screenrecorder/org/monte/demo/screenrecorder/Main.form index 33eb7d1..a241d2e 100755 --- a/org.monte.demo.screenrecorder/src/main/java/org.monte.demo.screenrecorder/org/monte/demo/screenrecorder/Main.form +++ b/org.monte.demo.screenrecorder/src/main/java/org.monte.demo.screenrecorder/org/monte/demo/screenrecorder/Main.form @@ -26,129 +26,17 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + @@ -163,11 +51,21 @@ + + + + + + + + + + @@ -179,6 +77,11 @@ + + + + + @@ -187,6 +90,11 @@ + + + + + @@ -195,11 +103,21 @@ + + + + + + + + + + @@ -211,25 +129,55 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -239,11 +187,21 @@ + + + + + + + + + + @@ -255,6 +213,11 @@ + + + + + @@ -263,17 +226,32 @@ + + + + + + + + + + + + + + + @@ -284,6 +262,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/org.monte.demo.screenrecorder/src/main/java/org.monte.demo.screenrecorder/org/monte/demo/screenrecorder/Main.java b/org.monte.demo.screenrecorder/src/main/java/org.monte.demo.screenrecorder/org/monte/demo/screenrecorder/Main.java index 27949c9..9e076b1 100755 --- a/org.monte.demo.screenrecorder/src/main/java/org.monte.demo.screenrecorder/org/monte/demo/screenrecorder/Main.java +++ b/org.monte.demo.screenrecorder/src/main/java/org.monte.demo.screenrecorder/org/monte/demo/screenrecorder/Main.java @@ -130,7 +130,7 @@ public void stateChanged(ChangeEvent e) { private String format; private String encoding; private int cursor; - private int audioRate; + private String 
audioRate; private int audioSource; private int area; private double screenRate; @@ -283,8 +283,10 @@ public void actionPerformed(ActionEvent e) { mouseRateField.setModel(mouseRateModel); audioSourceChoice.setModel(new DefaultComboBoxModel<>(getAudioSources())); - audioSource = Math.clamp(prefs.getInt("ScreenRecording.audioSource", 0), 0, audioSourceChoice.getItemCount() - 1); + audioSource = MathUtil.clamp(prefs.getInt("ScreenRecording.audioSource", 0), 0, audioSourceChoice.getItemCount() - 1); audioSourceChoice.setSelectedIndex(audioSource); + audioRate = prefs.get("AudioRate", audioRateChoice.getItemAt(0)); + audioRateChoice.setSelectedIndex(findIndex(audioRate, audioRateChoice.getModel())); Dimension customDim = new Dimension(prefs.getInt("ScreenRecording.customAreaWidth", 1024), prefs.getInt("ScreenRecording.customAreaHeight", 768)); @@ -333,6 +335,7 @@ private int findIndex(String value, ComboBoxModel model) { @SuppressWarnings("unchecked") // //GEN-BEGIN:initComponents private void initComponents() { + java.awt.GridBagConstraints gridBagConstraints; formatLabel = new javax.swing.JLabel(); formatChoice = new javax.swing.JComboBox(); @@ -354,6 +357,8 @@ private void initComponents() { stateLabel = new javax.swing.JLabel(); audioSourceLabel = new javax.swing.JLabel(); audioSourceChoice = new javax.swing.JComboBox(); + jLabel1 = new javax.swing.JLabel(); + audioRateChoice = new javax.swing.JComboBox<>(); FormListener formListener = new FormListener(); @@ -361,142 +366,222 @@ private void initComponents() { setTitle("Monte Screen Recorder"); setResizable(false); addWindowListener(formListener); + getContentPane().setLayout(new java.awt.GridBagLayout()); formatLabel.setText("Format:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.gridwidth = 5; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(9, 6, 
0, 0); + getContentPane().add(formatLabel, gridBagConstraints); formatChoice.setModel(new javax.swing.DefaultComboBoxModel(new String[]{"MP4", "AVI", "QuickTime"})); formatChoice.addActionListener(formListener); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 5; + gridBagConstraints.gridy = 0; + gridBagConstraints.gridheight = 2; + gridBagConstraints.ipadx = 13; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(6, 18, 0, 0); + getContentPane().add(formatChoice, gridBagConstraints); colorsLabel.setText("Colors:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 4; + gridBagConstraints.gridwidth = 3; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(4, 6, 0, 0); + getContentPane().add(colorsLabel, gridBagConstraints); colorsChoice.setModel(new javax.swing.DefaultComboBoxModel(new String[]{"Hundreds", "Thousands", "Millions"})); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 5; + gridBagConstraints.gridy = 4; + gridBagConstraints.gridheight = 2; + gridBagConstraints.ipadx = 11; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(1, 18, 0, 0); + getContentPane().add(colorsChoice, gridBagConstraints); infoLabel.setFont(new java.awt.Font("Lucida Grande", 0, 11)); // NOI18N infoLabel.setText("The recording will be stored in the folder \"Movies\".
Drop a folder on this text to change the storage location.
A new file will be created every hour or when the file size limit is reached.

This window will be minimized before the recording starts.
To stop the recording, restore this window and press the Stop button.
"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 16; + gridBagConstraints.gridwidth = 82; + gridBagConstraints.ipadx = 363; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(18, 6, 0, 6); + getContentPane().add(infoLabel, gridBagConstraints); startStopButton.setText("Start"); startStopButton.addActionListener(formListener); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 33; + gridBagConstraints.gridy = 17; + gridBagConstraints.gridwidth = 48; + gridBagConstraints.gridheight = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(6, 12, 6, 0); + getContentPane().add(startStopButton, gridBagConstraints); mouseLabel.setText("Mouse:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 8; + gridBagConstraints.gridwidth = 4; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(9, 6, 0, 0); + getContentPane().add(mouseLabel, gridBagConstraints); cursorChoice.setModel(new javax.swing.DefaultComboBoxModel(new String[]{"No Cursor", "Black Cursor", "White Cursor"})); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 5; + gridBagConstraints.gridy = 8; + gridBagConstraints.gridheight = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(6, 18, 0, 0); + getContentPane().add(cursorChoice, gridBagConstraints); screenRateLabel.setText("Screen Rate:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 6; + gridBagConstraints.gridy = 4; + gridBagConstraints.gridwidth = 6; + gridBagConstraints.gridheight = 3; + gridBagConstraints.anchor = 
java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(9, 18, 0, 0); + getContentPane().add(screenRateLabel, gridBagConstraints); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 15; + gridBagConstraints.gridy = 4; + gridBagConstraints.gridwidth = 37; + gridBagConstraints.gridheight = 4; + gridBagConstraints.ipadx = 13; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(6, 13, 0, 0); + getContentPane().add(screenRateField, gridBagConstraints); mouseRateLabel.setText("Mouse Rate:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 8; + gridBagConstraints.gridy = 8; + gridBagConstraints.gridwidth = 4; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(9, 0, 0, 0); + getContentPane().add(mouseRateLabel, gridBagConstraints); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 15; + gridBagConstraints.gridy = 8; + gridBagConstraints.gridwidth = 37; + gridBagConstraints.gridheight = 2; + gridBagConstraints.ipadx = 13; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(6, 13, 0, 0); + getContentPane().add(mouseRateField, gridBagConstraints); encodingLabel.setText("Encoding:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 6; + gridBagConstraints.gridy = 0; + gridBagConstraints.gridwidth = 3; + gridBagConstraints.gridheight = 3; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(14, 18, 0, 0); + getContentPane().add(encodingLabel, gridBagConstraints); encodingChoice.setModel(new javax.swing.DefaultComboBoxModel(new String[]{"H264"})); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 15; 
+ gridBagConstraints.gridy = 0; + gridBagConstraints.gridwidth = 19; + gridBagConstraints.gridheight = 4; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(11, 13, 0, 0); + getContentPane().add(encodingChoice, gridBagConstraints); areaLabel.setText("Area:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 14; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(9, 6, 0, 0); + getContentPane().add(areaLabel, gridBagConstraints); areaChoice.setModel(new javax.swing.DefaultComboBoxModel(new String[]{"Entire Screen", "0 0, 1024 x 768", " "})); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 5; + gridBagConstraints.gridy = 14; + gridBagConstraints.gridwidth = 2; + gridBagConstraints.gridheight = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(6, 18, 0, 0); + getContentPane().add(areaChoice, gridBagConstraints); selectAreaButton.setText("Custom Area..."); selectAreaButton.addActionListener(formListener); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 8; + gridBagConstraints.gridy = 14; + gridBagConstraints.gridwidth = 8; + gridBagConstraints.gridheight = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(6, 6, 0, 0); + getContentPane().add(selectAreaButton, gridBagConstraints); stateLabel.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); stateLabel.setText(" "); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 5; + gridBagConstraints.gridy = 17; + gridBagConstraints.gridwidth = 18; + gridBagConstraints.ipadx = 186; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + 
gridBagConstraints.insets = new java.awt.Insets(9, 94, 0, 0); + getContentPane().add(stateLabel, gridBagConstraints); audioSourceLabel.setText("Audio:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 10; + gridBagConstraints.gridwidth = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(15, 6, 0, 0); + getContentPane().add(audioSourceLabel, gridBagConstraints); audioSourceChoice.setModel(new javax.swing.DefaultComboBoxModel(new String[]{"No Audio", "44.100 kHz"})); - - javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); - getContentPane().setLayout(layout); - layout.setHorizontalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addContainerGap() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() - .addComponent(stateLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 190, javax.swing.GroupLayout.PREFERRED_SIZE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addComponent(startStopButton) - .addGap(10, 10, 10)) - .addGroup(layout.createSequentialGroup() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(colorsLabel) - .addComponent(mouseLabel) - .addComponent(formatLabel) - .addComponent(areaLabel) - .addComponent(audioSourceLabel)) - .addGap(18, 18, 18) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) - .addGroup(layout.createSequentialGroup() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(formatChoice, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(colorsChoice, 
javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(cursorChoice, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addGap(18, 18, 18) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(screenRateLabel, javax.swing.GroupLayout.Alignment.TRAILING) - .addComponent(mouseRateLabel, javax.swing.GroupLayout.Alignment.TRAILING)) - .addComponent(encodingLabel)) - .addGap(18, 18, 18) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(encodingChoice, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(screenRateField, javax.swing.GroupLayout.PREFERRED_SIZE, 77, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(mouseRateField, javax.swing.GroupLayout.PREFERRED_SIZE, 77, javax.swing.GroupLayout.PREFERRED_SIZE))) - .addGroup(layout.createSequentialGroup() - .addComponent(areaChoice, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(selectAreaButton)) - .addComponent(audioSourceChoice, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) - .addComponent(infoLabel)) - .addContainerGap()) - ); - - layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[]{colorsChoice, cursorChoice, formatChoice}); - - layout.setVerticalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addContainerGap() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) - .addGroup(layout.createSequentialGroup() - 
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(formatChoice, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(formatLabel)) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(colorsChoice, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(colorsLabel)) - .addGap(11, 11, 11) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(cursorChoice, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(mouseLabel))) - .addGroup(layout.createSequentialGroup() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(encodingChoice, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(encodingLabel)) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) - .addComponent(screenRateField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(screenRateLabel)) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) - .addComponent(mouseRateField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(mouseRateLabel)))) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - 
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(audioSourceLabel) - .addComponent(audioSourceChoice, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(areaChoice, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(areaLabel) - .addComponent(selectAreaButton)) - .addGap(18, 18, 18) - .addComponent(infoLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 8, Short.MAX_VALUE) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(startStopButton) - .addComponent(stateLabel)) - .addContainerGap()) - ); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 5; + gridBagConstraints.gridy = 10; + gridBagConstraints.gridwidth = 47; + gridBagConstraints.gridheight = 2; + gridBagConstraints.ipadx = 198; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(12, 18, 0, 0); + getContentPane().add(audioSourceChoice, gridBagConstraints); + + jLabel1.setText("Sample Rate:"); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 8; + gridBagConstraints.gridy = 12; + gridBagConstraints.gridwidth = 7; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(9, 0, 0, 0); + getContentPane().add(jLabel1, gridBagConstraints); + + audioRateChoice.setModel(new javax.swing.DefaultComboBoxModel<>(new String[]{"48000", "44100"})); + gridBagConstraints = 
new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 15; + gridBagConstraints.gridy = 12; + gridBagConstraints.gridwidth = 37; + gridBagConstraints.gridheight = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(6, 18, 0, 0); + getContentPane().add(audioRateChoice, gridBagConstraints); pack(); } @@ -549,11 +634,10 @@ private static Vector getAudioSources() { for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) { Mixer mixer = AudioSystem.getMixer(mixerInfo); for (Line.Info targetLineInfo : mixer.getTargetLineInfo()) { - if (targetLineInfo instanceof DataLine.Info dlInfo) { + if (targetLineInfo instanceof DataLine.Info) { + DataLine.Info dlInfo = (DataLine.Info) targetLineInfo; for (AudioFormat format : dlInfo.getFormats()) { - if (format.getEncoding() == AudioFormat.Encoding.PCM_SIGNED - && !format.isBigEndian() - && format.getSampleRate() != AudioSystem.NOT_SPECIFIED) { + if (format.getEncoding() == AudioFormat.Encoding.PCM_SIGNED) { l.add(new AudioSourceItem( mixerInfo.getName() + ", " + format, mixerInfo, format)); @@ -577,6 +661,8 @@ private void updateValues() { prefs.putInt("ScreenRecording.cursor", cursor); audioSource = audioSourceChoice.getSelectedIndex(); prefs.putInt("ScreenRecording.audioSource", audioSource); + audioRate = (String) audioRateChoice.getSelectedItem(); + prefs.put("ScreenRecording.audioRate", audioRate); area = areaChoice.getSelectedIndex(); prefs.putInt("ScreenRecording.area", area); if (screenRateField.getValue() instanceof Double) { @@ -623,6 +709,10 @@ private void start() throws IOException, AWTException { case FMT_AVI: mimeType = MIME_AVI; switch (encoding) { + case ENC_H264: + videoFormatName = compressorName = ENCODING_AVC1; + bitDepth = 24; + break; case ENC_SCREEN_CAPTURE: default: videoFormatName = compressorName = ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE; @@ -655,6 +745,10 @@ private void start() throws IOException, AWTException { case 
FMT_QUICKTIME: mimeType = MIME_QUICKTIME; switch (encoding) { + case ENC_H264: + videoFormatName = compressorName = ENCODING_AVC1; + bitDepth = 24; + break; case ENC_SCREEN_CAPTURE: default: if (bitDepth == 8) { @@ -705,6 +799,9 @@ private void start() throws IOException, AWTException { mixerInfo = src.mixerInfo; AudioFormat srcFormat = src.format; audioRate = (int) srcFormat.getSampleRate(); + if (audioRate <= 0) { + audioRate = Integer.parseInt((String) audioRateChoice.getSelectedItem()); + } audioBitsPerSample = srcFormat.getSampleSizeInBits(); audioChannels = srcFormat.getChannels(); audioByteOrder = srcFormat.isBigEndian() ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN; @@ -821,7 +918,8 @@ private void recordingFailed() { } private void updateEncodingChoice() { - int index = encodingChoice.getSelectedIndex(); + String encodingItem = (String) encodingChoice.getSelectedItem(); + String colorItem = (String) colorsChoice.getSelectedItem(); switch ((String) formatChoice.getSelectedItem()) { case FMT_MP4: colorsChoice.setModel( @@ -833,16 +931,17 @@ private void updateEncodingChoice() { colorsChoice.setModel( new javax.swing.DefaultComboBoxModel<>(new String[]{COLORS_HUNDREDS, COLORS_THOUSANDS, COLORS_MILLIONS})); encodingChoice.setModel( - new javax.swing.DefaultComboBoxModel<>(new String[]{ENC_SCREEN_CAPTURE, ENC_RUN_LENGTH, ENC_NONE, ENC_PNG, ENC_JPEG_100PERCENT, ENC_JPEG_50PERCENT})); + new javax.swing.DefaultComboBoxModel<>(new String[]{ENC_H264, ENC_SCREEN_CAPTURE, ENC_RUN_LENGTH, ENC_NONE, ENC_PNG, ENC_JPEG_100PERCENT, ENC_JPEG_50PERCENT})); break; case FMT_QUICKTIME: colorsChoice.setModel( new javax.swing.DefaultComboBoxModel<>(new String[]{COLORS_HUNDREDS, COLORS_THOUSANDS, COLORS_MILLIONS})); encodingChoice.setModel( - new javax.swing.DefaultComboBoxModel<>(new String[]{ENC_SCREEN_CAPTURE, ENC_ANIMATION, ENC_NONE, ENC_PNG, ENC_JPEG_100PERCENT, ENC_JPEG_50PERCENT})); + new javax.swing.DefaultComboBoxModel<>(new String[]{ENC_H264, ENC_SCREEN_CAPTURE, 
ENC_ANIMATION, ENC_NONE, ENC_PNG, ENC_JPEG_100PERCENT, ENC_JPEG_50PERCENT})); break; } - encodingChoice.setSelectedIndex(index); + colorsChoice.setSelectedIndex(findIndex(colorItem, colorsChoice.getModel())); + encodingChoice.setSelectedIndex(findIndex(encodingItem, encodingChoice.getModel())); } private void startStopPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_startStopPerformed @@ -937,6 +1036,7 @@ public void run() { // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JComboBox areaChoice; private javax.swing.JLabel areaLabel; + private javax.swing.JComboBox audioRateChoice; private javax.swing.JComboBox audioSourceChoice; private javax.swing.JLabel audioSourceLabel; private javax.swing.JComboBox colorsChoice; @@ -947,6 +1047,7 @@ public void run() { private javax.swing.JComboBox formatChoice; private javax.swing.JLabel formatLabel; private javax.swing.JLabel infoLabel; + private javax.swing.JLabel jLabel1; private javax.swing.JLabel mouseLabel; private javax.swing.JSpinner mouseRateField; private javax.swing.JLabel mouseRateLabel; diff --git a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/amigabitmap/codec/video/AmigaBitmapCodec.java b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/amigabitmap/codec/video/AmigaBitmapCodec.java index 70de90e..55d3d88 100755 --- a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/amigabitmap/codec/video/AmigaBitmapCodec.java +++ b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/amigabitmap/codec/video/AmigaBitmapCodec.java @@ -100,7 +100,7 @@ private int encodeToBitmapImage(Buffer in, Buffer out) { try { ILBMDecoder d = new ILBMDecoder(ins); ArrayList imgs = d.produceBitmaps(); - AmigaBitmapImage img = imgs.getFirst(); + AmigaBitmapImage img = imgs.get(0); out.data = img; success = true; } catch (IOException e) { @@ -114,7 +114,7 @@ private 
int encodeToBitmapImage(Buffer in, Buffer out) { try { PBMDecoder d = new PBMDecoder(ins); ArrayList imgs = d.produce(); - ColorCyclingMemoryImageSource mis = imgs.getFirst(); + ColorCyclingMemoryImageSource mis = imgs.get(0); out.data = AmigaBitmapImageFactory.toBitmapImage(mis); success = true; @@ -159,7 +159,7 @@ private int encodeToBufferedImage(Buffer in, Buffer out) { try { ILBMDecoder d = new ILBMDecoder(ins); ArrayList imgs = d.produceBitmaps(); - AmigaBitmapImage img = imgs.getFirst(); + AmigaBitmapImage img = imgs.get(0); out.data = AmigaBitmapImageFactory.toBufferedImage(img); success = true; } catch (IOException e) { @@ -173,7 +173,7 @@ private int encodeToBufferedImage(Buffer in, Buffer out) { try { PBMDecoder d = new PBMDecoder(ins); ArrayList imgs = d.produce(); - ColorCyclingMemoryImageSource mis = imgs.getFirst(); + ColorCyclingMemoryImageSource mis = imgs.get(0); out.data = AmigaBitmapImageFactory.toBufferedImage( AmigaBitmapImageFactory.toBitmapImage(mis)); success = true; diff --git a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/ANIMMovieResources.java b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/ANIMMovieResources.java index 2ddb483..0be64a0 100755 --- a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/ANIMMovieResources.java +++ b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/ANIMMovieResources.java @@ -169,7 +169,7 @@ public boolean isPlayWrapupFrames() { * double buffering. */ public int getInterleave() { - return frames.size() > 0 && frames.getLast().getInterleave() == 1 ? 1 : 2; + return frames.size() > 0 && frames.get(frames.size() - 1).getInterleave() == 1 ? 
1 : 2; } diff --git a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/ANIMOutputStream.java b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/ANIMOutputStream.java index ab69a92..7bd50f6 100755 --- a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/ANIMOutputStream.java +++ b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/ANIMOutputStream.java @@ -7,7 +7,7 @@ import org.monte.media.amigabitmap.AmigaBitmapImage; import org.monte.media.amigabitmap.AmigaDisplayInfo; import org.monte.media.iff.IFFOutputStream; -import org.monte.media.io.SeekableByteArrayOutputStream; +import org.monte.media.io.ByteArrayImageOutputStream; import javax.imageio.stream.FileImageOutputStream; import javax.imageio.stream.ImageOutputStream; @@ -211,7 +211,7 @@ private void ensureOpen() throws IOException { } /** - * Sets the state of the QuickTimeWriter to started. + * Sets the state of the ANIMOutputStream to started. *

* If the state is changed by this method, the prolog is written. */ @@ -523,7 +523,7 @@ private void writeDLTA(IFFOutputStream out, AmigaBitmapImage img, AmigaBitmapIma byte[] data = img.getBitmap(); byte[] prevData = prev.getBitmap(); - SeekableByteArrayOutputStream buf = new SeekableByteArrayOutputStream(); + ByteArrayImageOutputStream buf = new ByteArrayImageOutputStream(); // Buffers for a theoretical maximum of 16 planes. byte[][] planes = new byte[16][0]; @@ -592,7 +592,7 @@ private void writeDLTA(IFFOutputStream out, AmigaBitmapImage img, AmigaBitmapIma * @param step * @throws IOException */ - private void writeByteVertical(SeekableByteArrayOutputStream out, byte[] data, byte[] prev, int offset, int length, int step) throws IOException { + private void writeByteVertical(ImageOutputStream out, byte[] data, byte[] prev, int offset, int length, int step) throws IOException { int opCount = 0; // Reserve space for opCount in the stream diff --git a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/AmigaVideoFormatKeys.java b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/AmigaVideoFormatKeys.java index 3748ed8..d5adbbd 100755 --- a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/AmigaVideoFormatKeys.java +++ b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/anim/AmigaVideoFormatKeys.java @@ -131,8 +131,8 @@ private interface InfGetter { private static void reduceListRational(Rational value, InfGetter g, ArrayList infs) { ArrayList bestInfs = new ArrayList<>(); - bestInfs.add(infs.getFirst()); - float bestCost = g.get(infs.getFirst()).subtract(value).floatValue(); + bestInfs.add(infs.get(0)); + float bestCost = g.get(infs.get(0)).subtract(value).floatValue(); bestCost *= bestCost; for (Iterator i = infs.iterator(); i.hasNext(); ) { AmigaDisplayInfo inf = i.next(); @@ -156,8 +156,8 @@ private static void 
reduceListRational(Rational value, InfGetter g, Ar private static void reduceListInteger(int value, InfGetter g, ArrayList infs) { ArrayList bestInfs = new ArrayList<>(); - bestInfs.add(infs.getFirst()); - float bestCost = g.get(infs.getFirst()) - value; + bestInfs.add(infs.get(0)); + float bestCost = g.get(infs.get(0)) - value; bestCost *= bestCost; for (Iterator i = infs.iterator(); i.hasNext(); ) { AmigaDisplayInfo inf = i.next(); diff --git a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/iff/MC68000InputStream.java b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/iff/MC68000InputStream.java index e7bfe01..8ad1fd1 100755 --- a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/iff/MC68000InputStream.java +++ b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/iff/MC68000InputStream.java @@ -4,7 +4,7 @@ */ package org.monte.media.iff; -import org.monte.media.io.ByteArray; +import org.monte.media.util.ByteArrays; import java.io.EOFException; import java.io.FilterInputStream; @@ -57,7 +57,7 @@ public int readUBYTE() public short readWORD() throws IOException { readFully(byteBuffer, 0, 2); - return ByteArray.getShortBE(byteBuffer, 0); + return ByteArrays.getShortBE(byteBuffer, 0); } /** @@ -76,7 +76,7 @@ public int readUWORD() public int readLONG() throws IOException { readFully(byteBuffer, 0, 4); - return ByteArray.getIntBE(byteBuffer, 0); + return ByteArrays.getIntBE(byteBuffer, 0); } /** @@ -86,7 +86,7 @@ public int readLONG() public long readINT64() throws IOException { readFully(byteBuffer, 0, 8); - return ByteArray.getLongBE(byteBuffer, 0); + return ByteArrays.getLongBE(byteBuffer, 0); } /** diff --git a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/iff/MC68000OutputStream.java 
b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/iff/MC68000OutputStream.java index 428968a..01e3861 100755 --- a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/iff/MC68000OutputStream.java +++ b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/iff/MC68000OutputStream.java @@ -4,7 +4,7 @@ */ package org.monte.media.iff; -import org.monte.media.io.ByteArray; +import org.monte.media.util.ByteArrays; import java.io.FilterOutputStream; import java.io.IOException; @@ -33,25 +33,25 @@ public MC68000OutputStream(OutputStream out) { } public void writeLONG(int v) throws IOException { - ByteArray.setIntBE(byteBuffer, 0, v); + ByteArrays.setIntBE(byteBuffer, 0, v); out.write(byteBuffer, 0, 4); incCount(4); } public void writeULONG(long v) throws IOException { - ByteArray.setIntBE(byteBuffer, 0, (int) v); + ByteArrays.setIntBE(byteBuffer, 0, (int) v); out.write(byteBuffer, 0, 4); incCount(4); } public void writeWORD(int v) throws IOException { - ByteArray.setShortBE(byteBuffer, 0, (short) v); + ByteArrays.setShortBE(byteBuffer, 0, (short) v); out.write(byteBuffer, 0, 2); incCount(2); } public void writeUWORD(int v) throws IOException { - ByteArray.setShortBE(byteBuffer, 0, (short) v); + ByteArrays.setShortBE(byteBuffer, 0, (short) v); out.write(byteBuffer, 0, 2); incCount(2); } @@ -178,7 +178,6 @@ public void writeType(String s) throws IOException { * If the counter overflows, it will be wrapped to Integer.MAX_VALUE. * * @return the value of the written field. 
- * @see java.io.DataOutputStream#written */ public final long size() { return written; diff --git a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/pbm/PBMPlayer.java b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/pbm/PBMPlayer.java index a51e954..3eb1620 100755 --- a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/pbm/PBMPlayer.java +++ b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/pbm/PBMPlayer.java @@ -111,7 +111,7 @@ protected void doRealizing() { if (track.size() == 0) { setTargetState(CLOSED); } else { - memoryImage = track.getFirst(); + memoryImage = track.get(0); memoryImage.setAnimated(true); if (memoryImage.isColorCyclingAvailable()) { propertyChangeSupport.firePropertyChange("colorCyclingAvailable", false, true); diff --git a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/seq/SEQMovieTrack.java b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/seq/SEQMovieTrack.java index 96513f7..1e126c1 100755 --- a/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/seq/SEQMovieTrack.java +++ b/org.monte.media.amigaatari/src/main/java/org.monte.media.amigaatari/org/monte/media/seq/SEQMovieTrack.java @@ -170,7 +170,7 @@ public boolean isPlayWrapupFrames() { * double buffering. */ public int getInterleave() { - return frames.size() > 0 && frames.getLast().getInterleave() == 1 ? 1 : 2; + return frames.size() > 0 && frames.get(frames.size() - 1).getInterleave() == 1 ? 
1 : 2; } diff --git a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/META-INF/services/org.monte.media.av.CodecSpi b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/META-INF/services/org.monte.media.av.CodecSpi index 9bd7e52..49ef2e0 100644 --- a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/META-INF/services/org.monte.media.av.CodecSpi +++ b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/META-INF/services/org.monte.media.av.CodecSpi @@ -1,2 +1,2 @@ -org.monte.media.jcodec.codec.PictureCodecSpi -org.monte.media.jcodec.codec.H264CodecSpi +org.monte.media.jcodec.codec.JCodecPictureCodecSpi +org.monte.media.jcodec.codec.JCodecH264CodecSpi diff --git a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/META-INF/services/org.monte.media.av.MovieWriterSpi b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/META-INF/services/org.monte.media.av.MovieWriterSpi index 92cc22d..57fe9bd 100644 --- a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/META-INF/services/org.monte.media.av.MovieWriterSpi +++ b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/META-INF/services/org.monte.media.av.MovieWriterSpi @@ -1 +1 @@ -org.monte.media.jcodec.mp4.MP4WriterSpi \ No newline at end of file +org.monte.media.jcodec.mp4.JCodecMP4WriterSpi \ No newline at end of file diff --git a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/module-info.java b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/module-info.java index 80bada1..9c87f24 100755 --- a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/module-info.java +++ b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/module-info.java @@ -2,7 +2,9 @@ * Copyright © 2017 Werner Randelshofer, Switzerland. MIT License. 
*/ -import org.monte.media.jcodec.mp4.MP4WriterSpi; +import org.monte.media.jcodec.codec.JCodecH264CodecSpi; +import org.monte.media.jcodec.codec.JCodecPictureCodecSpi; +import org.monte.media.jcodec.mp4.JCodecMP4WriterSpi; /** * Provides a movie writer that uses the JCodec library. @@ -16,6 +18,6 @@ requires org.monte.media; - provides org.monte.media.av.MovieWriterSpi with MP4WriterSpi; - provides org.monte.media.av.CodecSpi with org.monte.media.jcodec.codec.PictureCodecSpi, org.monte.media.jcodec.codec.H264CodecSpi; + provides org.monte.media.av.MovieWriterSpi with JCodecMP4WriterSpi; + provides org.monte.media.av.CodecSpi with JCodecPictureCodecSpi, JCodecH264CodecSpi; } diff --git a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/H264Codec.java b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecH264Codec.java similarity index 74% rename from org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/H264Codec.java rename to org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecH264Codec.java index ca0a7ca..48354f6 100644 --- a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/H264Codec.java +++ b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecH264Codec.java @@ -1,5 +1,5 @@ /* - * @(#)H264Codec.java + * @(#)JCodecH264Codec.java * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. 
*/ @@ -8,6 +8,8 @@ import org.jcodec.api.transcode.PixelStore; import org.jcodec.api.transcode.VideoFrameWithPacket; import org.jcodec.codecs.h264.H264Encoder; +import org.jcodec.codecs.h264.H264Utils; +import org.jcodec.codecs.h264.io.model.SeqParameterSet; import org.jcodec.common.VideoEncoder; import org.jcodec.common.io.NIOUtils; import org.jcodec.common.model.ColorSpace; @@ -18,9 +20,17 @@ import org.monte.media.av.FormatKeys; import org.monte.media.av.codec.video.AbstractVideoCodec; import org.monte.media.jcodec.impl.AWTUtil; +import org.monte.media.qtff.AvcDecoderConfigurationRecord; +import org.monte.media.util.ArrayUtil; +import org.monte.media.util.ByteArray; import java.awt.image.BufferedImage; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.function.Function; +import java.util.stream.Collectors; import static org.monte.media.av.BufferFlag.DISCARD; import static org.monte.media.av.BufferFlag.KEYFRAME; @@ -38,16 +48,16 @@ import static org.monte.media.av.codec.video.VideoFormatKeys.HeightKey; import static org.monte.media.av.codec.video.VideoFormatKeys.MotionSearchRangeKey; import static org.monte.media.av.codec.video.VideoFormatKeys.WidthKey; -import static org.monte.media.jcodec.codec.PictureCodec.ENCODING_PICTURE; +import static org.monte.media.jcodec.codec.JCodecPictureCodec.ENCODING_PICTURE; /** * Codec for {@link Picture} to {@code H264} byte array. 
*/ -public class H264Codec extends AbstractVideoCodec { +public class JCodecH264Codec extends AbstractVideoCodec { private VideoEncoder videoEncoder = null; private ByteBuffer byteBuffer; - public H264Codec() { + public JCodecH264Codec() { super(new Format[]{ new Format(MediaTypeKey, FormatKeys.MediaType.VIDEO, EncodingKey, ENCODING_BUFFERED_IMAGE, @@ -136,13 +146,29 @@ public int encode(Buffer in, Buffer out) { byteBuffer = ByteBuffer.allocate(bufferSize); } byteBuffer.clear(); - VideoEncoder.EncodedFrame encodeFrame = enc.encodeFrame(picture, byteBuffer); - outputVideoPacket = Packet.createPacketWithData(videoFrame.getPacket(), NIOUtils.clone(encodeFrame.getData())); - outputVideoPacket.setFrameType(encodeFrame.isKeyFrame() ? Packet.FrameType.KEY : Packet.FrameType.INTER); + VideoEncoder.EncodedFrame encodedFrame = enc.encodeFrame(picture, byteBuffer); + outputVideoPacket = Packet.createPacketWithData(videoFrame.getPacket(), NIOUtils.clone(encodedFrame.getData())); + outputVideoPacket.setFrameType(encodedFrame.isKeyFrame() ? 
Packet.FrameType.KEY : Packet.FrameType.INTER); + + // compute header + out.header = null; + if (encodedFrame.isKeyFrame()) { + List spsList = new ArrayList<>(); + List ppsList = new ArrayList<>(); + H264Utils.wipePSinplace(outputVideoPacket.data, spsList, ppsList); + if (!spsList.isEmpty()) { + SeqParameterSet p = H264Utils.readSPS(spsList.get(0)); + Function byteBufferFunction = b -> new ByteArray(ArrayUtil.copyOf(b.array(), b.arrayOffset(), b.remaining())); + out.header = new AvcDecoderConfigurationRecord(p.profileIdc, 0, p.levelIdc, 4, + spsList.stream().map(byteBufferFunction).collect(Collectors.toCollection(LinkedHashSet::new)), + ppsList.stream().map(byteBufferFunction).collect(Collectors.toCollection(LinkedHashSet::new))); + } + } - out.setFlag(KEYFRAME, encodeFrame.isKeyFrame()); + out.setFlag(KEYFRAME, encodedFrame.isKeyFrame()); ByteBuffer packetBuf = outputVideoPacket.data; - if (out.data instanceof byte[] byteArray && byteArray.length >= packetBuf.remaining()) { + if (out.data instanceof byte[] && ((byte[]) out.data).length >= packetBuf.remaining()) { + byte[] byteArray = (byte[]) out.data; System.arraycopy(packetBuf.array(), packetBuf.position(), byteArray, 0, packetBuf.remaining()); out.offset = 0; out.length = packetBuf.remaining(); @@ -166,16 +192,12 @@ private VideoEncoder getEncoder(Format outputFormat) { } private Picture getPicture(Buffer buf) { - switch (buf.data) { - case BufferedImage img -> { - - return AWTUtil.fromBufferedImage(img, ColorSpace.YUV420J); - } - case Picture picture -> { - return picture; - } - case null, default -> { - } + if (buf.data instanceof BufferedImage) { + BufferedImage img = (BufferedImage) buf.data; + return AWTUtil.fromBufferedImage(img, ColorSpace.YUV420J); + } else if (buf.data instanceof Picture) { + Picture picture = (Picture) buf.data; + return picture; } return null; } diff --git a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/H264CodecSpi.java 
b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecH264CodecSpi.java similarity index 66% rename from org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/H264CodecSpi.java rename to org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecH264CodecSpi.java index 9fe15af..fbe09e7 100755 --- a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/H264CodecSpi.java +++ b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecH264CodecSpi.java @@ -12,11 +12,11 @@ * * @author Werner Randelshofer */ -public class H264CodecSpi implements CodecSpi { +public class JCodecH264CodecSpi implements CodecSpi { @Override - public H264Codec create() { - return new H264Codec(); + public JCodecH264Codec create() { + return new JCodecH264Codec(); } } diff --git a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/PictureCodec.java b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecPictureCodec.java similarity index 88% rename from org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/PictureCodec.java rename to org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecPictureCodec.java index e0e91f4..e3d1255 100644 --- a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/PictureCodec.java +++ b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecPictureCodec.java @@ -1,5 +1,5 @@ /* - * @(#)PictureCodec.java + * @(#)JCodecPictureCodec.java * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. */ @@ -24,10 +24,10 @@ /** * Codec for {@link BufferedImage} to/from {@link Picture}. 
*/ -public class PictureCodec extends AbstractCodec { +public class JCodecPictureCodec extends AbstractCodec { public static final String ENCODING_PICTURE = "picture"; - public PictureCodec() { + public JCodecPictureCodec() { super(new Format[]{ new Format(MediaTypeKey, FormatKeys.MediaType.VIDEO, EncodingKey, ENCODING_BUFFERED_IMAGE, @@ -48,7 +48,8 @@ public int process(Buffer in, Buffer out) { if (in.isFlag(BufferFlag.DISCARD)) { return CODEC_OK; } - if (in.data instanceof BufferedImage img) { + if (in.data instanceof BufferedImage) { + BufferedImage img = (BufferedImage) in.data; out.data = AWTUtil.fromBufferedImage(img, ColorSpace.YUV420J); return CODEC_OK; } diff --git a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/PictureCodecSpi.java b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecPictureCodecSpi.java similarity index 65% rename from org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/PictureCodecSpi.java rename to org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecPictureCodecSpi.java index 2d34ee7..cc5ca9f 100755 --- a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/PictureCodecSpi.java +++ b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/codec/JCodecPictureCodecSpi.java @@ -12,11 +12,11 @@ * * @author Werner Randelshofer */ -public class PictureCodecSpi implements CodecSpi { +public class JCodecPictureCodecSpi implements CodecSpi { @Override - public PictureCodec create() { - return new PictureCodec(); + public JCodecPictureCodec create() { + return new JCodecPictureCodec(); } } diff --git a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/impl/SequenceEncoderEx.java 
b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/impl/SequenceEncoderEx.java index 14e959e..7596163 100644 --- a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/impl/SequenceEncoderEx.java +++ b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/impl/SequenceEncoderEx.java @@ -92,7 +92,8 @@ public void setFps(Rational fps) { } public void setMotionSearchRange(int newValue) { - if (sink.getVideoEncoder() instanceof H264Encoder e) { + if (sink.getVideoEncoder() instanceof H264Encoder) { + H264Encoder e = (H264Encoder) sink.getVideoEncoder(); e.setMotionSearchRange(newValue); } } diff --git a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/MP4Writer.java b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/JCodecMP4Writer.java similarity index 97% rename from org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/MP4Writer.java rename to org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/JCodecMP4Writer.java index 52e725d..cb6f996 100644 --- a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/MP4Writer.java +++ b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/JCodecMP4Writer.java @@ -1,5 +1,5 @@ /* - * @(#)MP4Writer.java + * @(#)JCodecMP4Writer.java * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. 
*/ @@ -44,7 +44,6 @@ import static org.jcodec.common.Codec.H264; import static org.monte.media.av.BufferFlag.DISCARD; -import static org.monte.media.av.BufferFlag.KEYFRAME; import static org.monte.media.av.FormatKeys.EncodingKey; import static org.monte.media.av.FormatKeys.FrameRateKey; import static org.monte.media.av.FormatKeys.KeyFrameIntervalKey; @@ -64,9 +63,9 @@ import static org.monte.media.av.codec.video.VideoFormatKeys.MotionSearchRangeKey; import static org.monte.media.av.codec.video.VideoFormatKeys.QualityKey; import static org.monte.media.av.codec.video.VideoFormatKeys.WidthKey; -import static org.monte.media.jcodec.codec.PictureCodec.ENCODING_PICTURE; +import static org.monte.media.jcodec.codec.JCodecPictureCodec.ENCODING_PICTURE; -public class MP4Writer implements MovieWriter { +public class JCodecMP4Writer implements MovieWriter { public final static Format MP4 = new Format(MediaTypeKey, FormatKeys.MediaType.FILE, MimeTypeKey, MIME_MP4); private final List tracks = new ArrayList<>(); private Muxer muxer; @@ -86,12 +85,12 @@ private static class Track { private VideoEncoder videoEncoder; } - public MP4Writer(File file) throws IOException { + public JCodecMP4Writer(File file) throws IOException { destStream = NIOUtils.writableChannel(file); muxer = MP4Muxer.createMP4MuxerToChannel(destStream); } - public MP4Writer(FileOutputStream out) throws IOException { + public JCodecMP4Writer(FileOutputStream out) throws IOException { destStream = new FileChannelWrapper(out.getChannel()); muxer = MP4Muxer.createMP4MuxerToChannel(destStream); } @@ -219,8 +218,6 @@ public void write(int track, Buffer buf) throws IOException { Track tr = tracks.get(track); - boolean isKeyframe = buf.flags.contains(KEYFRAME); - // Encode sample data // We got here, because the buffer format does not match the track diff --git a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/MP4WriterSpi.java 
b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/JCodecMP4WriterSpi.java similarity index 82% rename from org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/MP4WriterSpi.java rename to org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/JCodecMP4WriterSpi.java index fd7904b..6277b15 100644 --- a/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/MP4WriterSpi.java +++ b/org.monte.media.jcodec/src/main/java/org.monte.media.jcodec/org/monte/media/jcodec/mp4/JCodecMP4WriterSpi.java @@ -1,5 +1,5 @@ /* - * @(#)MP4WriterSpi.java + * @(#)JCodecMP4WriterSpi.java * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. */ @@ -16,16 +16,16 @@ import java.util.Collections; import java.util.List; -public class MP4WriterSpi implements MovieWriterSpi { +public class JCodecMP4WriterSpi implements MovieWriterSpi { private final static List extensions = Collections.unmodifiableList(Arrays.asList(new String[]{"mp4", "m4v", "m4a"})); - public MP4WriterSpi() { + public JCodecMP4WriterSpi() { } @Override public MovieWriter create(File file) throws IOException { - return new MP4Writer(file); + return new JCodecMP4Writer(file); } @Override @@ -40,7 +40,7 @@ public List getExtensions() { @Override public Format getFileFormat() { - return MP4Writer.MP4; + return JCodecMP4Writer.MP4; } } diff --git a/org.monte.media.jmf/src/main/java/org.monte.media.jmf/org/monte/media/jmf/codec/video/TSCCCodec.java b/org.monte.media.jmf/src/main/java/org.monte.media.jmf/org/monte/media/jmf/codec/video/TSCCCodec.java index 14f6178..feac0bf 100755 --- a/org.monte.media.jmf/src/main/java/org.monte.media.jmf/org/monte/media/jmf/codec/video/TSCCCodec.java +++ b/org.monte.media.jmf/src/main/java/org.monte.media.jmf/org/monte/media/jmf/codec/video/TSCCCodec.java @@ -6,7 +6,7 @@ import com.sun.media.format.AviVideoFormat; import 
org.monte.media.av.codec.video.TechSmithCodecCore; -import org.monte.media.io.SeekableByteArrayOutputStream; +import org.monte.media.io.ByteArrayImageOutputStream; import org.monte.media.util.ArrayUtil; import javax.media.Buffer; @@ -178,7 +178,7 @@ protected int encode(Buffer in, Buffer out) { } out.setFormat(outputFormat); - SeekableByteArrayOutputStream tmp = new SeekableByteArrayOutputStream(ArrayUtil.reuseByteArray(out.getData(), 32)); + ByteArrayImageOutputStream tmp = new ByteArrayImageOutputStream(ArrayUtil.reuseByteArray(out.getData(), 32)); VideoFormat outvf = outputFormat; boolean isKeyframe = isSet(in, Buffer.FLAG_KEY_FRAME) || frameCounter % (int) outvf.getFrameRate() == 0; @@ -290,7 +290,7 @@ protected int encode(Buffer in, Buffer out) { out.setFormat(outputFormat); out.setData(tmp.getBuffer()); out.setOffset(0); - out.setLength(tmp.size()); + out.setLength((int) tmp.length()); return BUFFER_PROCESSED_OK; } catch (IOException ex) { ex.printStackTrace(); @@ -318,34 +318,30 @@ protected int decode(Buffer in, Buffer out) { int pixelStride; int offset = in.getOffset(); int inputDepth; - switch (invf) { - case RGBFormat rgbFormat -> { - RGBFormat inrgbf = (RGBFormat) outputFormat; - inputDepth = 24; - scanlineStride = inrgbf.getLineStride(); - pixelStride = inrgbf.getPixelStride(); - if (inrgbf.getFlipped() == Format.TRUE) { - offset += (height - 1) * scanlineStride; - scanlineStride = -scanlineStride; - } - } - case AviVideoFormat inavivf -> { - inputDepth = inavivf.getBitsPerPixel(); - scanlineStride = width; - pixelStride = 1; - } - case IndexedColorFormat inicvf -> { - inputDepth = 8; - scanlineStride = inicvf.getLineStride(); - pixelStride = 1; - } - default -> { - // If this is a QuickTime movie, we can not determine the input depth. - // So, we fail to decode QuickTime videos that have depth that is different from 24 bits. 
- VideoFormat vf = invf; - inputDepth = 24; - scanlineStride = pixelStride = vf.getSize().width * 3; + if (invf instanceof RGBFormat) { + RGBFormat inrgbf = (RGBFormat) outputFormat; + inputDepth = 24; + scanlineStride = inrgbf.getLineStride(); + pixelStride = inrgbf.getPixelStride(); + if (inrgbf.getFlipped() == Format.TRUE) { + offset += (height - 1) * scanlineStride; + scanlineStride = -scanlineStride; } + } else if (invf instanceof AviVideoFormat) { + AviVideoFormat inavivf = (AviVideoFormat) invf; + inputDepth = inavivf.getBitsPerPixel(); + scanlineStride = width; + pixelStride = 1; + } else if (invf instanceof IndexedColorFormat) { + IndexedColorFormat inicvf = (IndexedColorFormat) invf; + inputDepth = 8; + scanlineStride = inicvf.getLineStride(); + pixelStride = 1; + } else {// If this is a QuickTime movie, we can not determine the input depth. + // So, we fail to decode QuickTime videos that have depth that is different from 24 bits. + VideoFormat vf = invf; + inputDepth = 24; + scanlineStride = pixelStride = vf.getSize().width * 3; } int outputDepth = inputDepth; if (outputFormat instanceof RGBFormat) { diff --git a/org.monte.media.screenrecorder/src/main/java/org.monte.media.screenrecorder/org/monte/media/screenrecorder/ScreenRecorder.java b/org.monte.media.screenrecorder/src/main/java/org.monte.media.screenrecorder/org/monte/media/screenrecorder/ScreenRecorder.java index 7d8128d..5b27094 100755 --- a/org.monte.media.screenrecorder/src/main/java/org.monte.media.screenrecorder/org/monte/media/screenrecorder/ScreenRecorder.java +++ b/org.monte.media.screenrecorder/src/main/java/org.monte.media.screenrecorder/org/monte/media/screenrecorder/ScreenRecorder.java @@ -37,10 +37,11 @@ import java.io.File; import java.io.IOException; import java.nio.ByteOrder; -import java.text.SimpleDateFormat; +import java.time.Instant; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Collections; -import 
java.util.Date; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.CancellationException; @@ -352,9 +353,6 @@ protected MovieWriter createMovieWriter() throws IOException { .append(WidthKey, captureArea.width, HeightKey, captureArea.height); videoTrackId = w.addTrack(videoOutputFormat); - if (audioFormat != null) { - audioTrackId = w.addTrack(audioFormat); - } Codec encoder = Registry.getInstance().getEncoder(w.getFormat(videoTrackId)); if (encoder == null) { @@ -367,6 +365,11 @@ protected MovieWriter createMovieWriter() throws IOException { throw new IOException("Unable to encode video frames in this output format:\n" + videoOutputFormat); } + // Create the audio encoder + if (audioFormat != null) { + audioTrackId = w.addTrack(audioFormat); + } + // If the capture area does not have the same dimensions as the // video format, create a codec chain which scales the image before // performing the frame encoding. @@ -418,9 +421,9 @@ protected File createMovieFile(Format fileFormat) throws IOException { throw new IOException("\"" + movieFolder + "\" is not a directory."); } - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd 'at' HH.mm.ss"); + DateTimeFormatter dateFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd 'at' HH.mm.ss").withZone(ZoneId.systemDefault()); - return new File(movieFolder, "ScreenRecording " + dateFormat.format(new Date()) + "." + Registry.getInstance().getExtension(fileFormat)); + return new File(movieFolder, "ScreenRecording " + dateFormat.format(Instant.now()) + "." 
+ Registry.getInstance().getExtension(fileFormat)); } /** @@ -605,7 +608,6 @@ protected void waitUntilMouseCaptureStopped() throws InterruptedException { */ private void startAudioCapture() throws LineUnavailableException { audioCaptureTimer = new ScheduledThreadPoolExecutor(1); - int delay = 500; audioGrabber = new AudioGrabber(this, mixer, audioFormat, audioTrackId, recordingStartTime, writerQueue); audioFuture = audioCaptureTimer.scheduleWithFixedDelay(audioGrabber, 0, 10, TimeUnit.MILLISECONDS); audioGrabber.setFuture(audioFuture); diff --git a/org.monte.media.swing/src/main/java/org.monte.media.swing/org/monte/media/swing/datatransfer/DropFileTransferHandler.java b/org.monte.media.swing/src/main/java/org.monte.media.swing/org/monte/media/swing/datatransfer/DropFileTransferHandler.java index 5466f1b..3f3c985 100755 --- a/org.monte.media.swing/src/main/java/org.monte.media.swing/org/monte/media/swing/datatransfer/DropFileTransferHandler.java +++ b/org.monte.media.swing/src/main/java/org.monte.media.swing/org/monte/media/swing/datatransfer/DropFileTransferHandler.java @@ -94,7 +94,7 @@ public boolean importData(JComponent c, Transferable t) { @SuppressWarnings("unchecked") List list = (List) t.getTransferData(DataFlavor.javaFileListFlavor); if (!list.isEmpty()) { - File file = list.getFirst(); + File file = list.get(0); switch (fileSelectionMode) { case JFileChooser.FILES_AND_DIRECTORIES: diff --git a/org.monte.media.swing/src/main/java/org.monte.media.swing/org/monte/media/swing/datatransfer/FileTextFieldTransferHandler.java b/org.monte.media.swing/src/main/java/org.monte.media.swing/org/monte/media/swing/datatransfer/FileTextFieldTransferHandler.java index 365fe11..58c9a45 100755 --- a/org.monte.media.swing/src/main/java/org.monte.media.swing/org/monte/media/swing/datatransfer/FileTextFieldTransferHandler.java +++ b/org.monte.media.swing/src/main/java/org.monte.media.swing/org/monte/media/swing/datatransfer/FileTextFieldTransferHandler.java @@ -93,7 +93,7 @@ 
public boolean importData(JComponent comp, Transferable t) { @SuppressWarnings("unchecked") List list = (List) t.getTransferData(DataFlavor.javaFileListFlavor); if (!list.isEmpty()) { - File file = list.getFirst(); + File file = list.get(0); switch (fileSelectionMode) { case JFileChooser.FILES_AND_DIRECTORIES: diff --git a/org.monte.media/pom.xml b/org.monte.media/pom.xml index d5f7d7e..6794b64 100644 --- a/org.monte.media/pom.xml +++ b/org.monte.media/pom.xml @@ -44,8 +44,8 @@ org.apache.maven.plugins maven-compiler-plugin - 21 - 21 + 17 + 17 diff --git a/org.monte.media/src/main/java/org.monte.media/module-info.java b/org.monte.media/src/main/java/org.monte.media/module-info.java index c749ea4..b8386bf 100755 --- a/org.monte.media/src/main/java/org.monte.media/module-info.java +++ b/org.monte.media/src/main/java/org.monte.media/module-info.java @@ -95,6 +95,7 @@ exports org.monte.media.util.stream; exports org.monte.media.zipmovie; exports org.monte.media.av.codec.text; + exports org.monte.media.qtff; uses org.monte.media.av.CodecSpi; uses org.monte.media.av.MovieWriterSpi; @@ -115,6 +116,7 @@ provides org.monte.media.av.MovieWriterSpi with org.monte.media.quicktime.QuickTimeWriterSpi, + org.monte.media.mp4.MP4WriterSpi, org.monte.media.avi.AVIWriterSpi, org.monte.media.zipmovie.ZipMovieWriterSpi; diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/CodecChain.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/CodecChain.java index 1581c8a..5b853a6 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/CodecChain.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/CodecChain.java @@ -45,7 +45,7 @@ public static Codec createCodecChain(Codec... 
codecs) { if (clist.size() == 1) { return codecs[0]; } else { - CodecChain cc = new CodecChain(clist.get(clist.size() - 2), clist.getLast()); + CodecChain cc = new CodecChain(clist.get(clist.size() - 2), clist.get(clist.size() - 1)); for (int i = clist.size() - 3; i >= 0; i--) { cc = new CodecChain(clist.get(i), cc); } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/DefaultMovie.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/DefaultMovie.java index 67a071a..4096fa9 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/DefaultMovie.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/DefaultMovie.java @@ -30,7 +30,7 @@ public void setReader(MovieReader reader) { @Override public Rational getDuration() { try { - return reader.getDuration(); + return reader.getMovieDuration(); } catch (IOException ex) { InternalError ie = new InternalError("Can't read duration."); ie.initCause(ex); @@ -41,7 +41,7 @@ public Rational getDuration() { @Override public long timeToSample(int track, Rational time) { try { - return reader.timeToSample(track, time); + return reader.findSampleAtTime(track, time); } catch (IOException ex) { return 0; } @@ -50,7 +50,7 @@ public long timeToSample(int track, Rational time) { @Override public Rational sampleToTime(int track, long sample) { try { - return reader.sampleToTime(track, sample); + return reader.getSampleTime(track, sample); } catch (IOException ex) { return new Rational(0); } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/DefaultRegistry.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/DefaultRegistry.java index fae9b23..db3014e 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/DefaultRegistry.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/DefaultRegistry.java @@ -43,10 +43,10 @@ private synchronized Map 
getMimeTypeToExtensionMap() { if (mimeTypeToExtensionMap == null) { mimeTypeToExtensionMap = new LinkedHashMap<>(); for (MovieReaderSpi spi : getReaderSpis()) { - mimeTypeToExtensionMap.put(spi.getFileFormat().get(FormatKeys.MimeTypeKey), spi.getExtensions().isEmpty() ? "" : spi.getExtensions().getFirst()); + mimeTypeToExtensionMap.put(spi.getFileFormat().get(FormatKeys.MimeTypeKey), spi.getExtensions().isEmpty() ? "" : spi.getExtensions().get(0)); } for (MovieWriterSpi spi : getWriterSpis()) { - mimeTypeToExtensionMap.put(spi.getFileFormat().get(FormatKeys.MimeTypeKey), spi.getExtensions().isEmpty() ? "" : spi.getExtensions().getFirst()); + mimeTypeToExtensionMap.put(spi.getFileFormat().get(FormatKeys.MimeTypeKey), spi.getExtensions().isEmpty() ? "" : spi.getExtensions().get(0)); } } return mimeTypeToExtensionMap; @@ -69,7 +69,7 @@ private synchronized Map getExtensionToFormatMap() { return extensionToFormatMap; } - private synchronized List getReaderSpis() { + public synchronized List getReaderSpis() { if (readerSpis == null) { readerSpis = new ArrayList<>(); for (MovieReaderSpi spi : ServiceLoader.load(MovieReaderSpi.class)) { @@ -79,7 +79,7 @@ private synchronized List getReaderSpis() { return readerSpis; } - private synchronized List getWriterSpis() { + public synchronized List getWriterSpis() { if (writerSpis == null) { writerSpis = new ArrayList<>(); for (MovieWriterSpi spi : ServiceLoader.load(MovieWriterSpi.class)) { @@ -150,6 +150,17 @@ public MovieReader getReader(Format fileFormat, File file) throws IOException { throw new IOException("Could not find a reader with format " + fileFormat + " for file " + file + "."); } + @Override + public List getReaderSpis(Format fileFormat) throws IOException { + List result = new ArrayList<>(); + for (MovieReaderSpi spi : getReaderSpis()) { + if (spi.getFileFormat().matches(fileFormat)) { + result.add(spi); + } + } + return result; + } + @Override public MovieWriter getWriter(Format fileFormat, File file) throws 
IOException { if (fileFormat == null) { @@ -163,6 +174,17 @@ public MovieWriter getWriter(Format fileFormat, File file) throws IOException { return null; } + @Override + public List getWriterSpis(Format fileFormat) throws IOException { + List result = new ArrayList<>(); + for (MovieWriterSpi spi : getWriterSpis()) { + if (spi.getFileFormat().matches(fileFormat)) { + result.add(spi); + } + } + return result; + } + @Override public List getWriterFormats() { Set result = new LinkedHashSet<>(); diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/Format.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/Format.java index d390816..cd70bcf 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/Format.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/Format.java @@ -66,7 +66,8 @@ public T get(FormatKey key) { @SuppressWarnings("unchecked") public T get(FormatKey key, T defaultValue) { - return (properties.containsKey(key)) ? (T) properties.get(key) : defaultValue; + T actualValue = (T) properties.get(key); + return actualValue != null ? actualValue : defaultValue; } public boolean containsKey(FormatKey key) { diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/FormatKeys.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/FormatKeys.java index 9eb173e..3cd122e 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/FormatKeys.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/FormatKeys.java @@ -35,6 +35,11 @@ public static enum MediaType { * The format of the samples of the media. */ public final static FormatKey SampleFormatKey = new FormatKey<>("sampleFormat", String.class); + /** + * The data class. 
+ */ + @SuppressWarnings("rawtypes") + public final static FormatKey DataClassKey = new FormatKey<>("dataClass", Class.class); // public final static String MIME_AVI = "video/avi"; diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/MovieReader.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/MovieReader.java index 00b34f5..2216fad 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/MovieReader.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/MovieReader.java @@ -41,27 +41,59 @@ public interface MovieReader extends AutoCloseable { /** * Returns the total duration of the movie . */ - Rational getDuration() throws IOException; + Rational getMovieDuration() throws IOException; /** * Returns the duration of the specified track. */ - Rational getDuration(int track) throws IOException; + Rational getTrackDuration(int track) throws IOException; /** - * Returns the sample number for the specified time. + * Returns the sample number for a given time. + *

+ * The following cases can occur: + *

+ *
The track starts after the specified time.
+ *
In this case the method returns the index of the first sample in the track.
+ * + *
The track ends before the specified time.
+ *
In this case the method returns the index of the last sample in the track.
+ * + *
The track starts before the specified time and ends after the specified time.
+ *
In this case the method returns the index of the first sample that + * intersects with the time.
+ *
+ * + * @param track the track number + * @param seconds the time in seconds + * @return the sample number + * @throws IOException on IO failure */ - long timeToSample(int track, Rational seconds) throws IOException; + long findSampleAtTime(int track, Rational seconds) throws IOException; /** - * Returns the time for the specified sample number. + * Returns the movie time for the specified sample number. + *

+ * There can be multiple samples at the same time. + * In this case, all except the last sample at this time are used + * for prefetching. + * + * @param track the track number + * @param sample the sample number + * @return the sample time + * @throws IOException on IO failure */ - Rational sampleToTime(int track, long sample) throws IOException; + Rational getSampleTime(int track, long sample) throws IOException; /** - * Returns the duration of the specified sample number. + * Returns the duration of the specified sample. + * + * @param track the track number + * @param sample the sample number + * @return the duration of the specified sample + * @throws IOException on IO failure */ - Rational getDuration(int track, long sample) throws IOException; + Rational getSampleDuration(int track, long sample) throws IOException; /** * Returns the file format. diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/Registry.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/Registry.java index 9e428c3..02f4ad2 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/Registry.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/Registry.java @@ -7,10 +7,7 @@ import java.io.File; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; -import java.util.LinkedHashSet; import java.util.List; -import java.util.Set; import static org.monte.media.av.FormatKeys.EncodingKey; import static org.monte.media.av.FormatKeys.MediaTypeKey; @@ -25,6 +22,29 @@ public abstract class Registry { private static Registry instance; + /** + * Gets the registry instance. + *

+ * Will create and return a {@link DefaultRegistry} if no instance has been set. + * + * @return the instance + */ + public static Registry getInstance() { + if (instance == null) { + instance = new DefaultRegistry(); + } + return instance; + } + + /** + * Sets the registry instance. + * + * @param instanceNullable a new instance or null + */ + public void setInstance(Registry instanceNullable) { + instance = instanceNullable; + } + /** * Gets a codec which can transcode from the specified input format to the * specified output format. @@ -35,7 +55,7 @@ public abstract class Registry { */ public final Codec getCodec(Format inputFormat, Format outputFormat) { List codecs = getCodecs(inputFormat, outputFormat); - return codecs.isEmpty() ? null : codecs.getFirst(); + return codecs.isEmpty() ? null : codecs.get(0); } /** @@ -93,13 +113,6 @@ public final List getEncoders(Format outputFormat) { public abstract Format getFileFormat(File file); - public List getFileFormats() { - Set formats = new LinkedHashSet<>(); - formats.addAll(getReaderFormats()); - formats.addAll(getWriterFormats()); - return Collections.unmodifiableList(new ArrayList<>(formats)); - } - /** * Gets a reader for the specified file format and file. * @@ -110,6 +123,13 @@ public List getFileFormats() { */ public abstract MovieReader getReader(Format fileFormat, File file) throws IOException; + /** + * Gets a reader for the specified file. + * + * @param file the desired file + * @return a reader + * @throws IOException if no reader could be found + */ public MovieReader getReader(File file) throws IOException { Format format = getFileFormat(file); if (format == null) { @@ -118,10 +138,49 @@ public MovieReader getReader(File file) throws IOException { return getReader(format, file); } + /** + * Gets all registered reader formats. + * @return the reader formats + */ public abstract List getReaderFormats(); - public abstract List getWriterFormats(); + /** + * Gets all reader SPIs for the specified file. 
+ * + * @param file the desired file + * @return the reader SPIs + * @throws IOException if no reader could be found + */ + public List getReaderSpis(File file) throws IOException { + Format format = getFileFormat(file); + if (format == null) { + throw new IOException("Could not identify the file format of file " + file + "."); + } + return getReaderSpis(format); + } + + /** + * Gets all reader SPIs for the specified file format. + * + * @param fileFormat the desired file format. + * @return the reader SPIs + * @throws IOException if no reader could be found + */ + public abstract List getReaderSpis(Format fileFormat) throws IOException; + + /** + * Gets all reader SPIs. + * + * @return the reader SPIs + */ + public abstract List getReaderSpis(); + /** + * Gets a writer for the specified file. + * + * @param file the desired file + * @return a writer or null + */ public MovieWriter getWriter(File file) throws IOException { Format format = getFileFormat(file); return format == null ? null : getWriter(format, file); @@ -136,6 +195,43 @@ public MovieWriter getWriter(File file) throws IOException { */ public abstract MovieWriter getWriter(Format fileFormat, File file) throws IOException; + /** + * Gets all registered writer formats. + * @return the reader formats + */ + public abstract List getWriterFormats(); + + /** + * Gets all reader SPIs for the specified file. + * + * @param file the desired file + * @return the reader SPIs + * @throws IOException if no reader could be found + */ + public List getWriterSpis(File file) throws IOException { + Format format = getFileFormat(file); + if (format == null) { + throw new IOException("Could not identify the file format of file " + file + "."); + } + return getWriterSpis(format); + } + + /** + * Gets all writer SPIs. + * + * @return the writer SPIs + */ + public abstract List getWriterSpis(); + + /** + * Gets all writer SPIs for the specified file format. + * + * @param fileFormat the desired file format. 
+ * @return the reader SPIs + * @throws IOException if no reader could be found + */ + public abstract List getWriterSpis(Format fileFormat) throws IOException; + /** * Suggests output formats for the given input media format and specified * file format. @@ -156,7 +252,7 @@ public ArrayList suggestOutputFormats(Format inputMediaFormat, Format ou if (mf.matches(matchFormat)) { if (inputMediaFormat.matchesWithout(mf, MimeTypeKey)) { // add matching formats first - formats.addFirst(mf.append(inputMediaFormat)); + formats.add(0, mf.append(inputMediaFormat)); matchingCount++; } else if (inputMediaFormat.matchesWithout(mf, MimeTypeKey, EncodingKey)) { // add formats which match everything but the encoding second @@ -184,27 +280,4 @@ public ArrayList suggestOutputFormats(Format inputMediaFormat, Format ou return formats; } - /** - * Gets the registry instance. - *

- * Will create and return a {@link DefaultRegistry} if no instance has been set. - * - * @return the instance - */ - public static Registry getInstance() { - if (instance == null) { - instance = new DefaultRegistry(); - } - return instance; - } - - /** - * Sets the registry instance. - * - * @param instanceNullable a new instance or null - */ - public void setInstance(Registry instanceNullable) { - instance = instanceNullable; - } - } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/audio/AudioFormatKeys.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/audio/AudioFormatKeys.java index d623896..6ae0306 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/audio/AudioFormatKeys.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/audio/AudioFormatKeys.java @@ -74,7 +74,7 @@ public class AudioFormatKeys extends FormatKeys { */ public final static FormatKey ChannelsKey = new FormatKey<>("channels", Integer.class); /** - * The size of a frame. + * The size of a frame in bits or bytes?. 
*/ public final static FormatKey FrameSizeKey = new FormatKey<>("frameSize", Integer.class); /** diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/AVIBMPDIB.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/AVIBMPDIB.java index c5fea01..0495af3 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/AVIBMPDIB.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/AVIBMPDIB.java @@ -132,7 +132,8 @@ public static ImageInputStream prependDHTSeg(ImageInputStream iisWithoutDHT) thr } public static ImageInputStream prependDHTSeg(InputStream inWithoutDHT) throws IOException { - if (inWithoutDHT instanceof ByteArrayInputStream b) { + if (inWithoutDHT instanceof ByteArrayInputStream) { + ByteArrayInputStream b = (ByteArrayInputStream) inWithoutDHT; return prependDHTSeg(b); } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/ColorAdjustCodec.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/ColorAdjustCodec.java index f999e57..b967208 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/ColorAdjustCodec.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/ColorAdjustCodec.java @@ -14,7 +14,6 @@ import java.awt.image.BufferedImage; -import static java.lang.Math.clamp; import static java.lang.Math.max; import static java.lang.Math.min; import static java.lang.Math.sqrt; @@ -24,6 +23,7 @@ import static org.monte.media.av.FormatKeys.MediaTypeKey; import static org.monte.media.av.FormatKeys.MimeTypeKey; import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_BUFFERED_IMAGE; +import static org.monte.media.util.MathUtil.clamp; /** * Adjusts the colors of a buffered image. 
diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodec.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodec.java index ed52fbd..e4e3265 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodec.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodec.java @@ -8,7 +8,7 @@ import org.monte.media.av.BufferFlag; import org.monte.media.av.Format; import org.monte.media.av.FormatKeys.MediaType; -import org.monte.media.io.SeekableByteArrayOutputStream; +import org.monte.media.io.ByteArrayImageOutputStream; import org.monte.media.util.ArrayUtil; import java.awt.Rectangle; @@ -265,7 +265,7 @@ public int decode(Buffer in, Buffer out) { return CODEC_FAILED; } - BufferedImage img = (out.data instanceof BufferedImage bimg) ? bimg : null; + BufferedImage img = (out.data instanceof BufferedImage) ? (BufferedImage) out.data : null; switch (outputDepth) { case 8: { @@ -328,7 +328,7 @@ public int encode(Buffer in, Buffer out) { return CODEC_OK; } - SeekableByteArrayOutputStream tmp = new SeekableByteArrayOutputStream(ArrayUtil.reuseByteArray(out.data, 32)); + ByteArrayImageOutputStream tmp = new ByteArrayImageOutputStream(ArrayUtil.reuseByteArray(out.data, 32)); Integer keyFrameInterval = outputFormat.get(KeyFrameIntervalKey, outputFormat.get(FrameRateKey).intValue()); boolean isKeyframe = frameCounter == 0 @@ -446,7 +446,7 @@ public int encode(Buffer in, Buffer out) { out.data = tmp.getBuffer(); out.offset = 0; out.sampleCount = 1; - out.length = tmp.size(); + out.length = (int) tmp.length(); return CODEC_OK; } catch (IOException ex) { out.exception = ex; diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodecCore.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodecCore.java index eaa2e50..ecc5a6e 
100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodecCore.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodecCore.java @@ -5,12 +5,13 @@ package org.monte.media.av.codec.video; import org.monte.media.io.ByteArrayImageInputStream; +import org.monte.media.io.ImageOutputStreamAdapter; import org.monte.media.io.UncachedImageInputStream; import javax.imageio.stream.ImageInputStream; +import javax.imageio.stream.ImageOutputStream; import java.io.ByteArrayInputStream; import java.io.IOException; -import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Arrays; @@ -170,11 +171,11 @@ public TechSmithCodecCore() { reset(); } - private void deflateBBuf(OutputStream out) throws IOException { + private void deflateBBuf(ImageOutputStream out) throws IOException { if (bbuf.position() == 2) { out.write(bbuf.array(), 0, 2); } else { - DeflaterOutputStream defl = new DeflaterOutputStream(out); + DeflaterOutputStream defl = new DeflaterOutputStream(new ImageOutputStreamAdapter(out)); defl.write(bbuf.array(), 0, bbuf.position()); defl.finish(); } @@ -585,7 +586,7 @@ public boolean decode16(byte[] inDat, int off, int length, int[] outDat, int[] p * @param scanlineStride The number to add to offset to get to the next * scanline. */ - public void encodeDelta8(OutputStream out, byte[] data, byte[] prev, int width, int height, int offset, int scanlineStride) + public void encodeDelta8(ImageOutputStream out, byte[] data, byte[] prev, int width, int height, int offset, int scanlineStride) throws IOException { ensureBBufCapacity(width, height, 1); @@ -713,7 +714,7 @@ public void encodeDelta8(OutputStream out, byte[] data, byte[] prev, int width, * @param scanlineStride The number to add to offset to get to the next * scanline. 
*/ - public void encodeDelta8to24(OutputStream out, byte[] data, byte[] prev, int width, int height, int offset, int scanlineStride) + public void encodeDelta8to24(ImageOutputStream out, byte[] data, byte[] prev, int width, int height, int offset, int scanlineStride) throws IOException { ensureBBufCapacity(width, height, 3); @@ -853,7 +854,7 @@ public void encodeDelta8to24(OutputStream out, byte[] data, byte[] prev, int wid * @param scanlineStride * @throws IOException */ - public void encodeSameDelta8(OutputStream out, byte[] data, byte[] prev, int width, int height, int offset, int scanlineStride) + public void encodeSameDelta8(ImageOutputStream out, byte[] data, byte[] prev, int width, int height, int offset, int scanlineStride) throws IOException { out.write(0); // Escape code out.write(0x01);// End of bitmap @@ -872,7 +873,7 @@ public void encodeSameDelta8(OutputStream out, byte[] data, byte[] prev, int wid * @param scanlineStride * @throws IOException */ - public void encodeSameDelta24(OutputStream out, int[] data, int[] prev, int width, int height, int offset, int scanlineStride) + public void encodeSameDelta24(ImageOutputStream out, int[] data, int[] prev, int width, int height, int offset, int scanlineStride) throws IOException { out.write(0); // Escape code out.write(0x01);// End of bitmap @@ -891,7 +892,7 @@ public void encodeSameDelta24(OutputStream out, int[] data, int[] prev, int widt * @param scanlineStride * @throws IOException */ - public void encodeSameDelta16(OutputStream out, short[] data, short[] prev, int width, int height, int offset, int scanlineStride) + public void encodeSameDelta16(ImageOutputStream out, short[] data, short[] prev, int width, int height, int offset, int scanlineStride) throws IOException { out.write(0); // Escape code out.write(0x01);// End of bitmap @@ -907,7 +908,7 @@ public void encodeSameDelta16(OutputStream out, short[] data, short[] prev, int * @param scanlineStride The number to add to offset to get to the next * 
scanline. */ - public void encodeKey8(OutputStream out, byte[] data, int width, int height, int offset, int scanlineStride) + public void encodeKey8(ImageOutputStream out, byte[] data, int width, int height, int offset, int scanlineStride) throws IOException { ensureBBufCapacity(width, height, 1); int ymax = offset + height * scanlineStride; @@ -998,7 +999,7 @@ public void encodeKey8(OutputStream out, byte[] data, int width, int height, int * @param scanlineStride The number to add to offset to get to the next * scanline. */ - public void encodeKey8to24(OutputStream out, byte[] data, int width, int height, int offset, int scanlineStride) + public void encodeKey8to24(ImageOutputStream out, byte[] data, int width, int height, int offset, int scanlineStride) throws IOException { ensureBBufCapacity(width, height, 3); int ymax = offset + height * scanlineStride; @@ -1098,7 +1099,7 @@ public void encodeKey8to24(OutputStream out, byte[] data, int width, int height, * @param scanlineStride The number to add to offset to get to the next * scanline. */ - public void encodeDelta16(OutputStream out, short[] data, short[] prev, int width, int height, int offset, int scanlineStride) + public void encodeDelta16(ImageOutputStream out, short[] data, short[] prev, int width, int height, int offset, int scanlineStride) throws IOException { ensureBBufCapacity(width, height, 2); @@ -1220,7 +1221,7 @@ public void encodeDelta16(OutputStream out, short[] data, short[] prev, int widt * @param scanlineStride The number to add to offset to get to the next * scanline. 
*/ - public void encodeKey24(OutputStream out, int[] data, int width, int height, int offset, int scanlineStride) + public void encodeKey24(ImageOutputStream out, int[] data, int width, int height, int offset, int scanlineStride) throws IOException { ensureBBufCapacity(width, height, 3); int ymax = offset + height * scanlineStride; @@ -1311,7 +1312,7 @@ public void encodeKey24(OutputStream out, int[] data, int width, int height, int * @param scanlineStride The number to add to offset to get to the next * scanline. */ - public void encodeDelta24(OutputStream out, int[] data, int[] prev, int width, int height, int offset, int scanlineStride) + public void encodeDelta24(ImageOutputStream out, int[] data, int[] prev, int width, int height, int offset, int scanlineStride) throws IOException { ensureBBufCapacity(width, height, 3); @@ -1438,7 +1439,7 @@ public void encodeDelta24(OutputStream out, int[] data, int[] prev, int width, i * @param scanlineStride The number to add to offset to get to the next * scanline. */ - public void encodeKey16(OutputStream out, short[] data, int width, int height, int offset, int scanlineStride) + public void encodeKey16(ImageOutputStream out, short[] data, int width, int height, int offset, int scanlineStride) throws IOException { ensureBBufCapacity(width, height, 2); int ymax = offset + height * scanlineStride; diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/VideoFormatKeys.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/VideoFormatKeys.java index 0154fc8..b3aa1df 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/VideoFormatKeys.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/av/codec/video/VideoFormatKeys.java @@ -26,11 +26,6 @@ public class VideoFormatKeys extends FormatKeys { * The compressor name. 
*/ public final static FormatKey CompressorNameKey = new FormatKey<>("compressorName", "compressorName", String.class, true, false); - /** - * The data class. - */ - @SuppressWarnings("rawtypes") - public final static FormatKey DataClassKey = new FormatKey<>("dataClass", Class.class); /** * The number of bits per pixel. */ diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIInputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIInputStream.java index 4dbeecc..4eb2567 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIInputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIInputStream.java @@ -344,7 +344,7 @@ public void visitChunk(RIFFChunk group, RIFFChunk chunk) throws ParseException, // The first chunk and all uncompressed chunks are keyframes s.isKeyframe = tr.samples.isEmpty() || (id & 0xffff) == WB_ID || (id & 0xffff) == DB_ID; if (tr.samples.size() > 0) { - Sample lastSample = tr.samples.getLast(); + Sample lastSample = tr.samples.get(tr.samples.size() - 1); s.timeStamp = lastSample.timeStamp + lastSample.duration; } tr.length = s.timeStamp + s.duration; diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIOutputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIOutputStream.java index 368d9c4..5e1f7cd 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIOutputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIOutputStream.java @@ -9,6 +9,7 @@ import org.monte.media.math.Rational; import org.monte.media.riff.RIFFChunk; import org.monte.media.riff.RIFFParser; +import org.monte.media.util.MathUtil; import javax.imageio.stream.FileImageOutputStream; import javax.imageio.stream.ImageOutputStream; @@ -326,7 +327,7 @@ public void setName(int track, String name) { */ public void setCompressionQuality(int track, 
float newValue) { Track t = tracks.get(track); - t.quality = Math.clamp((int) (newValue * 10_000f), 0, 10_000); + t.quality = MathUtil.clamp((int) (newValue * 10_000f), 0, 10_000); } /** @@ -336,7 +337,7 @@ public void setCompressionQuality(int track, float newValue) { */ public float getCompressionQuality(int track) { Track t = tracks.get(track); - return t.quality == -1 ? 0.97f : Math.clamp(t.quality / 10_000f, 0f, 1f); + return t.quality == -1 ? 0.97f : MathUtil.clamp(t.quality / 10_000f, 0f, 1f); } /** @@ -448,7 +449,7 @@ public void writeSample(int track, InputStream in, boolean isKeyframe) throws IO // If a keyframe sample is immediately preceeded by a palette change // we can raise the palette change to a keyframe. if (tr.samples.size() > 0) { - Sample s = tr.samples.getLast(); + Sample s = tr.samples.get(tr.samples.size() - 1); if ((s.chunkType & 0xffff) == PC_ID) { s.isKeyframe = true; } @@ -574,7 +575,7 @@ public long getMediaDuration(int track) { long duration = tr.startTime; if (!tr.samples.isEmpty()) { - Sample s = tr.samples.getLast(); + Sample s = tr.samples.get(tr.samples.size() - 1); duration += s.timeStamp + s.duration; } return duration; @@ -806,7 +807,7 @@ private void writeEpilog() throws IOException { // FIXME compute dwMicroSecPerFrame properly! - Track tt = tracks.getFirst(); + Track tt = tracks.get(0); d.writeInt((int) ((1000000L * tt.scale) / tt.rate)); // dwMicroSecPerFrame // Specifies the number of microseconds between frames. @@ -1009,7 +1010,8 @@ private void writeEpilog() throws IOException { // corner of the destination rectangle is relative to the upper-left // corner of the movie rectangle. 
- if (tr instanceof VideoTrack vt) { + if (tr instanceof VideoTrack) { + VideoTrack vt = (VideoTrack) tr; Format vf = tr.format; /* Write BITMAPINFOHEADR Data into AVI Stream Format Chunk @@ -1088,7 +1090,8 @@ private void writeEpilog() throws IOException { d.write(0); } } - } else if (tr instanceof AudioTrack at) { + } else if (tr instanceof AudioTrack) { + AudioTrack at = (AudioTrack) tr; /* Write WAVEFORMATEX Data into AVI Stream Format Chunk /* ------------- diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIReader.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIReader.java index 913094e..843cad6 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIReader.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIReader.java @@ -14,14 +14,17 @@ import org.monte.media.av.codec.video.VideoFormatKeys; import org.monte.media.math.Rational; import org.monte.media.util.ArrayUtil; +import org.monte.media.util.MathUtil; import javax.imageio.stream.ImageInputStream; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; +import java.util.List; import static java.lang.Math.max; import static java.lang.Math.min; @@ -44,7 +47,16 @@ * @author Werner Randelshofer */ public class AVIReader extends AVIInputStream implements MovieReader { + private static class TrackEncoder { + /** + * The codec. 
+ */ + public Codec codec; + public Buffer outputBuffer; + public Buffer inputBuffer; + } + private List trackEncoders = new ArrayList<>(); public final static Format AVI = new Format(MediaTypeKey, MediaType.FILE, MimeTypeKey, MIME_AVI); private Rational movieDuration = null; @@ -182,28 +194,29 @@ private void readPalette(byte[] pc, int offset, int length, byte[] r, byte[] g, */ public BufferedImage read(int track, BufferedImage img) throws IOException { AbstractAVIStream.Track tr = tracks.get(track); - if (tr.inputBuffer == null) { - tr.inputBuffer = new Buffer(); + TrackEncoder tre = getTrackEncoder(track); + if (tre.inputBuffer == null) { + tre.inputBuffer = new Buffer(); } - if (tr.codec == null) { - createCodec(tr); + if (tre.codec == null) { + createCodec(track); } Buffer buf = new Buffer(); buf.data = img; do { - read(track, tr.inputBuffer); - // FIXME - We assume a one-step codec here! - tr.codec.process(tr.inputBuffer, buf); + read(track, tre.inputBuffer); + tre.codec.process(tre.inputBuffer, buf); } while (buf.isFlag(DISCARD) && !buf.isFlag(END_OF_MEDIA)); - if (tr.inputBuffer.isFlag(END_OF_MEDIA)) { + if (tre.inputBuffer.isFlag(END_OF_MEDIA)) { return null; } return (BufferedImage) buf.data; } - private void createCodec(AbstractAVIStream.Track tr) throws IOException { + private void createCodec(int track) throws IOException { + AbstractAVIStream.Track tr = tracks.get(track); Format fmt = tr.format; Codec codec = createCodec(fmt); String enc = fmt.get(EncodingKey); @@ -222,8 +235,8 @@ private void createCodec(AbstractAVIStream.Track tr) throws IOException { } } } - - tr.codec = codec; + var tre = getTrackEncoder(track); + tre.codec = codec; } private Codec createCodec(Format fmt) { @@ -252,7 +265,7 @@ public int nextTrack() throws IOException { continue; } - AbstractAVIStream.Sample currentSample = tr.readIndex < tr.samples.size() ? 
tr.samples.get((int) tr.readIndex) : tr.samples.getLast(); + AbstractAVIStream.Sample currentSample = tr.readIndex < tr.samples.size() ? tr.samples.get((int) tr.readIndex) : tr.samples.get(tr.samples.size() - 1); long readTimeStamp = currentSample.timeStamp; if (tr.readIndex >= tr.samples.size()) { @@ -271,7 +284,7 @@ public int nextTrack() throws IOException { @Override - public Rational getDuration() throws IOException { + public Rational getMovieDuration() throws IOException { ensureRealized(); if (movieDuration == null) { Rational maxDuration = new Rational(0, 1); @@ -287,7 +300,7 @@ public Rational getDuration() throws IOException { } @Override - public Rational getDuration(int track) throws IOException { + public Rational getTrackDuration(int track) throws IOException { ensureRealized(); AbstractAVIStream.Track tr = tracks.get(track); return new Rational((tr.samples.size() * tr.scale + tr.startTime), tr.rate); @@ -299,18 +312,18 @@ public long getTimeScale(int track) throws IOException { } @Override - public long timeToSample(int track, Rational time) { + public long findSampleAtTime(int track, Rational time) { AbstractAVIStream.Track tr = tracks.get(track); Sample key = new Sample(0, 0, 0, 0, false); key.timeStamp = time.multiply(new Rational(tr.rate, tr.scale)).longValue(); int result = Collections.binarySearch(tr.samples, key, Comparator.comparingLong(a -> a.timeStamp)); if (result < 0) result = ~result - 1; - result = Math.clamp(result, 0, tr.samples.size() - 1); + result = MathUtil.clamp(result, 0, tr.samples.size() - 1); return result; } @Override - public Rational sampleToTime(int track, long sampleIndex) throws IOException { + public Rational getSampleTime(int track, long sampleIndex) throws IOException { ensureRealized(); AbstractAVIStream.Track tr = tracks.get(track); AbstractAVIStream.Sample sample = tr.samples.get((int) max(0, min(tr.samples.size() - 1, sampleIndex))); @@ -322,7 +335,7 @@ public Rational sampleToTime(int track, long sampleIndex) 
throws IOException { } @Override - public Rational getDuration(int track, long sampleIndex) throws IOException { + public Rational getSampleDuration(int track, long sampleIndex) throws IOException { ensureRealized(); AbstractAVIStream.Track tr = tracks.get(track); AbstractAVIStream.Sample sample = tr.samples.get((int) max(0, min(tr.samples.size() - 1, sampleIndex))); @@ -341,7 +354,7 @@ public void setMovieReadTime(Rational newValue) throws IOException { ensureRealized(); for (int t = 0, n = tracks.size(); t < n; t++) { AbstractAVIStream.Track tr = tracks.get(t); - int sample = (int) min(timeToSample(t, newValue), tr.samples.size() - 1); + int sample = (int) min(findSampleAtTime(t, newValue), tr.samples.size() - 1); for (; sample > 0 && !tr.samples.get(sample).isKeyframe; sample--) ; tr.readIndex = sample; } @@ -358,5 +371,11 @@ public int findTrack(int fromTrack, Format format) throws IOException { return -1; } + private TrackEncoder getTrackEncoder(int track) { + while (trackEncoders.size() < track) { + trackEncoders.add(new TrackEncoder()); + } + return trackEncoders.get(track); + } } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIWriter.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIWriter.java index 95c3a04..5d4e425 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIWriter.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AVIWriter.java @@ -20,8 +20,10 @@ import java.io.File; import java.io.IOException; import java.nio.ByteOrder; +import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; +import java.util.List; import static org.monte.media.av.BufferFlag.DISCARD; import static org.monte.media.av.BufferFlag.KEYFRAME; @@ -64,6 +66,16 @@ * @author Werner Randelshofer */ public class AVIWriter extends AVIOutputStream implements MovieWriter { + private static class TrackEncoder { + /** + * The codec. 
+ */ + public Codec codec; + public Buffer outputBuffer; + public Buffer inputBuffer; + } + + private List trackEncoders = new ArrayList<>(); public final static Format AVI = new Format(MediaTypeKey, MediaType.FILE, MimeTypeKey, MIME_AVI); public final static Format VIDEO_RAW = new Format( @@ -201,14 +213,14 @@ private int addAudioTrack(Format format) throws IOException { * Returns the codec of the specified track. */ public Codec getCodec(int track) { - return tracks.get(track).codec; + return getTrackEncoder(track).codec; } /** * Sets the codec for the specified track. */ public void setCodec(int track, Codec codec) { - tracks.get(track).codec = codec; + getTrackEncoder(track).codec = codec; } @Override @@ -229,9 +241,10 @@ public void write(int track, BufferedImage image, long duration) throws IOExcept ensureStarted(); AbstractAVIStream.VideoTrack vt = (AbstractAVIStream.VideoTrack) tracks.get(track); - if (vt.codec == null) { + TrackEncoder tre = getTrackEncoder(track); + if (tre.codec == null) { createCodec(track); - if (vt.codec == null) { + if (tre.codec == null) { throw new IOException("No codec for this format: " + vt.format); } } @@ -272,6 +285,7 @@ public void write(int track, Buffer buf) throws IOException { } AbstractAVIStream.Track tr = tracks.get(track); + TrackEncoder tre = getTrackEncoder(track); boolean isKeyframe = buf.flags.contains(KEYFRAME); if (buf.data instanceof BufferedImage) { @@ -300,19 +314,19 @@ public void write(int track, Buffer buf) throws IOException { // We got here, because the buffer format does not match the track // format. Let's see if we can create a codec which can perform the // encoding for us. 
- if (tr.codec == null) { + if (tre.codec == null) { createCodec(track); - if (tr.codec == null) { + if (tre.codec == null) { throw new UnsupportedOperationException("No codec for this format " + tr.format); } } - if (tr.outputBuffer == null) { - tr.outputBuffer = new Buffer(); + if (tre.outputBuffer == null) { + tre.outputBuffer = new Buffer(); } - Buffer outBuf = tr.outputBuffer; - if (tr.codec.process(buf, outBuf) != Codec.CODEC_OK) { - throw new IOException("Codec failed or could not encode the sample in a single step. codec:" + tr.codec); + Buffer outBuf = tre.outputBuffer; + if (tre.codec.process(buf, outBuf) != Codec.CODEC_OK) { + throw new IOException("Codec failed or could not encode the sample in a single step. codec:" + tre.codec); } if (outBuf.isFlag(DISCARD)) { return; @@ -322,7 +336,12 @@ public void write(int track, Buffer buf) throws IOException { } } - + private TrackEncoder getTrackEncoder(int track) { + while (trackEncoders.size() <= track) { + trackEncoders.add(new TrackEncoder()); + } + return trackEncoders.get(track); + } private boolean writePalette(int track, BufferedImage image, boolean isKeyframe) throws IOException { @@ -450,28 +469,29 @@ private boolean writePalette(int track, IndexColorModel imgPalette, boolean isKe private void createCodec(int track) { AbstractAVIStream.Track tr = tracks.get(track); + TrackEncoder tre = getTrackEncoder(track); Format fmt = tr.format; - tr.codec = Registry.getInstance().getEncoder(fmt); - if (tr.codec != null) { + tre.codec = Registry.getInstance().getEncoder(fmt); + if (tre.codec != null) { if (fmt.get(MediaTypeKey) == MediaType.VIDEO) { - tr.codec.setInputFormat(fmt.prepend( + tre.codec.setInputFormat(fmt.prepend( EncodingKey, ENCODING_BUFFERED_IMAGE, DataClassKey, BufferedImage.class)); - if (null == tr.codec.setOutputFormat( + if (null == tre.codec.setOutputFormat( fmt.prepend(FixedFrameRateKey, true, QualityKey, getCompressionQuality(track), MimeTypeKey, MIME_AVI, DataClassKey, byte[].class))) { - 
throw new UnsupportedOperationException("Track " + tr + " codec does not support format " + fmt + ". codec=" + tr.codec); + throw new UnsupportedOperationException("Track " + tr + " codec does not support format " + fmt + ". codec=" + tre.codec); } } else { - tr.codec.setInputFormat(null); - if (null == tr.codec.setOutputFormat( + tre.codec.setInputFormat(null); + if (null == tre.codec.setOutputFormat( fmt.prepend(FixedFrameRateKey, true, QualityKey, getCompressionQuality(track), MimeTypeKey, MIME_AVI, DataClassKey, byte[].class))) { - throw new UnsupportedOperationException("Track " + tr + " codec " + tr.codec + " does not support format. " + fmt); + throw new UnsupportedOperationException("Track " + tr + " codec " + tre.codec + " does not support format. " + fmt); } } } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AbstractAVIStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AbstractAVIStream.java index 080c5fe..44a457e 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AbstractAVIStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/AbstractAVIStream.java @@ -491,19 +491,7 @@ protected abstract class Track { * The optional name of the track. */ protected String name; - /** - * The codec. - */ - public Codec codec; - /** - * The output buffer is used to store the output of the codec. - */ - public Buffer outputBuffer; - /** - * The input buffer is used when one of the convenience methods without - * a Buffer parameter is used. - */ - public Buffer inputBuffer; + /** * The current chunk index of the reader. 
*/ @@ -528,7 +516,7 @@ public Track(int trackIndex, AVIMediaType mediaType, int fourCC) { public void addSample(Sample s) { if (!samples.isEmpty()) { - s.timeStamp = samples.getLast().timeStamp + samples.getLast().duration; + s.timeStamp = samples.get(samples.size() - 1).timeStamp + samples.get(samples.size() - 1).duration; } samples.add(s); length++; @@ -1399,7 +1387,7 @@ public CompositeChunk(int compositeType, int chunkType) throws IOException { public void add(Chunk child) throws IOException { if (children.size() > 0) { - children.getLast().finish(); + children.get(children.size() - 1).finish(); } children.add(child); } @@ -1453,7 +1441,6 @@ public long size() { */ protected class DataChunk extends Chunk { - //protected SubImageOutputStream data; protected boolean finished; private long finishedSize; @@ -1477,7 +1464,7 @@ public DataChunk(int name) throws IOException { public DataChunk(int name, long dataSize) throws IOException { super(name); /* - data = new SubImageOutputStream(out, ByteOrder.LITTLE_ENDIAN, false); + data = new FilterImageOutputStream(out, ByteOrder.LITTLE_ENDIAN, false); data.writeInt(typeToInt(chunkType)); data.writeInt((int)Math.max(0, dataSize)); */ out.setByteOrder(ByteOrder.BIG_ENDIAN); diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/DataChunkOutputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/DataChunkOutputStream.java index 43f9b18..64481ae 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/DataChunkOutputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/DataChunkOutputStream.java @@ -4,7 +4,7 @@ */ package org.monte.media.avi; -import org.monte.media.io.ByteArray; +import org.monte.media.util.ByteArrays; import java.io.FilterOutputStream; import java.io.IOException; @@ -118,7 +118,7 @@ public synchronized void write(int b) throws IOException { * @see java.io.FilterOutputStream#out */ public void writeInt(int 
v) throws IOException { - ByteArray.setIntLE(byteBuffer, 0, v); + ByteArrays.setIntLE(byteBuffer, 0, v); out.write(byteBuffer, 0, 4); incCount(4); } @@ -130,7 +130,7 @@ public void writeInt(int v) throws IOException { * @throws java.io.IOException */ public void writeUInt(long v) throws IOException { - ByteArray.setIntLE(byteBuffer, 0, (int) v); + ByteArrays.setIntLE(byteBuffer, 0, (int) v); out.write(byteBuffer, 0, 4); incCount(4); } @@ -142,7 +142,7 @@ public void writeUInt(long v) throws IOException { * @throws java.io.IOException */ public void writeShort(int v) throws IOException { - ByteArray.setShortLE(byteBuffer, 0, (short) v); + ByteArrays.setShortLE(byteBuffer, 0, (short) v); out.write(byteBuffer, 0, 2); incCount(2); } @@ -177,13 +177,13 @@ public void writeInts24(int[] v, int off, int len) throws IOException { } public void writeLong(long v) throws IOException { - ByteArray.setLongLE(byteBuffer, 0, v); + ByteArrays.setLongLE(byteBuffer, 0, v); out.write(byteBuffer, 0, 8); incCount(8); } public void writeUShort(int v) throws IOException { - ByteArray.setShortLE(byteBuffer, 0, (short) v); + ByteArrays.setShortLE(byteBuffer, 0, (short) v); out.write(byteBuffer, 0, 2); incCount(2); } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/codec/video/DIBCodec.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/codec/video/DIBCodec.java index c61c5b1..d55e92e 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/codec/video/DIBCodec.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/avi/codec/video/DIBCodec.java @@ -8,9 +8,10 @@ import org.monte.media.av.Format; import org.monte.media.av.FormatKeys.MediaType; import org.monte.media.av.codec.video.AbstractVideoCodec; -import org.monte.media.io.SeekableByteArrayOutputStream; +import org.monte.media.io.ByteArrayImageOutputStream; import org.monte.media.util.ArrayUtil; +import javax.imageio.stream.ImageOutputStream; import 
java.awt.Rectangle; import java.awt.image.BufferedImage; import java.awt.image.ColorModel; @@ -18,7 +19,6 @@ import java.awt.image.DataBufferInt; import java.awt.image.WritableRaster; import java.io.IOException; -import java.io.OutputStream; import static org.monte.media.av.BufferFlag.DISCARD; import static org.monte.media.av.BufferFlag.KEYFRAME; @@ -188,7 +188,7 @@ public int encode(Buffer in, Buffer out) { return CODEC_OK; } - SeekableByteArrayOutputStream tmp = new SeekableByteArrayOutputStream(ArrayUtil.reuseByteArray(out.data, 32)); + ByteArrayImageOutputStream tmp = new ByteArrayImageOutputStream(ArrayUtil.reuseByteArray(out.data, 32)); // Handle sub-image // FIXME - Scanline stride must be a multiple of four. @@ -320,7 +320,7 @@ public void readKey24(byte[] in, int offset, int length, BufferedImage img) { * @param width The width of the image in data elements. * @param scanlineStride The number to append to offset to get to the next scanline. */ - public void writeKey4(OutputStream out, byte[] pixels, int width, int height, int offset, int scanlineStride) + public void writeKey4(ImageOutputStream out, byte[] pixels, int width, int height, int offset, int scanlineStride) throws IOException { byte[] bytes = new byte[width / 2 + width % 2]; @@ -342,7 +342,7 @@ public void writeKey4(OutputStream out, byte[] pixels, int width, int height, in * @param width The width of the image in data elements. * @param scanlineStride The number to append to offset to get to the next scanline. 
*/ - public void writeKey8(OutputStream out, byte[] pixels, int width, int height, int offset, int scanlineStride) + public void writeKey8(ImageOutputStream out, byte[] pixels, int width, int height, int offset, int scanlineStride) throws IOException { for (int y = (height - 1) * scanlineStride; y >= 0; y -= scanlineStride) { // Upside down @@ -359,7 +359,7 @@ public void writeKey8(OutputStream out, byte[] pixels, int width, int height, in * @param width The width of the image in data elements. * @param scanlineStride The number to append to offset to get to the next scanline. */ - public void writeKey24(OutputStream out, int[] pixels, int width, int height, int offset, int scanlineStride) + public void writeKey24(ImageOutputStream out, int[] pixels, int width, int height, int offset, int scanlineStride) throws IOException { int w3 = width * 3; byte[] bytes = new byte[w3]; // holds a scanline of raw image data with 3 channels of 8 bit data diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/ColorModels.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/ColorModels.java index 24816a6..6343bc9 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/ColorModels.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/ColorModels.java @@ -10,7 +10,7 @@ import java.awt.image.IndexColorModel; import java.util.Arrays; -import static java.lang.Math.clamp; +import static org.monte.media.util.MathUtil.clamp; /** * Utility methods for ColorModels. 
@@ -30,7 +30,8 @@ private ColorModels() { */ public static String toString(ColorModel cm) { StringBuilder buf = new StringBuilder(); - if (cm instanceof DirectColorModel dcm) { + if (cm instanceof DirectColorModel) { + DirectColorModel dcm = (DirectColorModel) cm; buf.append("Direct Color Model "); int[] masks = dcm.getMasks(); @@ -59,7 +60,8 @@ public static String toString(ColorModel cm) { for (MaskEntry entry : entries) { buf.append(entry); } - } else if (cm instanceof IndexColorModel icm) { + } else if (cm instanceof IndexColorModel) { + IndexColorModel icm = (IndexColorModel) cm; buf.append("Index Color Model "); int mapSize = icm.getMapSize(); buf.append(icm.getMapSize()); diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/WhiteBalance.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/WhiteBalance.java index 223c5e3..693dcec 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/WhiteBalance.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/WhiteBalance.java @@ -11,8 +11,8 @@ import java.awt.image.DataBufferInt; import static java.lang.Math.abs; -import static java.lang.Math.clamp; import static java.lang.Math.max; +import static org.monte.media.util.MathUtil.clamp; /** * {@code WhiteBalance}. 
diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/YccConverters.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/YccConverters.java index 265801f..4af7cc9 100644 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/YccConverters.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/color/YccConverters.java @@ -5,7 +5,8 @@ package org.monte.media.color; -import static java.lang.Math.clamp; +import static org.monte.media.util.MathUtil.clamp; + public class YccConverters { /** diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/exif/EXIFReader.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/exif/EXIFReader.java index 62b12cb..2173a8f 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/exif/EXIFReader.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/exif/EXIFReader.java @@ -7,8 +7,8 @@ import org.monte.media.exception.AbortException; import org.monte.media.exception.ParseException; import org.monte.media.io.ByteArrayImageInputStream; +import org.monte.media.io.ByteArrayImageOutputStream; import org.monte.media.io.ImageInputStreamAdapter; -import org.monte.media.io.SeekableByteArrayOutputStream; import org.monte.media.jfif.JFIFInputStream; import org.monte.media.jfif.JFIFInputStream.Segment; import org.monte.media.math.Rational; @@ -37,7 +37,9 @@ import java.io.IOException; import java.nio.ByteOrder; import java.nio.charset.StandardCharsets; +import java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Deque; import java.util.HashMap; import java.util.Iterator; import java.util.Stack; @@ -145,10 +147,10 @@ public void read() throws IOException { private void readJFIF(ImageInputStream iin) throws IOException { root = new TIFFDirectory(null, null, -1); - SeekableByteArrayOutputStream exifStream = null; + ByteArrayImageOutputStream exifStream = null; ArrayList exifSeg = 
null; - SeekableByteArrayOutputStream mpStream = null; + ByteArrayImageOutputStream mpStream = null; ArrayList mpSeg = null; byte[] buf = new byte[512]; @@ -202,10 +204,10 @@ private void readJFIF(ImageInputStream iin) throws IOException { case JFIFInputStream.SOI_MARKER: imageNode = new TIFFDirectory(ImageTagSet.getInstance(), null, imageCount++, 0, in.getStreamPosition(), new FileSegment(seg.offset, seg.length)); root.add(imageNode); - exifStream = new SeekableByteArrayOutputStream(); + exifStream = new ByteArrayImageOutputStream(); exifSeg = new ArrayList<>(); - mpStream = new SeekableByteArrayOutputStream(); + mpStream = new ByteArrayImageOutputStream(); mpSeg = new ArrayList<>(); break; @@ -251,14 +253,14 @@ private void readJFIF(ImageInputStream iin) throws IOException { break; case JFIFInputStream.SOS_MARKER: // Extract the Exif data - if (exifStream.size() > 0) { + if (exifStream.length() > 0) { TIFFInputStream tin = new TIFFInputStream(new ByteArrayImageInputStream(exifStream.getBuffer(), 0, exifStream.size(), ByteOrder.BIG_ENDIAN)); readTIFFIFD(tin, imageNode, exifSeg); exifStream.reset(); } // Extract the MP data - if (mpStream.size() > 0) { - TIFFInputStream tin = new TIFFInputStream(new ByteArrayImageInputStream(mpStream.getBuffer(), 0, mpStream.size(), ByteOrder.BIG_ENDIAN)); + if (mpStream.length() > 0) { + TIFFInputStream tin = new TIFFInputStream(new ByteArrayImageInputStream(mpStream.getBuffer(), 0, (int) mpStream.length(), ByteOrder.BIG_ENDIAN)); readMPFIFD(tin, imageNode, null, mpSeg); mpStream.reset(); } @@ -625,8 +627,8 @@ public int getImageCount() { * Returns all IFDDirectories of the specified tag set for the given image. 
*/ public ArrayList getDirectories(int image, TagSet tagSet) { - ArrayList dirs = new ArrayList<>(); - Stack stack = new Stack<>(); + Deque dirs = new ArrayDeque<>(); + Deque stack = new ArrayDeque<>(); stack.push((TIFFDirectory) getMetaDataTree().getChildAt(image)); while (!stack.isEmpty()) { TIFFDirectory dir = stack.pop(); @@ -642,18 +644,18 @@ public ArrayList getDirectories(int image, TagSet tagSet) { } } } - return dirs; + return new ArrayList<>(dirs); } /** * Returns all thumbnails. */ public ArrayList getThumbnails(boolean suppressException) throws IOException { - ArrayList thumbnails = new ArrayList<>(); + ArrayDeque thumbnails = new ArrayDeque<>(); Stack stack = new Stack<>(); stack.push((TIFFDirectory) getMetaDataTree()); if (stack.peek() == null) { - return thumbnails; + return new ArrayList<>(thumbnails); } while (!stack.isEmpty()) { TIFFDirectory dir = stack.pop(); @@ -676,7 +678,7 @@ public ArrayList getThumbnails(boolean suppressException) throws } } } - return thumbnails; + return new ArrayList<>(thumbnails); } /** diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/exif/EXIFTagSet.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/exif/EXIFTagSet.java index 141997d..b5a542f 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/exif/EXIFTagSet.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/exif/EXIFTagSet.java @@ -53,7 +53,7 @@ public class EXIFTagSet extends TagSet { // TIFFTag Relating to Related File Information public final static TIFFTag RelatedSoundFile = new TIFFTag("RelatedSoundFile", 0xa004, ASCII_MASK); // - // Tags Relating to Date and Time + // Tags Relating to Instant and Time public final static TIFFTag DateTimeOriginal = new TIFFTag("DateTimeOriginal", 0x9003, ASCII_MASK, new DateValueFormatter()); public final static TIFFTag DateTimeDigitized = new TIFFTag("DateTimeDigitized", 0x9004, ASCII_MASK, new DateValueFormatter()); public final static 
TIFFTag SubSecTime = new TIFFTag("SubSecTime", 0x9290, ASCII_MASK); diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/image/CMYKImages.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/image/CMYKImages.java index 4706fbc..c403a77 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/image/CMYKImages.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/image/CMYKImages.java @@ -25,8 +25,8 @@ import java.util.concurrent.ExecutionException; import java.util.stream.IntStream; -import static java.lang.Math.clamp; import static java.lang.Math.min; +import static org.monte.media.util.MathUtil.clamp; /** * Provides utility methods for images in the CMYK color space.. diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArrayImageInputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArrayImageInputStream.java index aa709fb..97018ca 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArrayImageInputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArrayImageInputStream.java @@ -4,9 +4,10 @@ */ package org.monte.media.io; +import org.monte.media.util.ByteArrays; + import java.io.EOFException; import java.io.IOException; -import java.io.InputStream; import java.nio.ByteOrder; /** @@ -200,8 +201,8 @@ public int readInt() throws IOException { throw new EOFException(); } int v = (byteOrder == ByteOrder.BIG_ENDIAN) - ? ByteArray.getIntBE(buf, (int) streamPos) - : ByteArray.getIntLE(buf, (int) streamPos); + ? ByteArrays.getIntBE(buf, (int) streamPos) + : ByteArrays.getIntLE(buf, (int) streamPos); streamPos += 4; return v; } @@ -212,8 +213,8 @@ public long readLong() throws IOException { throw new EOFException(); } long v = (byteOrder == ByteOrder.BIG_ENDIAN) - ? ByteArray.getLongBE(buf, (int) streamPos) - : ByteArray.getLongLE(buf, (int) streamPos); + ? 
ByteArrays.getLongBE(buf, (int) streamPos) + : ByteArrays.getLongLE(buf, (int) streamPos); streamPos += 8; return v; } @@ -224,8 +225,8 @@ public short readShort() throws IOException { throw new EOFException(); } short v = (byteOrder == ByteOrder.BIG_ENDIAN) - ? ByteArray.getShortBE(buf, (int) streamPos) - : ByteArray.getShortLE(buf, (int) streamPos); + ? ByteArrays.getShortBE(buf, (int) streamPos) + : ByteArrays.getShortLE(buf, (int) streamPos); streamPos += 2; return v; } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArrayImageOutputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArrayImageOutputStream.java index 189421b..b2b0351 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArrayImageOutputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArrayImageOutputStream.java @@ -5,6 +5,8 @@ package org.monte.media.io; +import org.monte.media.util.ByteArrays; + import javax.imageio.stream.ImageOutputStream; import javax.imageio.stream.ImageOutputStreamImpl; import java.io.IOException; @@ -320,6 +322,10 @@ public long length() { return count - arrayOffset; } + public int size() { + return (int) length(); + } + /** * Resets the count field of this byte array output * stream to zero, so that all currently accumulated output in the @@ -336,9 +342,9 @@ public void writeShort(int v) throws IOException { flushBits(); growBy(2); if (byteOrder == ByteOrder.BIG_ENDIAN) { - ByteArray.setUShortBE(buf, (int) streamPos, v); + ByteArrays.setUShortBE(buf, (int) streamPos, v); } else { - ByteArray.setUShortLE(buf, (int) streamPos, v); + ByteArrays.setUShortLE(buf, (int) streamPos, v); } streamPos += 2; } @@ -362,9 +368,9 @@ public void writeInt(int v) throws IOException { flushBits(); growBy(4); if (byteOrder == ByteOrder.BIG_ENDIAN) { - ByteArray.setIntBE(buf, (int) streamPos, v); + ByteArrays.setIntBE(buf, (int) streamPos, v); } else { - 
ByteArray.setIntLE(buf, (int) streamPos, v); + ByteArrays.setIntLE(buf, (int) streamPos, v); } streamPos += 4; } @@ -374,9 +380,9 @@ public void writeLong(long v) throws IOException { flushBits(); growBy(8); if (byteOrder == ByteOrder.BIG_ENDIAN) { - ByteArray.setLongBE(buf, (int) streamPos, v); + ByteArrays.setLongBE(buf, (int) streamPos, v); } else { - ByteArray.setLongLE(buf, (int) streamPos, v); + ByteArrays.setLongLE(buf, (int) streamPos, v); } streamPos += 8; } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/FilterImageInputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/FilterImageInputStream.java index 85ecbf2..745e1b0 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/FilterImageInputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/FilterImageInputStream.java @@ -8,81 +8,93 @@ import java.io.IOException; /** - * {@code FilterImageInputStream}. + * FilterImageInputStream. * * @author Werner Randelshofer */ public class FilterImageInputStream extends ImageInputStreamImpl2 { - /** - * The underlying input stream. 
- */ - protected ImageInputStream in; - public FilterImageInputStream(ImageInputStream in) { - this.in = in; - } + private final ImageInputStream in; + private final long offset; + private final long length; - @Override - public int read() throws IOException { - flushBits(); - return in.read(); + public FilterImageInputStream(ImageInputStream in) throws IOException { + this(in, in.getStreamPosition(), in.length() - in.getStreamPosition()); } - @Override - public int read(byte[] b, int off, int len) throws IOException { - flushBits(); - return in.read(b, off, len); + public FilterImageInputStream(ImageInputStream in, long offset, long length) throws IOException { + this.in = in; + this.offset = offset; + this.length = length; + if (in.length() != -1 && offset + length > in.length()) { + throw new IllegalArgumentException("Offset too large. offset=" + offset + " length=" + length + " in.length=" + in.length()); + } + // setByteOrder(in.getByteOrder()); + in.seek(offset); } - @Override - public int skipBytes(int n) throws IOException { - flushBits(); - return in.skipBytes(n); + private long available() throws IOException { + checkClosed(); + long pos = in.getStreamPosition(); + if (pos < offset) { + in.seek(offset); + pos = offset; + } + return offset + length - pos; } @Override - public long skipBytes(long n) throws IOException { - flushBits(); - return in.skipBytes(n); + public int read() throws IOException { + if (available() <= 0) { + return -1; + } else { + return in.read(); + } } @Override - public void close() throws IOException { - // Do nothing! 
+ public int read(byte[] b, int off, int len) throws IOException { + long av = available(); + if (av <= 0) { + return -1; + } else { + return in.read(b, off, (int) Math.min(len, av)); + } } @Override public long getStreamPosition() throws IOException { - return in.getStreamPosition(); + return in.getStreamPosition() - offset; } @Override public void seek(long pos) throws IOException { - flushBits(); - in.seek(pos); + in.seek(pos + offset); } @Override - public long length() { - try { - return in.length(); - } catch (IOException ex) { - return -1L; - } + public void flush() throws IOException { + in.flush(); } @Override - public void flushBefore(long pos) throws IOException { - super.flushBefore(pos); - in.flushBefore(pos); + public long getFlushedPosition() { + return in.getFlushedPosition() - offset; } - + /** + * Default implementation returns false. Subclasses should + * override this if they cache data. + */ @Override public boolean isCached() { return in.isCached(); } + /** + * Default implementation returns false. Subclasses should + * override this if they cache data in main memory. 
+ */ @Override public boolean isCachedMemory() { return in.isCachedMemory(); @@ -93,7 +105,8 @@ public boolean isCachedFile() { return in.isCachedFile(); } - private void flushBits() { - bitOffset = 0; + @Override + public long length() { + return length; } } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SubImageOutputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/FilterImageOutputStream.java similarity index 60% rename from org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SubImageOutputStream.java rename to org.monte.media/src/main/java/org.monte.media/org/monte/media/io/FilterImageOutputStream.java index bd888bc..6b7081b 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SubImageOutputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/FilterImageOutputStream.java @@ -1,5 +1,5 @@ /* - * @(#)SubImageOutputStream.java + * @(#)FilterImageOutputStream.java * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. */ package org.monte.media.io; @@ -10,28 +10,49 @@ import java.nio.ByteOrder; /** - * {@code SubImageOutputStream}. + * {@code FilterImageOutputStream}. * * @author Werner Randelshofer */ -public class SubImageOutputStream extends ImageOutputStreamImpl { +public class FilterImageOutputStream extends ImageOutputStreamImpl { - private ImageOutputStream out; + private final ImageOutputStream out; + private long maxStreamPos; private final long offset; - private long length; /** * Whether flush and close request shall be forwarded to underlying stream. */ private final boolean forwardFlushAndClose; - public SubImageOutputStream(ImageOutputStream out, ByteOrder bo, boolean forwardFlushAndClose) throws IOException { - this(out, out.getStreamPosition(), bo, forwardFlushAndClose); + /** + * Creates a new instance that does not close the underlying stream when this instance is closed. + *

+ * The stream position of this instance is relative to the stream position of the underlying stream + * when the instance was created. + * + * @param out the underlying stream. + * @throws IOException on IO failure + */ + public FilterImageOutputStream(ImageOutputStream out) throws IOException { + this(out, out.getStreamPosition(), out.getByteOrder(), false); } - public SubImageOutputStream(ImageOutputStream out, long offset, ByteOrder bo, boolean forwardFlushAndClose) throws IOException { + /** + * Creates a new instance that optionally closes the underlying stream when this instance is closed. + *

+ * The stream position of this instance is relative to the specified offset. + * + * @param out the underlying stream + * @param offset the offset into the underlying stream. + * @param bo the byte order (will be set on the underlying stream) + * @param forwardFlushAndClose whether to forward flush and close to the underlying stream + * @throws IOException on IO failure + */ + public FilterImageOutputStream(ImageOutputStream out, long offset, ByteOrder bo, boolean forwardFlushAndClose) throws IOException { this.out = out; this.offset = offset; + this.maxStreamPos=offset; this.forwardFlushAndClose = forwardFlushAndClose; setByteOrder(bo); out.seek(offset); @@ -74,7 +95,6 @@ public long getStreamPosition() throws IOException { @Override public void seek(long pos) throws IOException { out.seek(pos + offset); - length = Math.max(pos - offset + 1, length); } @Override @@ -121,25 +141,24 @@ public boolean isCachedFile() { @Override public long length() { - return length; + return maxStreamPos - offset; } @Override - public void write(int b) throws IOException { + public final void write(int b) throws IOException { out.write(b); - length = Math.max(out.getStreamPosition() - offset, length); + maxStreamPos=Math.max(maxStreamPos, out.getStreamPosition()); } @Override - public void write(byte[] b, int off, int len) throws IOException { + public final void write(byte[] b, int off, int len) throws IOException { out.write(b, off, len); - length = Math.max(out.getStreamPosition() - offset, length); + maxStreamPos=Math.max(maxStreamPos,out.getStreamPosition()); } public void dispose() throws IOException { if (forwardFlushAndClose) { checkClosed(); } - out = null; } } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ImageInputStreamImpl2.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ImageInputStreamImpl2.java index 5d57467..2d8a22d 100755 --- 
a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ImageInputStreamImpl2.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ImageInputStreamImpl2.java @@ -4,6 +4,8 @@ */ package org.monte.media.io; +import org.monte.media.util.ByteArrays; + import javax.imageio.stream.ImageInputStreamImpl; import java.io.IOException; import java.nio.ByteOrder; @@ -33,14 +35,14 @@ public abstract class ImageInputStreamImpl2 extends ImageInputStreamImpl { public short readShort() throws IOException { readFully(byteBuf, 0, 2); return (byteOrder == ByteOrder.BIG_ENDIAN) - ? ByteArray.getShortBE(byteBuf, 0) - : ByteArray.getShortLE(byteBuf, 0); + ? ByteArrays.getShortBE(byteBuf, 0) + : ByteArrays.getShortLE(byteBuf, 0); } public int readInt() throws IOException { readFully(byteBuf, 0, 4); return (byteOrder == ByteOrder.BIG_ENDIAN) - ? ByteArray.getIntBE(byteBuf, 0) - : ByteArray.getIntLE(byteBuf, 0); + ? ByteArrays.getIntBE(byteBuf, 0) + : ByteArrays.getIntLE(byteBuf, 0); } } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SeekableByteArrayOutputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SeekableByteArrayOutputStream.java deleted file mode 100755 index 36c7d95..0000000 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SeekableByteArrayOutputStream.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * @(#)SeekableByteArrayOutputStream.java - * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. - */ - -package org.monte.media.io; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.Arrays; - -import static java.lang.Math.max; - -/** - * {@code SeekableByteArrayOutputStream}. - * - * @author Werner Randelshofer - */ -public class SeekableByteArrayOutputStream extends ByteArrayOutputStream { - - /** - * The current stream position. 
- */ - private int pos; - - /** - * Creates a new byte array output stream. The buffer capacity is - * initially 32 bytes, though its size increases if necessary. - */ - public SeekableByteArrayOutputStream() { - this(32); - } - - /** - * Creates a new byte array output stream, with a buffer capacity of - * the specified size, in bytes. - * - * @param size the initial size. - * @throws IllegalArgumentException if size is negative. - */ - public SeekableByteArrayOutputStream(int size) { - if (size < 0) { - throw new IllegalArgumentException("Negative initial size: " - + size); - } - buf = new byte[size]; - } - - /** - * Creates a new byte array output stream, which reuses the supplied buffer. - */ - public SeekableByteArrayOutputStream(byte[] buf) { - this.buf = buf; - } - - /** - * Writes the specified byte to this byte array output stream. - * - * @param b the byte to be written. - */ - @Override - public synchronized void write(int b) { - int newcount = max(pos + 1, count); - if (newcount > buf.length) { - buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); - } - buf[pos++] = (byte) b; - count = newcount; - } - - /** - * Writes len bytes from the specified byte array - * starting at offset off to this byte array output stream. - * - * @param b the data. - * @param off the start offset in the data. - * @param len the number of bytes to write. 
- */ - @Override - public synchronized void write(byte b[], int off, int len) { - if ((off < 0) || (off > b.length) || (len < 0) || - ((off + len) > b.length) || ((off + len) < 0)) { - throw new IndexOutOfBoundsException(); - } else if (len == 0) { - return; - } - int newcount = max(pos + len, count); - if (newcount > buf.length) { - buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); - } - System.arraycopy(b, off, buf, pos, len); - pos += len; - count = newcount; - } - - /** - * Resets the count field of this byte array output - * stream to zero, so that all currently accumulated output in the - * output stream is discarded. The output stream can be used again, - * reusing the already allocated buffer space. - */ - @Override - public synchronized void reset() { - count = 0; - pos = 0; - } - - /** - * Sets the current stream position to the desired location. The - * next read will occur at this location. The bit offset is set - * to 0. - * - *

An IndexOutOfBoundsException will be thrown if - * pos is smaller than the flushed position (as - * returned by getFlushedPosition). - * - *

It is legal to seek past the end of the file; an - * EOFException will be thrown only if a read is - * performed. - * - * @param pos a long containing the desired file - * pointer position. - * @throws IndexOutOfBoundsException if pos is smaller - * than the flushed position. - * @throws IOException if any other I/O error occurs. - */ - public void seek(long pos) throws IOException { - this.pos = (int) pos; - } - - /** - * Returns the current byte position of the stream. The next write - * will take place starting at this offset. - * - * @return a long containing the position of the stream. - * @throws IOException if an I/O error occurs. - */ - public long getStreamPosition() throws IOException { - return pos; - } - - /** - * Writes the contents of the byte array into the specified output - * stream. - * - * @param out - */ - public void toOutputStream(OutputStream out) throws IOException { - out.write(buf, 0, count); - } - - /** - * Returns the underlying byte buffer. - */ - public byte[] getBuffer() { - return buf; - } -} diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SubImageInputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SubImageInputStream.java deleted file mode 100755 index c36d723..0000000 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/SubImageInputStream.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * @(#)SubImageInputStream.java - * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. - */ -package org.monte.media.io; - -import javax.imageio.stream.ImageInputStream; -import java.io.IOException; - -/** - * SubImageInputStream. 
- * - * @author Werner Randelshofer - */ -public class SubImageInputStream extends ImageInputStreamImpl2 { - - private final ImageInputStream in; - private final long offset; - private final long length; - - public SubImageInputStream(ImageInputStream in, long offset, long length) throws IOException { - this.in = in; - this.offset = offset; - this.length = length; - if (in.length() != -1 && offset + length > in.length()) { - throw new IllegalArgumentException("Offset too large. offset=" + offset + " length=" + length + " in.length=" + in.length()); - } - // setByteOrder(in.getByteOrder()); - in.seek(offset); - } - - private long available() throws IOException { - checkClosed(); - long pos = in.getStreamPosition(); - if (pos < offset) { - in.seek(offset); - pos = offset; - } - return offset + length - pos; - } - - @Override - public int read() throws IOException { - if (available() <= 0) { - return -1; - } else { - return in.read(); - } - } - - @Override - public int read(byte[] b, int off, int len) throws IOException { - long av = available(); - if (av <= 0) { - return -1; - } else { - return in.read(b, off, (int) Math.min(len, av)); - } - } - - @Override - public long getStreamPosition() throws IOException { - return in.getStreamPosition() - offset; - } - - @Override - public void seek(long pos) throws IOException { - in.seek(pos + offset); - } - - @Override - public void flush() throws IOException { - in.flush(); - } - - @Override - public long getFlushedPosition() { - return in.getFlushedPosition() - offset; - } - - /** - * Default implementation returns false. Subclasses should - * override this if they cache data. - */ - @Override - public boolean isCached() { - return in.isCached(); - } - - /** - * Default implementation returns false. Subclasses should - * override this if they cache data in main memory. 
- */ - @Override - public boolean isCachedMemory() { - return in.isCachedMemory(); - } - - @Override - public boolean isCachedFile() { - return in.isCachedFile(); - } - - @Override - public long length() { - return length; - } -} diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/jpeg/CMYKJPEGImageReader.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/jpeg/CMYKJPEGImageReader.java index cad85d8..cb14002 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/jpeg/CMYKJPEGImageReader.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/jpeg/CMYKJPEGImageReader.java @@ -6,8 +6,9 @@ import org.monte.media.image.CMYKImages; import org.monte.media.io.ByteArrayImageInputStream; +import org.monte.media.io.ByteArrayImageOutputStream; +import org.monte.media.io.IOStreams; import org.monte.media.io.ImageInputStreamAdapter; -import org.monte.media.io.SeekableByteArrayOutputStream; import org.monte.media.jfif.JFIFInputStream; import javax.imageio.ImageIO; @@ -192,7 +193,7 @@ public static BufferedImage read(ImageInputStream in, boolean inverseYCCKColors, int numberOfSamplesPerLine = 0; int numberOfComponentsInFrame = 0; int app14AdobeColorTransform = 0; - SeekableByteArrayOutputStream app2ICCProfile = new SeekableByteArrayOutputStream(); + ByteArrayImageOutputStream app2ICCProfile = new ByteArrayImageOutputStream(); // Browse for marker segments, and extract data from those // which are of interest. JFIFInputStream fifi = new JFIFInputStream(new ImageInputStreamAdapter(in)); @@ -224,7 +225,7 @@ public static BufferedImage read(ImageInputStream in, boolean inverseYCCKColors, // Read Adobe ICC_PROFILE int buffer. The profile is split up over // multiple APP2 marker segments. 
- dis.transferTo(app2ICCProfile); + IOStreams.copy(dis, app2ICCProfile); } } } else if (seg.marker == 0xffee) { diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/mp4/MP4OutputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/mp4/MP4OutputStream.java new file mode 100755 index 0000000..0191d31 --- /dev/null +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/mp4/MP4OutputStream.java @@ -0,0 +1,2421 @@ +/* + * @(#)MP4OutputStream.java + * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. + */ +package org.monte.media.mp4; + +import org.monte.media.av.Format; +import org.monte.media.av.codec.video.VideoFormatKeys; +import org.monte.media.io.ByteArrayImageOutputStream; +import org.monte.media.io.IOStreams; +import org.monte.media.io.ImageOutputStreamAdapter; +import org.monte.media.math.Rational; +import org.monte.media.qtff.AbstractQTFFMovieStream; +import org.monte.media.qtff.AvcDecoderConfigurationRecord; +import org.monte.media.qtff.QTFFImageOutputStream; +import org.monte.media.util.ByteArray; +import org.monte.media.util.MathUtil; + +import javax.imageio.stream.FileImageOutputStream; +import javax.imageio.stream.ImageOutputStream; +import javax.imageio.stream.MemoryCacheImageOutputStream; +import java.awt.image.ColorModel; +import java.awt.image.IndexColorModel; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteOrder; +import java.time.Instant; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.Objects; +import java.util.Set; +import java.util.zip.DeflaterOutputStream; + +import static java.lang.Math.max; +import static org.monte.media.av.FormatKeys.DataClassKey; +import static org.monte.media.av.FormatKeys.EncodingKey; +import static org.monte.media.av.FormatKeys.MIME_QUICKTIME; +import static org.monte.media.av.FormatKeys.MediaType; +import static 
org.monte.media.av.FormatKeys.MediaTypeKey; +import static org.monte.media.av.FormatKeys.MimeTypeKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.ByteOrderKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.ChannelsKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.FrameSizeKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.SampleRateKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.SampleSizeInBitsKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.SignedKey; + +/** + * This class provides low-level support for writing already encoded audio and + * video samples into a MP4 file. + * + * @author Werner Randelshofer + */ +public class MP4OutputStream extends AbstractQTFFMovieStream { + + /** + * Creates a new instance. + * + * @param file the output file + */ + public MP4OutputStream(File file) throws IOException { + if (file.exists()) { + if (!file.delete()) throw new IOException("can not delete file"); + } + this.out = new FileImageOutputStream(file); + this.streamOffset = 0; + init(); + } + + /** + * Creates a new instance. + * + * @param out the output stream. + */ + public MP4OutputStream(ImageOutputStream out) throws IOException { + this.out = out; + this.streamOffset = out.getStreamPosition(); + init(); + } + + private void init() { + creationTime = modificationTime = Instant.ofEpochMilli(0); + } + + /** + * Sets the time scale for this movie, that is, the number of time units + * that pass per second in its time coordinate system.

The default value + * is 600. + * + * @param timeScale + */ + public void setMovieTimeScale(long timeScale) { + /* 1L << 32 == 2^32; the previous check used 2L << 32 (2^33), accepting values twice the documented bound */ + if (timeScale < 1 || timeScale > (1L << 32)) { + throw new IllegalArgumentException("timeScale must be between 1 and 2^32:" + timeScale); + } + this.movieTimeScale = timeScale; + } + + /** + * Returns the time scale of the movie. + * + * @return time scale + * @see #setMovieTimeScale(long) + */ + public long getMovieTimeScale() { + return movieTimeScale; + } + + /** + * Returns the time scale of the media in a track. + * + * @param track Track index. + * @return time scale + * @see #setMovieTimeScale(long) + */ + public long getMediaTimeScale(int track) { + return tracks.get(track).mediaTimeScale; + } + + /** + * Returns the media duration of a track in the media's time scale. + * + * @param track Track index. + * @return media duration + */ + public long getMediaDuration(int track) { + return tracks.get(track).mediaDuration; + } + + /** + * Returns the track duration in the movie's time scale without taking the + * edit list into account.

The returned value is the media duration of + * the track in the movie's time scale. + * + * @param track Track index. + * @return unedited track duration + */ + public long getUneditedTrackDuration(int track) { + Track t = tracks.get(track); + /* convert from media time scale units to movie time scale units: seconds = mediaDuration / mediaTimeScale, result = seconds * movieTimeScale */ + return t.mediaDuration * movieTimeScale / t.mediaTimeScale; + } + + /** + * Returns the track duration in the movie's time scale.

If the track + * has an edit-list, the track duration is the sum of all edit durations. + *

If the track does not have an edit-list, then this method returns the + * media duration of the track in the movie's time scale. + * + * @param track Track index. + * @return track duration + */ + public long getTrackDuration(int track) { + return tracks.get(track).getTrackDuration(movieTimeScale); + } + + /** + * Returns the total duration of the movie in the movie's time scale. + * + * @return media duration + */ + public long getMovieDuration() { + long duration = 0; + for (Track t : tracks) { + duration = Math.max(duration, t.getTrackDuration(movieTimeScale)); + } + return duration; + } + + /** + * Sets the color table for videos with indexed color models. + * + * @param track The track number. + * @param icm IndexColorModel. Specify null to use the standard Macintosh + * color table. + */ + public void setVideoColorTable(int track, ColorModel icm) { + if (icm instanceof IndexColorModel) { + VideoTrack t = (VideoTrack) tracks.get(track); + t.videoColorTable = (IndexColorModel) icm; + } + } + + /** + * Gets the preferred color table for displaying the movie on devices that + * support only 256 colors. + * + * @param track The track number. + * @return The color table or null, if the video uses the standard Macintosh + * color table. + */ + public IndexColorModel getVideoColorTable(int track) { + VideoTrack t = (VideoTrack) tracks.get(track); + return t.videoColorTable; + } + + /** + * Sets the edit list for the specified track.

In the absence of an edit + * list, the presentation of the track starts immediately. An empty edit is + * used to offset the start time of a track.

+ * + * @throws IllegalArgumentException If the edit list ends with an empty + * edit. + */ + public void setEditList(int track, Edit[] editList) { + if (editList != null && editList.length > 0 && editList[editList.length - 1].mediaTime == -1) { + throw new IllegalArgumentException("Edit list must not end with empty edit."); + } + tracks.get(track).editList = editList; + } + + /** + * Adds a video track. + * + * @param compressionType The QuickTime "image compression format" + * 4-Character code. A list of supported 4-Character codes is given in qtff, + * table 3-1, page 96. + * @param compressorName The QuickTime compressor name. Can be up to 32 + * characters long. + * @param timeScale The media time scale between 1 and 2^32. + * @param width The width of a video frame. + * @param height The height of a video frame. + * @param depth The number of bits per pixel. + * @param syncInterval Interval for sync-samples. 0=automatic. 1=all frames + * are keyframes. Values larger than 1 specify that for every n-th frame is + * a keyframe. Apple's QuickTime will not work properly if there is not at + * least one keyframe every second. + * @param format + * @return Returns the track index. + * @throws IllegalArgumentException if {@code width} or {@code height} is + * smaller than 1, if the length of {@code compressionType} is not equal to + * 4, if the length of the {@code compressorName} is not between 1 and 32, + * if the tiimeScale is not between 1 and 2^32. 
+ */ + public int addVideoTrack(String compressionType, String compressorName, long timeScale, int width, int height, int depth, int syncInterval, Format format) throws IOException { + ensureStarted(); + if (compressionType == null || compressionType.length() != 4) { + throw new IllegalArgumentException("compressionType must be 4 characters long:" + compressionType); + } + if (compressorName == null || compressorName.isEmpty() || compressorName.length() > 32) { + throw new IllegalArgumentException("compressorName must be between 1 and 32 characters long:" + (compressorName == null ? "null" : "\"" + compressorName + "\"")); + } + if (timeScale < 1 || timeScale > (2L << 32)) { + throw new IllegalArgumentException("timeScale must be between 1 and 2^32:" + timeScale); + } + if (width < 1 || height < 1) { + throw new IllegalArgumentException("Width and height must be greater than 0, width:" + width + " height:" + height); + } + + VideoTrack t = new VideoTrack(); + t.mediaCompressionType = compressionType; + t.mediaCompressorName = compressorName; + t.mediaTimeScale = timeScale; + t.width = width; + t.height = height; + t.videoDepth = depth; + t.syncInterval = syncInterval; + t.format = format.prepend(VideoFormatKeys.DataClassKey, byte[].class); + tracks.add(t); + return tracks.size() - 1; + } + + /** + * Adds an audio track. + * + * @param compressionType The QuickTime 4-character code. A list of + * supported 4-Character codes is given in qtff, table 3-7, page 113. + * @param timeScale The media time scale between 1 and 2^32. + * @param sampleRate The sample rate. The integer portion must match the + * {@code timeScale}. + * @param numberOfChannels The number of channels: 1 for mono, 2 for stereo. + * @param sampleSizeInBits The number of bits in a sample: 8 or 16. + * @param isCompressed Whether the sound is compressed. + * @param frameDuration The frame duration, expressed in the media’s + * timescale, where the timescale is equal to the sample rate. 
For + * uncompressed formats, this field is always 1. + * @param soundBytesPerPacket For uncompressed audio, the number of bytes in a sample + * for a single channel (sampleSize divided by 8). For compressed audio, the + * number of bytes in a frame. + * @return Returns the track index. + * @throws IllegalArgumentException if the audioFormat is not 4 characters + * long, if the time scale is not between 1 and 2^32, if the integer portion + * of the sampleRate is not equal to the timeScale, if numberOfChannels is + * not 1 or 2. + */ + public int addAudioTrack(String compressionType, // + long timeScale, double sampleRate, // + int numberOfChannels, int sampleSizeInBits, // + boolean isCompressed, // + int frameDuration, int soundBytesPerPacket, boolean signed, ByteOrder byteOrder) throws IOException { + ensureStarted(); + if (compressionType == null || compressionType.length() != 4) { + throw new IllegalArgumentException("audioFormat must be 4 characters long:" + compressionType); + } + if (timeScale < 1 || timeScale > (2L << 32)) { + throw new IllegalArgumentException("timeScale must be between 1 and 2^32:" + timeScale); + } + if (timeScale != (int) Math.floor(sampleRate)) { + throw new IllegalArgumentException("timeScale: " + timeScale + " must match integer portion of sampleRate: " + sampleRate); + } + if (numberOfChannels != 1 && numberOfChannels != 2) { + throw new IllegalArgumentException("numberOfChannels must be 1 or 2: " + numberOfChannels); + } + if (sampleSizeInBits != 8 && sampleSizeInBits != 16) { + throw new IllegalArgumentException("sampleSize must be 8 or 16: " + numberOfChannels); + } + + AudioTrack t = new AudioTrack(); + t.mediaCompressionType = compressionType; + t.mediaTimeScale = timeScale; + t.soundSampleRate = sampleRate; + t.soundCompressionId = isCompressed ? 
-2 : -1; + t.soundNumberOfChannels = numberOfChannels; + t.soundSampleSize = sampleSizeInBits; + t.soundSamplesPerPacket = frameDuration; + if (isCompressed) { + t.soundBytesPerPacket = soundBytesPerPacket; + t.soundBytesPerFrame = soundBytesPerPacket * numberOfChannels; + } else { + t.soundBytesPerPacket = soundBytesPerPacket; + t.soundBytesPerFrame = soundBytesPerPacket * numberOfChannels; + } + t.soundBytesPerSample = sampleSizeInBits / 8; + + t.format = new Format( + DataClassKey, byte[].class, + MediaTypeKey, MediaType.AUDIO, + MimeTypeKey, MIME_QUICKTIME, + EncodingKey, compressionType, + SampleRateKey, Rational.valueOf(sampleRate), + SampleSizeInBitsKey, sampleSizeInBits, + ChannelsKey, numberOfChannels, + FrameSizeKey, soundBytesPerPacket, + SampleRateKey, Rational.valueOf(sampleRate), + SignedKey, signed, + ByteOrderKey, byteOrder); + tracks.add(t); + return tracks.size() - 1; + } + + /** + * Sets the compression quality of a track.

A value of 0 stands for + * "high compression is important" a value of 1 for "high image quality is + * important".

Changing this value affects the encoding of video frames + * which are subsequently written into the track. Frames which have already + * been written are not changed.

This value has no effect on videos + * encoded with lossless encoders such as the PNG format.

The default + * value is 0.97. + * + * @param newValue the new value + */ + public void setCompressionQuality(int track, float newValue) { + VideoTrack vt = (VideoTrack) tracks.get(track); + vt.videoQuality = newValue; + } + + /** + * Returns the compression quality of a track. + * + * @return compression quality + */ + public float getCompressionQuality(int track) { + return ((VideoTrack) tracks.get(track)).videoQuality; + } + + /** + * Sets the sync interval for the specified video track. + * + * @param track The track number. + * @param i Interval between sync samples (keyframes). 0 = automatic. 1 = + * write all samples as sync samples. n = sync every n-th sample. + */ + public void setSyncInterval(int track, int i) { + tracks.get(track).syncInterval = i; + } + + /** + * Gets the sync interval from the specified video track. + */ + public int getSyncInterval(int track) { + return tracks.get(track).syncInterval; + } + + /** + * Sets the creation time of the movie. + */ + public void setCreationTime(Instant creationTime) { + this.creationTime = creationTime; + } + + /** + * Gets the creation time of the movie. + */ + public Instant getCreationTime() { + return creationTime; + } + + /** + * Sets the modification time of the movie. + */ + public void setModificationTime(Instant modificationTime) { + this.modificationTime = modificationTime; + } + + /** + * Gets the modification time of the movie. + */ + public Instant getModificationTime() { + return modificationTime; + } + + /** + * Gets the preferred rate at which to play this movie. A value of 1.0 + * indicates normal rate. + */ + public double getPreferredRate() { + return preferredRate; + } + + /** + * Sets the preferred rate at which to play this movie. A value of 1.0 + * indicates normal rate. + */ + public void setPreferredRate(double preferredRate) { + this.preferredRate = preferredRate; + } + + /** + * Gets the preferred volume of this movie’s sound. A value of 1.0 indicates + * full volume. 
+ */ + public double getPreferredVolume() { + return preferredVolume; + } + + /** + * Sets the preferred volume of this movie’s sound. A value of 1.0 indicates + * full volume. + */ + public void setPreferredVolume(double preferredVolume) { + this.preferredVolume = preferredVolume; + } + + /** + * Gets the time value for current time position within the movie. + */ + public long getCurrentTime() { + return currentTime; + } + + /** + * Sets the time value for current time position within the movie. + */ + public void setCurrentTime(long currentTime) { + this.currentTime = currentTime; + } + + /** + * Gets the time value of the time of the movie poster. + */ + public long getPosterTime() { + return posterTime; + } + + /** + * Sets the time value of the time of the movie poster. + */ + public void setPosterTime(long posterTime) { + this.posterTime = posterTime; + } + + /** + * Gets the duration of the movie preview in movie time scale units. + */ + public long getPreviewDuration() { + return previewDuration; + } + + /** + * Gets the duration of the movie preview in movie time scale units. + */ + public void setPreviewDuration(long previewDuration) { + this.previewDuration = previewDuration; + } + + /** + * Gets the time value in the movie at which the preview begins. + */ + public long getPreviewTime() { + return previewTime; + } + + /** + * The time value in the movie at which the preview begins. + */ + public void setPreviewTime(long previewTime) { + this.previewTime = previewTime; + } + + /** + * The duration of the current selection in movie time scale units. + */ + public long getSelectionDuration() { + return selectionDuration; + } + + /** + * The duration of the current selection in movie time scale units. + */ + public void setSelectionDuration(long selectionDuration) { + this.selectionDuration = selectionDuration; + } + + /** + * The time value for the start time of the current selection. 
+ */ + public long getSelectionTime() { + return selectionTime; + } + + /** + * The time value for the start time of the current selection. + */ + public void setSelectionTime(long selectionTime) { + this.selectionTime = selectionTime; + } + + /** + * Sets the transformation matrix of the entire movie. + *

+     * {a, b, u,
+     *  c, d, v,
+     *  tx,ty,w} // X- and Y-Translation
+     *
+     *           [ a  b  u
+     * [x y 1] *   c  d  v   = [x' y' 1]
+     * 
tx ty w ] + * + * @param matrix The transformation matrix. + */ + public void setMovieTransformationMatrix(double[] matrix) { + if (matrix.length != 9) { + throw new IllegalArgumentException("matrix must have 9 elements, matrix.length=" + matrix.length); + } + + System.arraycopy(matrix, 0, movieMatrix, 0, 9); + } + + /** + * Gets the transformation matrix of the entire movie. + * + * @return The transformation matrix. + */ + public double[] getMovieTransformationMatrix() { + return movieMatrix.clone(); + } + + /** + * Sets the transformation matrix of the specified track. + *
+     * {a, b, u,
+     *  c, d, v,
+     *  tx,ty,w} // X- and Y-Translation
+     *
+     *           [ a  b  u
+     * [x y 1] *   c  d  v   = [x' y' 1]
+     * 
tx ty w ] + * + * @param track The track number. + * @param matrix The transformation matrix. + */ + public void setTransformationMatrix(int track, double[] matrix) { + if (matrix.length != 9) { + throw new IllegalArgumentException("matrix must have 9 elements, matrix.length=" + matrix.length); + } + + System.arraycopy(matrix, 0, tracks.get(track).matrix, 0, 9); + } + + /** + * Gets the transformation matrix of the specified track. + * + * @param track The track number. + * @return The transformation matrix. + */ + public double[] getTransformationMatrix(int track) { + return tracks.get(track).matrix.clone(); + } + + /** + * Sets the state of the MP4Writer to started.

If the state is + changed by this method, the prolog is written. + */ + protected void ensureStarted() throws IOException { + ensureOpen(); + if (state == States.FINISHED) { + throw new IOException("Can not write into finished movie."); + } + if (state != States.STARTED) { + writeProlog(); + mdatAtom = new WideDataAtom("mdat"); + state = States.STARTED; + } + } + + /** + * Writes an {@link AvcDecoderConfigurationRecord} into the track. + * + * @param track the track index + * @param r the record + */ + public void writeAvcDecoderConfigurationRecord(int track, AvcDecoderConfigurationRecord r) { + Track t = tracks.get(track); // throws index out of bounds exception if illegal track index + if (t instanceof VideoTrack) { + VideoTrack vt = (VideoTrack) t; + AvcDecoderConfigurationRecord record = vt.avcDecoderConfigurationRecord; + if (record == null) { + record = new AvcDecoderConfigurationRecord(r.avcProfileIndication(), + r.profileCompatibility(), r.avcLevelIndication(), r.nalLengthSize(), + r.sequenceParameterSetNALUnit(), r.pictureParameterSetNALUnit()); + } else { + /* merge parameter sets from both records, preserving insertion order */ + var pps = new LinkedHashSet<>(record.pictureParameterSetNALUnit()); + pps.addAll(r.pictureParameterSetNALUnit()); + var sps = new LinkedHashSet<>(record.sequenceParameterSetNALUnit()); + sps.addAll(r.sequenceParameterSetNALUnit()); + /* constructor takes SPS before PPS, matching the null-record branch above */ + record = new AvcDecoderConfigurationRecord(r.avcProfileIndication(), + r.profileCompatibility(), r.avcLevelIndication(), r.nalLengthSize(), + sps, pps); + } + vt.avcDecoderConfigurationRecord = record; + } + } + + /** + * Writes an already encoded sample from a file into a track.

This + * method does not inspect the contents of the samples. The contents has to + * match the format and dimensions of the media in this track. + * + * @param track The track index. + * @param file The file which holds the encoded data sample. + * @param duration The duration of the sample in media time scale units. + * @param isSync whether the sample is a sync sample (key frame). + * @throws IOException if writing the sample data failed. + */ + public void writeSample(int track, File file, long duration, boolean isSync) throws IOException { + ensureStarted(); + FileInputStream in = null; + try { + in = new FileInputStream(file); + writeSample(track, in, duration, isSync); + } finally { + if (in != null) { + in.close(); + } + } + } + + /** + * Writes an already encoded sample from an input stream into a track.

+ * This method does not inspect the contents of the samples. The contents + * have to match the format and dimensions of the media in this track. + * + * @param track The track index. + * @param in The input stream which holds the encoded sample data. + * @param duration The duration of the video frame in media time scale + * units. + * @param isSync Whether the sample is a sync sample (keyframe). + * @throws IllegalArgumentException if the duration is less than 1. + * @throws IOException if writing the sample data failed. + */ + public void writeSample(int track, InputStream in, long duration, boolean isSync) throws IOException { + ensureStarted(); + if (duration <= 0) { + throw new IllegalArgumentException("duration must be greater 0"); + } + Track t = tracks.get(track); // throws index out of bounds exception if illegal track index + ensureOpen(); + ensureStarted(); + long offset = getRelativeStreamPosition(); + QTFFImageOutputStream mdatOut = mdatAtom.getOutputStream(); + IOStreams.copy(in, mdatOut); + long length = getRelativeStreamPosition() - offset; + t.addSample(new Sample(duration, offset, length), 1, isSync); + } + + /** + * Writes an already encoded sample from a byte array into a track.

This + * method does not inspect the contents of the samples. The contents has to + * match the format and dimensions of the media in this track. + * + * @param track The track index. + * @param data The encoded sample data. + * @param duration The duration of the sample in media time scale units. + * @param isSync Whether the sample is a sync sample. + * @throws IllegalArgumentException if the duration is less than 1. + * @throws IOException if writing the sample data failed. + */ + public void writeSample(int track, byte[] data, long duration, boolean isSync) throws IOException { + writeSample(track, data, 0, data.length, duration, isSync); + } + + /** + * Writes an already encoded sample from a byte array into a track.

This + * method does not inspect the contents of the samples. The contents has to + * match the format and dimensions of the media in this track. + * + * @param track The track index. + * @param data The encoded sample data. + * @param off The start offset in the data. + * @param len The number of bytes to write. + * @param duration The duration of the sample in media time scale units. + * @param isSync Whether the sample is a sync sample (keyframe). + * @throws IllegalArgumentException if the duration is less than 1. + * @throws IOException if writing the sample data failed. + */ + public void writeSample(int track, byte[] data, int off, int len, long duration, boolean isSync) throws IOException { + ensureStarted(); + if (duration <= 0) { + throw new IllegalArgumentException("duration must be greater 0"); + } + Track t = tracks.get(track); // throws index out of bounds exception if illegal track index + ensureOpen(); + ensureStarted(); + long offset = getRelativeStreamPosition(); + ImageOutputStream mdatOut = mdatAtom.getOutputStream(); + mdatOut.write(data, off, len); + t.addSample(new Sample(duration, offset, len), 1, isSync); + } + + /** + * Writes multiple sync samples from a byte array into a track.

This + * method does not inspect the contents of the samples. The contents has to + * match the format and dimensions of the media in this track. + * + * @param track The track index. + * @param sampleCount The number of samples. + * @param data The encoded sample data. The length of data must be dividable + * by sampleCount. + * @param sampleDuration The duration of a sample. All samples must have the + * same duration. + * @throws IllegalArgumentException if {@code sampleDuration} is less than 1 + * or if the length of {@code data} is not dividable by {@code sampleCount}. + * @throws IOException if writing the chunk failed. + */ + public void writeSamples(int track, int sampleCount, byte[] data, long sampleDuration, boolean isSync) throws IOException { + writeSamples(track, sampleCount, data, 0, data.length, sampleDuration, isSync); + } + + /** + * Writes multiple sync samples from a byte array into a track.

This + * method does not inspect the contents of the samples. The contents has to + * match the format and dimensions of the media in this track. + * + * @param track The track index. + * @param sampleCount The number of samples. + * @param data The encoded sample data. + * @param off The start offset in the data. + * @param len The number of bytes to write. Must be dividable by + * sampleCount. + * @param sampleDuration The duration of a sample. All samples must have the + * same duration. + * @throws IllegalArgumentException if the duration is less than 1. + * @throws IOException if writing the sample data failed. + */ + public void writeSamples(int track, int sampleCount, byte[] data, int off, int len, long sampleDuration) throws IOException { + writeSamples(track, sampleCount, data, off, len, sampleDuration, true); + } + + /** + * Writes multiple samples from a byte array into a track.

This method + * does not inspect the contents of the data. The contents has to match the + * format and dimensions of the media in this track. + * + * @param track The track index. + * @param sampleCount The number of samples. + * @param data The encoded sample data. + * @param off The start offset in the data. + * @param len The number of bytes to write. Must be dividable by + * sampleCount. + * @param sampleDuration The duration of a sample. All samples must have the + * same duration. + * @param isSync Whether the samples are sync samples. All samples must + * either be sync samples or non-sync samples. + * @throws IllegalArgumentException if the duration is less than 1. + * @throws IOException if writing the sample data failed. + */ + public void writeSamples(int track, int sampleCount, byte[] data, int off, int len, long sampleDuration, boolean isSync) throws IOException { + ensureStarted(); + if (sampleDuration <= 0) { + throw new IllegalArgumentException("sampleDuration must be greater 0, sampleDuration=" + sampleDuration + " track=" + track); + } + if (sampleCount <= 0) { + throw new IllegalArgumentException("sampleCount must be greater 0, sampleCount=" + sampleCount + " track=" + track); + } + if (len % sampleCount != 0) { + throw new IllegalArgumentException("len must be divisable by sampleCount len=" + len + " sampleCount=" + sampleCount + " track=" + track); + } + Track t = tracks.get(track); // throws index out of bounds exception if illegal track index + ensureOpen(); + ensureStarted(); + long offset = getRelativeStreamPosition(); + ImageOutputStream mdatOut = mdatAtom.getOutputStream(); + mdatOut.write(data, off, len); + + + int sampleLength = len / sampleCount; + Sample first = new Sample(sampleDuration, offset, sampleLength); + Sample last = new Sample(sampleDuration, offset + sampleLength * (sampleCount - 1), sampleLength); + t.addChunk(new Chunk(first, last, sampleCount, 1), isSync); + } + + /** + * Returns true if the limit for media samples has 
been reached. If this + * limit is reached, no more samples should be added to the movie.

+ * QuickTime files can be up to 64 TB long, but there are other values that + * may overflow before this size is reached. This method returns true when + * the files size exceeds 2^60 or when the media duration value of a track + * exceeds 2^61. + */ + public boolean isDataLimitReached() { + try { + long maxMediaDuration = 0; + for (Track t : tracks) { + maxMediaDuration = max(t.mediaDuration, maxMediaDuration); + } + + return getRelativeStreamPosition() > (1L << 61) // + || maxMediaDuration > 1L << 61; + } catch (IOException ex) { + return true; + } + } + + /** + * Closes the movie file as well as the stream being filtered. + * + * @throws IOException if an I/O error has occurred + */ + public void close() throws IOException { + try { + if (state == States.STARTED) { + finish(); + } + } finally { + if (state != States.CLOSED) { + out.close(); + state = States.CLOSED; + } + } + } + + /** + * Finishes writing the contents of the QuickTime output stream without + * closing the underlying stream. Use this method when applying multiple + * filters in succession to the same output stream. + * + * @throws IllegalStateException if the dimension of the video track has + * not been specified or determined yet. + * @throws IOException if an I/O exception has occurred + */ + public void finish() throws IOException { + ensureOpen(); + if (state != States.FINISHED) { + for (int i = 0, n = tracks.size(); i < n; i++) { + } + mdatAtom.finish(); + writeEpilog(); + state = States.FINISHED; + /* + for (int i = 0, n = tracks.size(); i < n; i++) { + if (tracks.get(i) instanceof VideoTrack) { + VideoTrack t = (VideoTrack) tracks.get(i); + t.videoWidth = t.videoHeight = -1; + } + }*/ + } + } + + /** + * Check to make sure that this stream has not been closed + */ + protected void ensureOpen() throws IOException { + if (state == States.CLOSED) { + throw new IOException("Stream closed"); + } + } + + /** + * Writes the stream prolog. 
+ */ + private void writeProlog() throws IOException { + /* File type atom + * + typedef struct { + magic brand; + bcd4 versionYear; + bcd2 versionMonth; + bcd2 versionMinor; + magic[4] compatibleBrands; + } ftypAtom; + */ + DataAtom ftypAtom = new DataAtom("ftyp"); + QTFFImageOutputStream d = ftypAtom.getOutputStream(); + d.writeType("isom"); // brand + d.writeBCD4(2005); // versionYear + d.writeBCD2(3); // versionMonth + d.writeBCD2(0); // versionMinor + d.writeType("isom"); // compatibleBrands + d.writeType("iso2"); // compatibleBrands + d.writeType("avc1"); // compatibleBrands + d.writeType("mp41"); // compatibleBrands + ftypAtom.finish(); + } + + private void writeEpilog() throws IOException { + long duration = getMovieDuration(); + + DataAtom leaf; + + /* Movie Atom ========= */ + moovAtom = new CompositeAtom("moov"); + + /* Movie Header Atom ------------- + * The data contained in this atom defines characteristics of the entire + * QuickTime movie, such as time scale and duration. It has an atom type + * value of 'mvhd'. + * + * typedef struct { + byte version; + byte[3] flags; + mactimestamp creationTime; + mactimestamp modificationTime; + int timeScale; + int duration; + fixed16d16 preferredRate; + fixed8d8 preferredVolume; + byte[10] reserved; + fixed16d16 matrixA; + fixed16d16 matrixB; + fixed2d30 matrixU; + fixed16d16 matrixC; + fixed16d16 matrixD; + fixed2d30 matrixV; + fixed16d16 matrixX; + fixed16d16 matrixY; + fixed2d30 matrixW; + int previewTime; + int previewDuration; + int posterTime; + int selectionTime; + int selectionDuration; + int currentTime; + int nextTrackId; + } movieHeaderAtom; + */ + leaf = new DataAtom("mvhd"); + moovAtom.add(leaf); + QTFFImageOutputStream d = leaf.getOutputStream(); + d.writeByte(0); // version + // A 1-byte specification of the version of this movie header atom. + + d.writeByte(0); // flags[0] + d.writeByte(0); // flags[1] + d.writeByte(0); // flags[2] + // Three bytes of space for future movie header flags. 
+ + d.writeMacTimestamp(creationTime); // creationTime + // A 32-bit integer that specifies the calendar date and time (in + // seconds since midnight, January 1, 1904) when the movie atom was + // created. It is strongly recommended that this value should be + // specified using coordinated universal time (UTC). + + d.writeMacTimestamp(modificationTime); // modificationTime + // A 32-bit integer that specifies the calendar date and time (in + // seconds since midnight, January 1, 1904) when the movie atom was + // changed. BooleanIt is strongly recommended that this value should be + // specified using coordinated universal time (UTC). + + d.writeUInt(movieTimeScale); // timeScale + // A time value that indicates the time scale for this movie—that is, + // the number of time units that pass per second in its time coordinate + // system. A time coordinate system that measures time in sixtieths of a + // second, for example, has a time scale of 60. + + d.writeUInt(duration); // duration + // A time value that indicates the duration of the movie in time scale + // units. Note that this property is derived from the movie’s tracks. + // The value of this field corresponds to the duration of the longest + // track in the movie. + + d.writeFixed16D16(preferredRate); // preferredRate + // A 32-bit fixed-point number that specifies the rate at which to play + // this movie. A value of 1.0 indicates normal rate. + + d.writeFixed8D8(preferredVolume); // preferredVolume + // A 16-bit fixed-point number that specifies how loud to play this + // movie’s sound. A value of 1.0 indicates full volume. + + d.write(new byte[10]); // reserved; + // Ten bytes reserved for use by Apple. Set to 0. 
+ + d.writeFixed16D16(movieMatrix[0]); // matrix[0] + d.writeFixed16D16(movieMatrix[1]); // matrix[1] + d.writeFixed2D30(movieMatrix[2]); // matrix[2] + d.writeFixed16D16(movieMatrix[3]); // matrix[3] + d.writeFixed16D16(movieMatrix[4]); // matrix[4] + d.writeFixed2D30(movieMatrix[5]); // matrix[5] + d.writeFixed16D16(movieMatrix[6]); // matrix[6] + d.writeFixed16D16(movieMatrix[7]); // matrix[7] + d.writeFixed2D30(movieMatrix[8]); // matrix[8] + + // The matrix structure associated with this movie. A matrix shows how + // to map points from one coordinate space into another. See “Matrices” + // for a discussion of how display matrices are used in QuickTime: + // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap4/chapter_5_section_4.html#//apple_ref/doc/uid/TP40000939-CH206-18737 + + d.writeUInt(previewTime); // previewTime + // The time value in the movie at which the preview begins. + + d.writeUInt(previewDuration); // previewDuration + // The duration of the movie preview in movie time scale units. + + d.writeUInt(posterTime); // posterTime + // The time value of the time of the movie poster. + + d.writeUInt(selectionTime); // selectionTime + // The time value for the start time of the current selection. + + d.writeUInt(selectionDuration); // selectionDuration + // The duration of the current selection in movie time scale units. + + d.writeUInt(currentTime); // currentTime; + // The time value for current time position within the movie. + + d.writeUInt(tracks.size() + 1); // nextTrackId + // A 32-bit integer that indicates a value to use for the track ID + // number of the next track added to this movie. Note that 0 is not a + // valid track ID value. 
+ + for (int i = 0, n = tracks.size(); i < n; i++) { + /* Track Atom ======== */ + writeTrackAtoms(i, moovAtom, modificationTime); + } + + // Optional color table atom + for (Track t : tracks) { + if (t instanceof VideoTrack) { + VideoTrack vt = (VideoTrack) t; + if (vt.videoColorTable != null) { + writeVideoColorTableAtom(vt, moovAtom); + break; + } + } + } + + + // + moovAtom.finish(); + } + + protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Instant modificationTime) throws IOException { + Track t = tracks.get(trackIndex); + + DataAtom leaf; + QTFFImageOutputStream d; + + /* Track Atom ======== */ + CompositeAtom trakAtom = new CompositeAtom("trak"); + moovAtom.add(trakAtom); + + /* Track Header Atom ----------- + * The track header atom specifies the characteristics of a single track + * within a movie. A track header atom contains a size field that + * specifies the number of bytes and a type field that indicates the + * format of the data (defined by the atom type 'tkhd'). + * + typedef struct { + byte version; + byte flag0; + byte flag1; + byte set TrackHeaderFlags flag2; + mactimestamp creationTime; + mactimestamp modificationTime; + int trackId; + byte[4] reserved; + int duration; + byte[8] reserved; + short layer; + short alternateGroup; + short volume; + byte[2] reserved; + int[9] matrix; + int trackWidth; + int trackHeight; + } trackHeaderAtom; */ + leaf = new DataAtom("tkhd"); + trakAtom.add(leaf); + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this track header. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(t.headerFlags); // flag[2] + // Three bytes that are reserved for the track header flags. These flags + // indicate how the track is used in the movie. The following flags are + // valid (all flags are enabled when set to 1): + // + // Track enabled + // Indicates that the track is enabled. Flag value is 0x0001. 
+ // Track in movie + // Indicates that the track is used in the movie. Flag value is + // 0x0002. + // Track in preview + // Indicates that the track is used in the movie’s preview. Flag + // value is 0x0004. + // Track in poster + // Indicates that the track is used in the movie’s poster. Flag + // value is 0x0008. + + d.writeMacTimestamp(creationTime); // creationTime + // A 32-bit integer that indicates the calendar date and time (expressed + // in seconds since midnight, January 1, 1904) when the track header was + // created. It is strongly recommended that this value should be + // specified using coordinated universal time (UTC). + + d.writeMacTimestamp(modificationTime); // modificationTime + // A 32-bit integer that indicates the calendar date and time (expressed + // in seconds since midnight, January 1, 1904) when the track header was + // changed. It is strongly recommended that this value should be + // specified using coordinated universal time (UTC). + + d.writeInt(trackIndex + 1); // trackId + // A 32-bit integer that uniquely identifies the track. The value 0 + // cannot be used. + + d.writeInt(0); // reserved; + // A 32-bit integer that is reserved for use by Apple. Set this field to 0. + + d.writeUInt(t.getTrackDuration(movieTimeScale)); // duration + // A time value that indicates the duration of this track (in the + // movie’s time coordinate system). Note that this property is derived + // from the track’s edits. The value of this field is equal to the sum + // of the durations of all of the track’s edits. If there is no edit + // list, then the duration is the sum of the sample durations, converted + // into the movie timescale. + + d.writeLong(0); // reserved + // An 8-byte value that is reserved for use by Apple. Set this field to 0. + + d.writeShort(0); // layer; + // A 16-bit integer that indicates this track’s spatial priority in its + // movie. 
The QuickTime Movie Toolbox uses this value to determine how + // tracks overlay one another. Tracks with lower layer values are + // displayed in front of tracks with higher layer values. + + d.writeShort(0); // alternate group + // A 16-bit integer that specifies a collection of movie tracks that + // contain alternate data for one another. QuickTime chooses one track + // from the group to be used when the movie is played. The choice may be + // based on such considerations as playback quality, language, or the + // capabilities of the computer. + + d.writeFixed8D8(t.mediaType == MediaType.AUDIO ? 1 : 0); // volume + // A 16-bit fixed-point value that indicates how loudly this track’s + // sound is to be played. A value of 1.0 indicates normal volume. + + d.writeShort(0); // reserved + // A 16-bit integer that is reserved for use by Apple. Set this field to 0. + + double[] m = t.matrix; + d.writeFixed16D16(m[0]); // matrix[0] + d.writeFixed16D16(m[1]); // matrix[1] + d.writeFixed2D30(m[2]); // matrix[2] + d.writeFixed16D16(m[3]); // matrix[3] + d.writeFixed16D16(m[4]); // matrix[4] + d.writeFixed2D30(m[5]); // matrix[5] + d.writeFixed16D16(m[6]); // matrix[6] + d.writeFixed16D16(m[7]); // matrix[7] + d.writeFixed2D30(m[8]); // matrix[8] + // The matrix structure associated with this track. + // See Figure 2-8 for an illustration of a matrix structure: + // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap2/chapter_3_section_3.html#//apple_ref/doc/uid/TP40000939-CH204-32967 + + d.writeFixed16D16(t.mediaType == MediaType.VIDEO ? t.width : 0); // width + // A 32-bit fixed-point number that specifies the width of this track in pixels. + + d.writeFixed16D16(t.mediaType == MediaType.VIDEO ? t.height : 0); // height + // A 32-bit fixed-point number that indicates the height of this track in pixels. 
+ + /* Edit Atom ========= */ + CompositeAtom edtsAtom = new CompositeAtom("edts"); + trakAtom.add(edtsAtom); + + /* Edit List atom ------- */ + /* + typedef struct { + byte version; + byte[3] flags; + int numberOfEntries; + editListTable editListTable[numberOfEntries]; + } editListAtom; + + typedef struct { + int trackDuration; + int mediaTime; + fixed16d16 mediaRate; + } editListTable; + */ + leaf = new DataAtom("elst"); + edtsAtom.add(leaf); + d = leaf.getOutputStream(); + + d.write(0); // version + // One byte that specifies the version of this header atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + + Edit[] elist = t.editList; + if (elist == null || elist.length == 0) { + d.writeUInt(1); // numberOfEntries + d.writeUInt(t.getTrackDuration(movieTimeScale)); // trackDuration + d.writeUInt(t.getFirstSampleTime(movieTimeScale)); // mediaTime + d.writeFixed16D16(1); // mediaRate + } else { + d.writeUInt(elist.length); // numberOfEntries + for (Edit edit : elist) { + d.writeUInt(edit.trackDuration); // trackDuration + d.writeUInt(edit.mediaTime); // mediaTime + d.writeUInt(edit.mediaRate); // mediaRate + } + } + + + /* Media Atom ========= */ + CompositeAtom mdiaAtom = new CompositeAtom("mdia"); + trakAtom.add(mdiaAtom); + + /* Media Header atom ------- + typedef struct { + byte version; + byte[3] flags; + mactimestamp creationTime; + mactimestamp modificationTime; + int timeScale; + int duration; + short language; + short quality; + } mediaHeaderAtom;*/ + leaf = new DataAtom("mdhd"); + mdiaAtom.add(leaf); + d = leaf.getOutputStream(); + d.write(0); // version + // One byte that specifies the version of this header atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // Three bytes of space for media header flags. Set this field to 0. 
+ + d.writeMacTimestamp(creationTime); // creationTime + // A 32-bit integer that specifies (in seconds since midnight, January + // 1, 1904) when the media atom was created. It is strongly recommended + // that this value should be specified using coordinated universal time + // (UTC). + + d.writeMacTimestamp(modificationTime); // modificationTime + // A 32-bit integer that specifies (in seconds since midnight, January + // 1, 1904) when the media atom was changed. It is strongly recommended + // that this value should be specified using coordinated universal time + // (UTC). + + d.writeUInt(t.mediaTimeScale); // timeScale + // A time value that indicates the time scale for this media—that is, + // the number of time units that pass per second in its time coordinate + // system. + + d.writeUInt(t.mediaDuration); // duration + // The duration of this media in units of its time scale. + + d.writeShort(0); // language; + // A 16-bit integer that specifies the language code for this media. + // See “Language Code Values” for valid language codes: + // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap4/chapter_5_section_2.html#//apple_ref/doc/uid/TP40000939-CH206-27005 + + d.writeShort(0); // quality + // A 16-bit integer that specifies the media’s playback quality—that is, + // its suitability for playback in a given environment. + + /* + * Media Handler Reference Atom ------- + */ + leaf = new DataAtom("hdlr"); + mdiaAtom.add(leaf); + /*typedef struct { + byte version; + byte[3] flags; + magic componentType; + magic componentSubtype; + magic componentManufacturer; + int componentFlags; + int componentFlagsMask; + cstring componentName; + ubyte[] extraData; + } handlerReferenceAtom; + */ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this handler information. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for handler information flags. 
Set this field to 0. + + d.writeType("mhlr"); // componentType + // A four-character code that identifies the type of the handler. Only + // two values are valid for this field: 'mhlr' for media handlers and + // 'dhlr' for data handlers. + + d.writeType(t.mediaType == MediaType.VIDEO ? "vide" : "soun"); // componentSubtype + // A four-character code that identifies the type of the media handler + // or data handler. For media handlers, this field defines the type of + // data—for example, 'vide' for video data or 'soun' for sound data. + // + // For data handlers, this field defines the data reference type—for + // example, a component subtype value of 'alis' identifies a file alias. + + + d.writeType(t.componentManufacturer); // componentManufacturer + + d.writeUInt(t.mediaType == MediaType.AUDIO ? 268435456L : 0); // componentFlags + // Reserved. Set to 0. + + d.writeUInt(t.mediaType == MediaType.AUDIO ? 65941 : 0); // componentFlagsMask + // Reserved. Set to 0. + + d.writeCString(t.mediaType == MediaType.AUDIO ? "Apple Sound Media Handler" : ""); // componentName (empty string) + // A (counted) string that specifies the name of the component—that is, + // the media handler used when this media was created. This field may + // contain a zero-length (empty) string. 
+ + /* Media Information atom ========= */ + writeMediaInformationAtoms(trackIndex, mdiaAtom); + } + + protected void writeMediaInformationAtoms(int trackIndex, CompositeAtom mdiaAtom) throws IOException { + Track t = tracks.get(trackIndex); + DataAtom leaf; + QTFFImageOutputStream d; + /* Media Information atom ========= */ + CompositeAtom minfAtom = new CompositeAtom("minf"); + mdiaAtom.add(minfAtom); + + /* Video or Audio media information atom -------- */ + switch (t.mediaType) { + case VIDEO -> writeVideoMediaInformationHeaderAtom(trackIndex, minfAtom); + case AUDIO -> writeSoundMediaInformationHeaderAtom(trackIndex, minfAtom); + default -> throw new UnsupportedOperationException("Media type " + t.mediaType + " not supported yet."); + } + + + /* Data Handler Reference Atom -------- */ + // The handler reference atom specifies the media handler component that + // is to be used to interpret the media’s data. The handler reference + // atom has an atom type value of 'hdlr'. + leaf = new DataAtom("hdlr"); + minfAtom.add(leaf); + /*typedef struct { + byte version; + byte[3] flags; + magic componentType; + magic componentSubtype; + magic componentManufacturer; + int componentFlags; + int componentFlagsMask; + cstring componentName; + ubyte[] extraData; + } handlerReferenceAtom; + */ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this handler information. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for handler information flags. Set this field to 0. + + d.writeType("dhlr"); // componentType + // A four-character code that identifies the type of the handler. Only + // two values are valid for this field: 'mhlr' for media handlers and + // 'dhlr' for data handlers. + + d.writeType("alis"); // componentSubtype + // A four-character code that identifies the type of the media handler + // or data handler. 
+ For media handlers, this field defines the type of + // data—for example, 'vide' for video data or 'soun' for sound data. + // For data handlers, this field defines the data reference type—for + // example, a component subtype value of 'alis' identifies a file alias. + + if (t.mediaType == MediaType.AUDIO) { + d.writeType(t.componentManufacturer); + } else { + d.writeUInt(0); + } + // componentManufacturer + // Reserved. Set to 0. + + d.writeUInt(t.mediaType == MediaType.AUDIO ? 268435457L : 0); // componentFlags + // Reserved. Set to 0. + + d.writeInt(t.mediaType == MediaType.AUDIO ? 65967 : 0); // componentFlagsMask + // Reserved. Set to 0. + + d.writeCString(t.componentName); // componentName (empty string) + // A (counted) string that specifies the name of the component—that is, + // the media handler used when this media was created. This field may + // contain a zero-length (empty) string. + + /* Data information atom ===== */ + CompositeAtom dinfAtom = new CompositeAtom("dinf"); + minfAtom.add(dinfAtom); + + /* Data reference atom ----- */ + // Data reference atoms contain tabular data that instructs the data + // handler component how to access the media’s data. + leaf = new DataAtom("dref"); + dinfAtom.add(leaf); + /*typedef struct { + ubyte version; + ubyte[3] flags; + int numberOfEntries; + dataReferenceEntry dataReference[numberOfEntries]; + } dataReferenceAtom; + + set { + dataRefSelfReference=1 // I am not sure if this is the correct value for this flag + } drefEntryFlags; + + typedef struct { + int size; + magic type; + byte version; + ubyte flag1; + ubyte flag2; + ubyte set drefEntryFlags flag3; + byte[size - 12] data; + } dataReferenceEntry; + */ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this data reference atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for data reference flags. Set this field to 0.
+ + d.writeInt(1); // numberOfEntries + // A 32-bit integer containing the count of data references that follow. + + d.writeInt(12); // dataReference.size + // A 32-bit integer that specifies the number of bytes in the data + // reference. + + d.writeType("alis"); // dataReference.type + // A 32-bit integer that specifies the type of the data in the data + // reference. Table 2-4 lists valid type values: + // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap2/chapter_3_section_4.html#//apple_ref/doc/uid/TP40000939-CH204-38840 + + d.write(0); // dataReference.version + // A 1-byte specification of the version of the data reference. + + d.write(0); // dataReference.flag1 + d.write(0); // dataReference.flag2 + d.write(0x1); // dataReference.flag3 + // A 3-byte space for data reference flags. There is one defined flag. + // + // Self reference + // This flag indicates that the media’s data is in the same file as + // the movie atom. On the Macintosh, and other file systems with + // multifork files, set this flag to 1 even if the data resides in + // a different fork from the movie atom. This flag’s value is + // 0x0001. + + + /* Sample Table atom ========= */ + writeSampleTableAtoms(trackIndex, minfAtom); + } + + protected void writeVideoMediaInformationHeaderAtom(int trackIndex, CompositeAtom minfAtom) throws IOException { + DataAtom leaf; + QTFFImageOutputStream d; + + /* Video media information atom -------- */ + leaf = new DataAtom("vmhd"); + minfAtom.add(leaf); + /*typedef struct { + byte version; + byte flag1; + byte flag2; + byte set vmhdFlags flag3; + short graphicsMode; + ushort[3] opcolor; + } videoMediaInformationHeaderAtom;*/ + d = leaf.getOutputStream(); + d.write(0); // version + // One byte that specifies the version of this header atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0x1); // flag[2] + // Three bytes of space for media header flags. 
+ // This is a compatibility flag that allows QuickTime to distinguish + // between movies created with QuickTime 1.0 and newer movies. You + // should always set this flag to 1, unless you are creating a movie + // intended for playback using version 1.0 of QuickTime. This flag’s + // value is 0x0001. + + d.writeShort(0x40); // graphicsMode (0x40 = DitherCopy) + // A 16-bit integer that specifies the transfer mode. The transfer mode + // specifies which Boolean operation QuickDraw should perform when + // drawing or transferring an image from one location to another. + // See “Graphics Modes” for a list of graphics modes supported by + // QuickTime: + // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap4/chapter_5_section_5.html#//apple_ref/doc/uid/TP40000939-CH206-18741 + + d.writeUShort(0); // opcolor[0] + d.writeUShort(0); // opcolor[1] + d.writeUShort(0); // opcolor[2] + // Three 16-bit values that specify the red, green, and blue colors for + // the transfer mode operation indicated in the graphics mode field. + } + + protected void writeSoundMediaInformationHeaderAtom(int trackIndex, CompositeAtom minfAtom) throws IOException { + DataAtom leaf; + QTFFImageOutputStream d; + + /* Sound media information header atom -------- */ + leaf = new DataAtom("smhd"); + minfAtom.add(leaf); + /*typedef struct { + ubyte version; + ubyte[3] flags; + short balance; + short reserved; + } soundMediaInformationHeaderAtom;*/ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this sound media information header atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for sound media information flags. Set this field to 0. + + d.writeFixed8D8(0); // balance + // A 16-bit integer that specifies the sound balance of this + // sound media. Sound balance is the setting that controls + // the mix of sound between the two speakers of a computer. 
+ // This field is normally set to 0. + // Balance values are represented as 16-bit, fixed-point + // numbers that range from -1.0 to +1.0. The high-order 8 + // bits contain the integer portion of the value; the + // low-order 8 bits contain the fractional part. Negative + // values weight the balance toward the left speaker; + // positive values emphasize the right channel. Setting the + // balance to 0 corresponds to a neutral setting. + + d.writeUShort(0); // reserved + // Reserved for use by Apple. Set this field to 0. + + } + + protected void writeSampleTableAtoms(int trackIndex, CompositeAtom minfAtom) throws IOException { + Track t = tracks.get(trackIndex); + DataAtom leaf; + QTFFImageOutputStream d; + + /* Sample Table atom ========= */ + CompositeAtom stblAtom = new CompositeAtom("stbl"); + minfAtom.add(stblAtom); + + /* Sample Description atom ------- */ + if (Objects.requireNonNull(t) instanceof VideoTrack) { + VideoTrack vt = (VideoTrack) Objects.requireNonNull(t); + writeVideoSampleDescriptionAtom(vt, stblAtom); + } else if (t instanceof AudioTrack) { + AudioTrack at = (AudioTrack) t; + writeAudioSampleDescriptionAtom(at, stblAtom); + } else { + writeGenericSampleDescriptionAtom(t, stblAtom); + } + + + /* Time to Sample atom ---- */ + // Time-to-sample atoms store duration information for a media’s + // samples, providing a mapping from a time in a media to the + // corresponding data sample. The time-to-sample atom has an atom type + // of 'stts'. + leaf = new DataAtom("stts"); + stblAtom.add(leaf); + /* + typedef struct { + byte version; + byte[3] flags; + int numberOfEntries; + timeToSampleTable timeToSampleTable[numberOfEntries]; + } timeToSampleAtom; + + typedef struct { + int sampleCount; + int sampleDuration; + } timeToSampleTable; + */ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this time-to-sample atom. 
+ + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for time-to-sample flags. Set this field to 0. + + d.writeUInt(t.timeToSamples.size()); // numberOfEntries + // A 32-bit integer containing the count of entries in the + // time-to-sample table. + + for (TimeToSampleGroup tts : t.timeToSamples) { + d.writeUInt(tts.getSampleCount()); // timeToSampleTable[0].sampleCount + // A 32-bit integer that specifies the number of consecutive + // samples that have the same duration. + + d.writeUInt(tts.getSampleDuration()); // timeToSampleTable[0].sampleDuration + // A 32-bit integer that specifies the duration of each + // sample. + } + /* sample to chunk atom -------- */ + // The sample-to-chunk atom contains a table that maps samples to chunks + // in the media data stream. By examining the sample-to-chunk atom, you + // can determine the chunk that contains a specific sample. + leaf = new DataAtom("stsc"); + stblAtom.add(leaf); + /* + typedef struct { + byte version; + byte[3] flags; + int numberOfEntries; + sampleToChunkTable sampleToChunkTable[numberOfEntries]; + } sampleToChunkAtom; + + typedef struct { + int firstChunk; + int samplesPerChunk; + int sampleDescription; + } sampleToChunkTable; + */ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this time-to-sample atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for time-to-sample flags. Set this field to 0. 
+ + int entryCount = 0; + long previousSampleCount = -1; + long previousSampleDescriptionId = -1; + for (Chunk c : t.chunks) { + if (c.sampleCount != previousSampleCount// + || c.sampleDescriptionId != previousSampleDescriptionId) { + previousSampleCount = c.sampleCount; + previousSampleDescriptionId = c.sampleDescriptionId; + entryCount++; + } + } + + d.writeInt(entryCount); // number of entries + // A 32-bit integer containing the count of entries in the sample-to-chunk table. + + int firstChunk = 1; + previousSampleCount = -1; + previousSampleDescriptionId = -1; + for (Chunk c : t.chunks) { + if (c.sampleCount != previousSampleCount// + || c.sampleDescriptionId != previousSampleDescriptionId) { + previousSampleCount = c.sampleCount; + previousSampleDescriptionId = c.sampleDescriptionId; + + d.writeUInt(firstChunk); // first chunk + // The first chunk number using this table entry. + + d.writeUInt(c.sampleCount); // samples per chunk + // The number of samples in each chunk. + + d.writeInt(c.sampleDescriptionId); // sample description + + // The identification number associated with the sample description for + // the sample. For details on sample description atoms, see “Sample + // Description Atoms.”: + // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap2/chapter_3_section_5.html#//apple_ref/doc/uid/TP40000939-CH204-25691 + } + firstChunk++; + } + // + /* sync sample atom -------- */ + if (t.syncSamples != null) { + leaf = new DataAtom("stss"); + stblAtom.add(leaf); + /* + typedef struct { + byte version; + byte[3] flags; + int numberOfEntries; + syncSampleTable syncSampleTable[numberOfEntries]; + } syncSampleAtom; + + typedef struct { + int number; + } syncSampleTable; + */ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this time-to-sample atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for time-to-sample flags. Set this field to 0. 
+ + d.writeUInt(t.syncSamples.size()); + // Number of entries + //A 32-bit integer containing the count of entries in the sync sample table. + + for (Long number : t.syncSamples) { + d.writeUInt(number); + // Sync sample table A table of sample numbers; each sample + // number corresponds to a key frame. + } + } + + + /* sample size atom -------- */ + // The sample size atom contains the sample count and a table giving the + // size of each sample. This allows the media data itself to be + // unframed. The total number of samples in the media is always + // indicated in the sample count. If the default size is indicated, then + // no table follows. + leaf = new DataAtom("stsz"); + stblAtom.add(leaf); + /* + typedef struct { + byte version; + byte[3] flags; + int sampleSize; + int numberOfEntries; + sampleSizeTable sampleSizeTable[numberOfEntries]; + } sampleSizeAtom; + + typedef struct { + int size; + } sampleSizeTable; + */ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this time-to-sample atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for time-to-sample flags. Set this field to 0. + + int sampleUnit = t.mediaType == MediaType.AUDIO// + && ((AudioTrack) t).soundCompressionId != -2 // + ? ((AudioTrack) t).soundSampleSize / 8 * ((AudioTrack) t).soundNumberOfChannels// + : 1; + if (t.sampleSizes.size() == 1) { + d.writeUInt(t.sampleSizes.get(0).getSampleLength() / sampleUnit); // sample size + // A 32-bit integer specifying the sample size. If all the samples are + // the same size, this field contains that size value. If this field is + // set to 0, then the samples have different sizes, and those sizes are + // stored in the sample size table. + + d.writeUInt(t.sampleSizes.get(0).getSampleCount()); // number of entries + // A 32-bit integer containing the count of entries in the sample size + // table. 
+ + } else { + d.writeUInt(0); // sample size + // A 32-bit integer specifying the sample size. If all the samples are + // the same size, this field contains that size value. If this field is + // set to 0, then the samples have different sizes, and those sizes are + // stored in the sample size table. + + + long count = 0; + for (SampleSizeGroup s : t.sampleSizes) { + count += s.sampleCount; + } + d.writeUInt(count); // number of entries + // A 32-bit integer containing the count of entries in the sample size + // table. + + for (SampleSizeGroup s : t.sampleSizes) { + long sampleSize = s.getSampleLength() / sampleUnit; + for (int i = 0; i < s.sampleCount; i++) { + d.writeUInt(sampleSize); // sample size + // The size field contains the size, in bytes, of the sample in + // question. The table is indexed by sample number—the first entry + // corresponds to the first sample, the second entry is for the + // second sample, and so on. + } + } + } + // + /* chunk offset atom -------- */ + // The chunk-offset table gives the index of each chunk into the + // QuickTime Stream. There are two variants, permitting the use of + // 32-bit or 64-bit offsets. The latter is useful when managing very + // large movies. Only one of these variants occurs in any single + // instance of a sample table atom. + if (t.chunks.isEmpty() || t.chunks.get(t.chunks.size() - 1).getChunkOffset() <= 0xffffffffL) { + /* 32-bit chunk offset atom -------- */ + leaf = new DataAtom("stco"); + stblAtom.add(leaf); + /* + typedef struct { + byte version; + byte[3] flags; + int numberOfEntries; + chunkOffsetEntry[numberOfEntries] chunkOffsetTable; + } chunkOffsetAtom; + + typedef struct { + int offset; + } chunkOffsetEntry; + */ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this time-to-sample atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for time-to-sample flags. Set this field to 0. 
+ + d.writeUInt(t.chunks.size()); // number of entries + // A 32-bit integer containing the count of entries in the chunk + // offset table. + for (Chunk c : t.chunks) { + d.writeUInt(c.getChunkOffset() + mdatOffset); // offset + // The offset contains the byte offset from the beginning of the + // data stream to the chunk. The table is indexed by chunk + // number—the first table entry corresponds to the first chunk, + // the second table entry is for the second chunk, and so on. + } + } else { + /* 64-bit chunk offset atom -------- */ + leaf = new DataAtom("co64"); + stblAtom.add(leaf); + /* + typedef struct { + byte version; + byte[3] flags; + int numberOfEntries; + chunkOffset64Entry[numberOfEntries] chunkOffset64Table; + } chunkOffset64Atom; + + typedef struct { + long offset; + } chunkOffset64Entry; + */ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this time-to-sample atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for time-to-sample flags. Set this field to 0. + + d.writeUInt(t.chunks.size()); // number of entries + // A 32-bit integer containing the count of entries in the chunk + // offset table. + + for (Chunk c : t.chunks) { + d.writeLong(c.getChunkOffset()); // offset + // The offset contains the byte offset from the beginning of the + // data stream to the chunk. The table is indexed by chunk + // number—the first table entry corresponds to the first chunk, + // the second table entry is for the second chunk, and so on. + } + } + } + + private void writeGenericSampleDescriptionAtom(Track t, CompositeAtom stblAtom) { + // empty, for now + } + + /** + * Writes a version of the movie which is optimized for the web into the + * specified output file.

This method finishes the movie and then copies + * its content into the specified file. The web-optimized file starts with + * the movie header. + * + * @param outputFile The output file + * @param compressHeader Whether the movie header shall be compressed. + */ + public void toWebOptimizedMovie(File outputFile, boolean compressHeader) throws IOException { + finish(); + long originalMdatOffset = mdatAtom.getOffset(); + CompositeAtom originalMoovAtom = moovAtom; + mdatOffset = 0; + + ImageOutputStream originalOut = out; + try { + out = null; + + if (compressHeader) { + ByteArrayImageOutputStream buf = new ByteArrayImageOutputStream(); + int maxIteration = 5; + long compressionHeadersSize = 40 + 8; + long headerSize = 0; + long freeSize = 0; + while (true) { + mdatOffset = compressionHeadersSize + headerSize + freeSize; + buf.reset(); + DeflaterOutputStream deflater = new DeflaterOutputStream(new ImageOutputStreamAdapter(buf)); + out = new MemoryCacheImageOutputStream(deflater); + writeEpilog(); + out.close(); + deflater.close(); + + if (buf.size() > headerSize + freeSize && --maxIteration > 0) { + if (headerSize != 0) { + freeSize = Math.max(freeSize, buf.size() - headerSize - freeSize); + } + headerSize = buf.size(); + } else { + freeSize = headerSize + freeSize - buf.size(); + headerSize = buf.size(); + break; + } + } + + if (buf.size() == 0) { + compressHeader = false; + System.err.println("WARNING MP4Writer failed to compress header."); + } else { + out = new FileImageOutputStream(outputFile); + writeProlog(); + + // 40 bytes compression headers + QTFFImageOutputStream daos = new QTFFImageOutputStream(out); + daos.writeUInt(headerSize + 40); + daos.writeType("moov"); + + daos.writeUInt(headerSize + 32); + daos.writeType("cmov"); + + daos.writeUInt(12); + daos.writeType("dcom"); + daos.writeType("zlib"); + + daos.writeUInt(headerSize + 12); + daos.writeType("cmvd"); + daos.writeUInt(originalMoovAtom.size()); + + daos.write(buf.getBuffer(), 0, buf.size()); + + // 
8 bytes "free" atom + free data + daos.writeUInt(freeSize + 8); + daos.writeType("free"); + for (int i = 0; i < freeSize; i++) { + daos.write(0); + } + } + } + if (!compressHeader) { + out = new FileImageOutputStream(outputFile); + mdatOffset = moovAtom.size(); + writeProlog(); + writeEpilog(); + } + + + byte[] buf = new byte[4096]; + originalOut.seek((originalMdatOffset)); + for (long count = 0, n = mdatAtom.size(); count < n; ) { + int read = originalOut.read(buf, 0, (int) Math.min(buf.length, n - count)); + out.write(buf, 0, read); + count += read; + } + out.close(); + } finally { + mdatOffset = 0; + moovAtom = originalMoovAtom; + out = originalOut; + } + } + + protected void writeVideoSampleDescriptionAtom(VideoTrack t, CompositeAtom stblAtom) throws IOException { + CompositeAtom leaf; + QTFFImageOutputStream d; + + /* Sample Description box ------- */ + leaf = new CompositeAtom("stsd"); + stblAtom.add(leaf); + /* + typedef struct { + byte version; + byte[3] flags; + int numberOfEntries; + SampleDescriptionEntry table[numberOfEntries]; + } SampleDescriptionBox; + + typedef struct { + int size; + magic type; + byte[6] reserved; // six bytes that must be zero + short dataReferenceIndex; // A 16-bit integer that contains the index of the data reference to + // use to retrieve data associated with samples that use this sample description. + // Data references are stored in data reference atoms. + ubyte[size - 16] data; + } SampleDescriptionEntry; + + typedef struct { + uint16 predefined; // always 0 + uint16 reserved; // always 0 + magic vendor; // A 32-bit integer that specifies the developer of the + // compressor that generated the compressed data. Often + // this field contains 'appl' to indicate Apple + // Computer, Inc. + uint temporalQuality;// A 32-bit integer containing a value from 0 to 1023 + // indicating the degree of temporal compression. 
+ uint spatialQuality;// A 32-bit integer containing a value from 0 to 1024 + // indicating the degree of spatial compression. + uint16 width; + uint16 height; + fixed16d16 horizontalResolution; + fixed16d16 verticalResolution; + uint32 reserved; // always 0 + uint16 frameCount;//how many frames of compressed data are stored in each sample. + // Usually set to 1. + pstring32 compressorname; + uint8 reserved; // always 0 + uint8 depth; + int16 predefined3; // should always be -1 + uint8 undocumentedByte[predefined3!=-1]; + SampleDescriptionExtension[] extendedData; + } VisualSampleEntry; + */ + d = leaf.getOutputStream(); + d.write(0); // version + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + d.writeInt(1); // number of Entries + + // Sample Description Entry + // ------------------------ + long sizeStreamPosition = d.getStreamPosition(); + d.writeInt(0); // sampleDescriptionTable[0].size + d.writeType(t.mediaCompressionType); // sampleDescriptionTable[0].type + d.write(new byte[6]); // sampleDescriptionTable[0].reserved + d.writeShort(1); // sampleDescriptionTable[0].dataReferenceIndex + + // Visual Sample Entry + // ------------------------------ + + d.writeShort(0); // predefined + d.writeShort(0); // reserved + d.writeType(t.componentManufacturer);//vendor + d.writeInt(0); // temporalQuality + d.writeInt(MathUtil.clamp((int) (1024 * (1 - t.videoQuality)), 0, 1024)); // spatialQuality + d.writeUShort((int) t.width); // width + d.writeUShort((int) t.height); // height + d.writeFixed16D16(72.0); // horizontalResolution + d.writeFixed16D16(72.0); // verticalResolution + d.writeInt(0);//reserved + d.writeShort(1); // frameCount + d.writePString(t.mediaCompressorName, 32); + d.write(0); // reserved + d.write(t.videoDepth); // depth + d.writeShort(-1); // predefined3 + + if (t.avcDecoderConfigurationRecord != null) { + writeMandatoryAvcAtoms(t, leaf); + } + + long size = (d.getStreamPosition() - sizeStreamPosition); + d.mark(); + 
d.seek(sizeStreamPosition); + d.writeInt((int) size); + d.reset(); + } + + /** + * Writes the avcC atom. + * + * @param t the track + * @param parent the composite atom + * @throws IOException on IO failure + */ + private void writeMandatoryAvcAtoms(VideoTrack t, CompositeAtom parent) throws IOException { + DataAtom leaf = new DataAtom("avcC"); + parent.add(leaf); + /* + typedef struct { + ubyte configurationVersion; // always = 1 + ubyte AVCProfileIndication; // Contains the profile code as defined in ISO/IEC 14496-10. + // profile_compatibility is a byte defined exactly the same as the byte which occurs + // between the profile_IDC and level_IDC in a sequence parameter set (SPS), + // as defined in ISO/IEC 14496-10 + ubyte profile_compatibility; // + ubyte AVCLevelIndication; // Contains the level code as defined in ISO/IEC 14496-10. + uint6 reserved1; // always 111111 + uint2 lengthSizeMinusOne; // Indicates the length in bytes of the NALUnitLength field in an AVC + // video sample or AVC parameter set sample of the associated stream minus one. + // For example, a size of one byte is indicated with a value of 0. The value of this field + // shall be one of 0, 1, or 3 corresponding to a length encoded with 1, 2, or 4 bytes, + // respectively. + uint3 reserved2; // always 111 + uint5 numOfSequenceParameterSets; // numOfSequenceParameterSets indicates the number of SPSs that are used as the + // initial set of SPSs for decoding the AVC elementary stream. + AvcSequenceParameterSet[numOfSequenceParameterSets] sequenceParameterSet; + uint8 numOfPictureParameterSets; // Indicates the number of picture parameter sets (PPSs) that are used as the + // initial set of PPSs for decoding the AVC elementary stream. 
+ AvcPictureParameterSet[numOfPictureParameterSets] pictureParameterSet + } AvcDecoderConfigurationRecord; + */ + var d = leaf.getOutputStream(); + AvcDecoderConfigurationRecord r = t.avcDecoderConfigurationRecord; + d.writeByte(1);//version + d.writeByte(r.avcProfileIndication()); + d.writeByte(r.profileCompatibility()); + d.writeByte(r.avcLevelIndication()); + d.writeByte(0b111111_00 | (r.nalLengthSize() - 1)); + + Set spsList = r.sequenceParameterSetNALUnit(); + int n = Math.min(spsList.size(), (1 << 5) - 1); + d.writeByte(0b111_00000 | n); + Iterator it = spsList.iterator(); + for (int i = 0; i < n; i++) { + byte[] sps = it.next().getArray(); + d.writeShort((short) (sps.length + 1)); + d.writeByte((byte) 0x67); + d.write(sps); + } + + Set ppsList = r.pictureParameterSetNALUnit(); + n = Math.min(ppsList.size(), (1 << 8) - 1); + d.writeByte(n); + it = ppsList.iterator(); + for (int i = 0; i < n; i++) { + byte[] pps = it.next().getArray(); + d.writeShort((short) (pps.length + 1)); + d.writeByte((byte) 0x68); + d.write(pps); + } + + /* colr atom */ + /*---------*/ + /* + typedef struct { + magic colorParameterType; // An unsigned 32-bit field. + // The currently defined types are 'nclc' for video, and 'prof' for print. + uint16 primariesIndex; // A 16-bit unsigned integer containing an index into a table specifying the + // CIE 1931 xy chromaticity coordinates of the white point and the red, green, + // and blue primaries. + // Index 1 + // Recommendation ITU-R BT.709 white x = 0.3127 y = 0.3290 (CIE III. D65) red x = 0.640 y = 0.330 green x = 0.300 y = 0.600 blue x = 0.150 y = 0.060 + + uint16 transferFunctionIndex; // A 16-bit unsigned integer containing an index into a table specifying the + // nonlinear transfer function coefficients used to translate between RGB color space + // values and Y´CbCr values. 
+ // Index 1 + // Recommendation ITU-R BT.709-2, SMPTE 274M-1995, 296M-1997, 293M-1996, 170M-1994 An image that shows two formulas for transfer functions for index 1. The first formula is E’ with subscript W is equal to four point five zero zero for zero is less than or equal to W is less than zero point zero one eight. The second formula is E’ with subscript W is equal to one point zero nine nine W raised to the power zero point four five, minus zero point zero nine nine for zero point zero one eight is less than or equal to W is less than or equal to one. + + uint16 matrixIndex; // A 16-bit unsigned integer containing an index into a table specifying the + // transformation matrix coefficients used to translate between RGB color space values + // and Y´CbCr values. + // Index 1 + // Recommendation ITU-R BT.709-2 (1125/60/2:1 only), SMPTE 274M-1995, 296M-1997 An image that shows the formula for matrix index 1. The formula is E’ with subscript Y is equal to zero point seven one five two E’ with subscript G, plus zero point zero seven two two E’ with subscript B, plus zero point two one two six E’ with subscript R. + // https://developer.apple.com/documentation/quicktime-file-format/color_parameter_atom + + } videoColrSampleDescriptionExtensionAtom; + */ + /* + leaf = new DataAtom("colr"); + parent.add(leaf); + d = leaf.getOutputStream(); + d.writeType("nclc"); + d.writeUShort(1); + d.writeUShort(1); + d.writeUShort(1); + */ + + /* pasp atom */ + /*---------*/ + /* + typedef struct { + uint32 hSpacing; // An unsigned 32-bit integer specifying the horizontal spacing of pixels, + // such as luma sampling instants for Y´CbCr or YUV video. + uint32 vSpacing; // An unsigned 32-bit integer specifying the vertical spacing of pixels, + // such as video picture lines. 
+ } videoPaspSampleDescriptionExtensionAtom; + https://developer.apple.com/documentation/quicktime-file-format/pixel_aspect_ratio + */ + Rational pixelAspectRatio = t.format.get(VideoFormatKeys.PixelAspectRatioKey, Rational.ONE); + if (!pixelAspectRatio.equals(Rational.ONE)) { + leaf = new DataAtom("pasp"); + parent.add(leaf); + d = leaf.getOutputStream(); + d.writeUInt(pixelAspectRatio.getNumerator()); + d.writeUInt(pixelAspectRatio.getDenominator()); + } + } + + /** + * Color table atoms define a list of preferred colors for displaying + * the movie on devices that support only 256 colors. The list may + * contain up to 256 colors. These optional atoms have a type value of + * 'ctab'. The color table atom contains a Macintosh color table data + * structure. + * + * @param stblAtom + * @throws IOException + */ + protected void writeVideoColorTableAtom(VideoTrack t, CompositeAtom stblAtom) throws IOException { + DataAtom leaf; + QTFFImageOutputStream d; + leaf = new DataAtom("ctab"); + stblAtom.add(leaf); + + d = leaf.getOutputStream(); + + d.writeUInt(0); // Color table seed. A 32-bit integer that must be set to 0. + d.writeUShort(0x8000); // Color table flags. A 16-bit integer that must be set to 0x8000. + IndexColorModel videoColorTable = t.videoColorTable; + d.writeUShort(videoColorTable.getMapSize() - 1); + // Color table size. A 16-bit integer that indicates the number of + // colors in the following color array. This is a zero-relative value; + // setting this field to 0 means that there is one color in the array. + + for (int i = 0, n = videoColorTable.getMapSize(); i < n; ++i) { + // An array of colors. Each color is made of four unsigned 16-bit integers. + // The first integer must be set to 0, the second is the red value, + // the third is the green value, and the fourth is the blue value. 
+ d.writeUShort(0); + d.writeUShort((videoColorTable.getRed(i) << 8) | videoColorTable.getRed(i)); + d.writeUShort((videoColorTable.getGreen(i) << 8) | videoColorTable.getGreen(i)); + d.writeUShort((videoColorTable.getBlue(i) << 8) | videoColorTable.getBlue(i)); + } + } + + protected void writeAudioSampleDescriptionAtom(AudioTrack t, CompositeAtom stblAtom) throws IOException { + // TO DO + DataAtom leaf; + QTFFImageOutputStream d; + + /* Sample Description atom ------- */ + // The sample description atom stores information that allows you to + // decode samples in the media. The data stored in the sample + // description varies, depending on the media type. For example, in the + // case of video media, the sample descriptions are image description + // structures. The sample description information for each media type is + // explained in “Media Data Atom Types”: + // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap3/chapter_4_section_1.html#//apple_ref/doc/uid/TP40000939-CH205-SW1 + leaf = new DataAtom("stsd"); + stblAtom.add(leaf); + /* + typedef struct { + byte version; + byte[3] flags; + int numberOfEntries; + soundSampleDescriptionEntry sampleDescriptionTable[numberOfEntries]; + } soundSampleDescriptionAtom; + + typedef struct { + int size; + magic type; + byte[6] reserved; + short dataReferenceIndex; + soundSampleDescription data; + } soundSampleDescriptionEntry; + + typedef struct { + ushort version; + ushort revisionLevel; + uint vendor; + ushort numberOfChannels; + ushort sampleSize; + short compressionId; + ushort packetSize; + fixed16d16 sampleRate; + byte[] extendedData; + } soundSampleDescription; + */ + d = leaf.getOutputStream(); + + // soundSampleDescriptionAtom: + // --------------------------- + d.write(0); // version + // A 1-byte specification of the version of this sample description atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for sample description flags. 
Set this field to 0. + + d.writeInt(1); // number of Entries + // A 32-bit integer containing the number of sample descriptions that follow. + + // soundSampleDescriptionEntry: + // ---------------------------- + // A 32-bit integer indicating the number of bytes in the sample description. + d.writeUInt(4 + 12 + 20 + 16 + t.stsdExtensions.length); // sampleDescriptionTable[0].size + + // Common header: 12 bytes + d.writeType(t.mediaCompressionType); // sampleDescriptionTable[0].type + // A 32-bit integer indicating the format of the stored data. + // This depends on the media type, but is usually either the + // compression format or the media type. + + d.write(new byte[6]); // sampleDescriptionTable[0].reserved + // Six bytes that must be set to 0. + + d.writeUShort(1); // sampleDescriptionTable[0].dataReferenceIndex + // A 16-bit integer that contains the index of the data + // reference to use to retrieve data associated with samples + // that use this sample description. Data references are stored + // in data reference atoms. + + // Sound Sample Description (Version 0) 20 bytes + // ------------------------ + + d.writeUShort(1); // version + // A 16-bit integer that holds the sample description version (currently 0 or 1). + + d.writeUShort(0); // revisionLevel + // A 16-bit integer that must be set to 0. + + d.writeUInt(0); // vendor + // A 32-bit integer that must be set to 0. + + d.writeUShort(t.soundNumberOfChannels); // numberOfChannels + // A 16-bit integer that indicates the number of sound channels used by + // the sound sample. Set to 1 for monaural sounds, 2 for stereo sounds. + // Higher numbers of channels are not supported. + + d.writeUShort(t.soundSampleSize); // sampleSize (bits) + // A 16-bit integer that specifies the number of bits in each + // uncompressed sound sample. Allowable values are 8 or 16. Formats + // using more than 16 bits per sample set this field to 16 and use sound + // description version 1. 
+ + d.writeUShort(t.soundCompressionId); // compressionId + // XXX - This must be set to -1, or the QuickTime player won't accept this file. + // A 16-bit integer that must be set to 0 for version 0 sound + // descriptions. This may be set to –2 for some version 1 sound + // descriptions; see “Redefined Sample Tables” (page 135). + + d.writeUShort(0); // packetSize + // A 16-bit integer that must be set to 0. + + d.writeFixed16D16(t.soundSampleRate); // sampleRate + // A 32-bit unsigned fixed-point number (16.16) that indicates the rate + // at which the sound samples were obtained. The integer portion of this + // number should match the media’s time scale. Many older version 0 + // files have values of 22254.5454 or 11127.2727, but most files have + // integer values, such as 44100. Sample rates greater than 2^16 are not + // supported. + + // Sound Sample Description Additional fields (only in Version 1) 16 bytes + // ------------------------ + d.writeUInt(t.soundSamplesPerPacket); // samplesPerPacket + // A 32-bit integer. + // The number of uncompressed samples generated by a + // compressed sample (an uncompressed sample is one sample + // from each channel). This is also the sample duration, + // expressed in the media’s timescale, where the + // timescale is equal to the sample rate. For + // uncompressed formats, this field is always 1. + // + d.writeUInt(t.soundBytesPerPacket); // bytesPerPacket + // A 32-bit integer. + // For uncompressed audio, the number of bytes in a + // sample for a single channel. This replaces the older + // sampleSize field, which is set to 16. + // This value is calculated by dividing the frame size + // by the number of channels. The same calculation is + // performed to calculate the value of this field for + // compressed audio, but the result of the calculation + // is not generally meaningful for compressed audio. + // + d.writeUInt(t.soundBytesPerFrame); // bytesPerFrame + // A 32-bit integer. 
+ // The number of bytes in a sample: for uncompressed + // audio, an uncompressed frame; for compressed audio, a + // compressed frame. This can be calculated by + // multiplying the bytes per packet field by the number + // of channels. + // + d.writeUInt(t.soundBytesPerSample); // bytesPerSample + // A 32-bit integer. + // The size of an uncompressed sample in bytes. This is + // set to 1 for 8-bit audio, 2 for all other cases, even + // if the sample size is greater than 2 bytes. + + // Write stsd Extensions + // Extensions must be atom-based fields + // ------------------------------------ + d.write(t.stsdExtensions); + } +} diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/mp4/MP4Writer.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/mp4/MP4Writer.java new file mode 100755 index 0000000..37012b8 --- /dev/null +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/mp4/MP4Writer.java @@ -0,0 +1,492 @@ +/* + * @(#)MP4Writer.java + * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. 
/*
 * @(#)MP4Writer.java
 * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License.
 */
package org.monte.media.mp4;

import org.monte.media.av.Buffer;
import org.monte.media.av.Codec;
import org.monte.media.av.Format;
import org.monte.media.av.FormatKeys;
import org.monte.media.av.FormatKeys.MediaType;
import org.monte.media.av.MovieWriter;
import org.monte.media.av.Registry;
import org.monte.media.av.codec.video.VideoFormatKeys;
import org.monte.media.math.Rational;
import org.monte.media.qtff.AbstractQTFFMovieStream;
import org.monte.media.qtff.AvcDecoderConfigurationRecord;

import javax.imageio.stream.ImageOutputStream;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;

import static org.monte.media.av.BufferFlag.DISCARD;
import static org.monte.media.av.BufferFlag.KEYFRAME;
import static org.monte.media.av.FormatKeys.EncodingKey;
import static org.monte.media.av.FormatKeys.FrameRateKey;
import static org.monte.media.av.FormatKeys.MIME_JAVA;
import static org.monte.media.av.FormatKeys.MIME_MP4;
import static org.monte.media.av.FormatKeys.MIME_QUICKTIME;
import static org.monte.media.av.FormatKeys.MediaTypeKey;
import static org.monte.media.av.FormatKeys.MimeTypeKey;
import static org.monte.media.av.codec.audio.AudioFormatKeys.ByteOrderKey;
import static org.monte.media.av.codec.audio.AudioFormatKeys.ChannelsKey;
import static org.monte.media.av.codec.audio.AudioFormatKeys.ENCODING_PCM_SIGNED;
import static org.monte.media.av.codec.audio.AudioFormatKeys.ENCODING_PCM_UNSIGNED;
import static org.monte.media.av.codec.audio.AudioFormatKeys.FrameSizeKey;
import static org.monte.media.av.codec.audio.AudioFormatKeys.SampleRateKey;
import static org.monte.media.av.codec.audio.AudioFormatKeys.SampleSizeInBitsKey;
import static org.monte.media.av.codec.audio.AudioFormatKeys.SignedKey;
import static org.monte.media.av.codec.video.VideoFormatKeys.CompressorNameKey;
import static org.monte.media.av.codec.video.VideoFormatKeys.DataClassKey;
import static org.monte.media.av.codec.video.VideoFormatKeys.DepthKey;
import static org.monte.media.av.codec.video.VideoFormatKeys.ENCODING_BUFFERED_IMAGE;
import static org.monte.media.av.codec.video.VideoFormatKeys.HeightKey;
import static org.monte.media.av.codec.video.VideoFormatKeys.PaletteKey;
import static org.monte.media.av.codec.video.VideoFormatKeys.QualityKey;
import static org.monte.media.av.codec.video.VideoFormatKeys.WidthKey;

/**
 * Supports writing of time-based video and audio data into a MP4 movie
 * file (.MP4) without the need of native code.
 *
 * @author Werner Randelshofer
 */
public class MP4Writer extends MP4OutputStream implements MovieWriter {

    /** Per-track encoding state. */
    private static class TrackEncoder {
        /**
         * The codec used to encode buffers written to this track,
         * or null if no codec has been created yet.
         */
        public Codec codec;
        public Buffer outputBuffer;
        public Buffer inputBuffer;
    }

    /**
     * Lazily grown list of per-track encoder states; the list index is the
     * track index. The generic type parameter was restored here; it had been
     * lost in a previous revision.
     */
    private final List<TrackEncoder> trackEncoders = new ArrayList<>();

    /** The file format produced by this writer. */
    public final static Format MP4 = new Format(MediaTypeKey, MediaType.FILE, MimeTypeKey, MIME_MP4);

    /**
     * Creates a new MP4 writer.
     *
     * @param file the output file
     */
    public MP4Writer(File file) throws IOException {
        super(file);
    }

    /**
     * Creates a new MP4 writer.
     *
     * @param out the output stream.
     */
    public MP4Writer(ImageOutputStream out) throws IOException {
        super(out);
    }

    @Override
    public Format getFileFormat() throws IOException {
        return MP4;
    }

    @Override
    public Format getFormat(int track) {
        return tracks.get(track).format;
    }

    /**
     * Adds a track.
     *
     * @param fmt The format of the track. Video formats must specify
     *            FrameRateKey, WidthKey and HeightKey; audio formats must
     *            specify SampleRateKey.
     * @return The track number.
     * @throws IOException if the media type is not VIDEO or AUDIO.
     */
    @Override
    public int addTrack(Format fmt) throws IOException {
        if (fmt.get(MediaTypeKey) == MediaType.VIDEO) {
            // NOTE(review): the time scale is derived from numerator*denominator
            // capped at 6000, and the sync interval from the denominator —
            // this mirrors the historic QuickTime writer; verify for
            // non-integer frame rates.
            int t = addVideoTrack(fmt.get(EncodingKey),
                    fmt.get(CompressorNameKey, AbstractQTFFMovieStream.DEFAULT_COMPONENT_NAME),
                    Math.min(6000, fmt.get(FrameRateKey).getNumerator() * fmt.get(FrameRateKey).getDenominator()),
                    fmt.get(WidthKey), fmt.get(HeightKey), fmt.get(DepthKey, 24),
                    (int) fmt.get(FrameRateKey).getDenominator(), fmt);
            setCompressionQuality(t, fmt.get(QualityKey, 1.0f));
            return t;
        } else if (fmt.get(MediaTypeKey) == MediaType.AUDIO) {
            // Fill in unspecified values with sensible defaults.
            int sampleSizeInBits = fmt.get(SampleSizeInBitsKey, 16);
            ByteOrder bo = fmt.get(ByteOrderKey, ByteOrder.BIG_ENDIAN);
            boolean signed = fmt.get(SignedKey, true);
            String encoding = fmt.get(EncodingKey, null);
            Rational frameRate = fmt.get(FrameRateKey, fmt.get(SampleRateKey));
            int channels = fmt.get(ChannelsKey, 1);
            int frameSize = fmt.get(FrameSizeKey, (sampleSizeInBits + 7) / 8);
            // Derive the four-character audio encoding when none was given.
            if (encoding == null || encoding.length() != 4) {
                if (signed) {
                    encoding = bo == ByteOrder.BIG_ENDIAN ? "twos" : "sowt";
                } else {
                    encoding = "raw ";
                }
            }

            return addAudioTrack(encoding,
                    fmt.get(SampleRateKey).longValue(),
                    fmt.get(SampleRateKey).doubleValue(),
                    channels,
                    sampleSizeInBits,
                    false, // FIXME - We should support compressed formats
                    fmt.get(SampleRateKey).divide(frameRate).intValue(),
                    frameSize,
                    signed,
                    bo);
        } else {
            throw new IOException("Unsupported media type:" + fmt.get(MediaTypeKey));
        }
    }

    /**
     * Adds a video track.
     *
     * @param format    The MP4 video format.
     * @param timeScale The media timescale. This is typically the frame rate.
     *                  If the frame rate is not an integer fraction of a second, specify a
     *                  multiple of the frame rate and specify a correspondingly multiplied
     *                  sampleDuration when writing frames. For example, for a rate of 23.976 fps
     *                  specify a timescale of 23976 and multiply the sampleDuration of a video
     *                  frame by 1000.
     * @param width     The width of a video image. Must be larger than 0.
     * @param height    The height of a video image. Must be larger than 0.
     * @return Returns the track index.
     * @throws IllegalArgumentException if the width or the height is smaller
     *                                  than 1.
     */
    public int addVideoTrack(Format format, long timeScale, int width, int height) throws IOException {
        int tr = addVideoTrack(format.get(EncodingKey), format.get(CompressorNameKey), timeScale, width, height, 24, 30, format);
        setVideoColorTable(tr, format.get(PaletteKey));
        return tr;
    }

    /**
     * Adds a video track.
     *
     * @param format       The MP4 video format; must specify FrameRateKey.
     * @param width        The width of a video image. Must be larger than 0.
     * @param height       The height of a video image. Must be larger than 0.
     * @param depth        The bit depth of a video image.
     * @param syncInterval The interval between sync samples (keyframes).
     * @return Returns the track index.
     * @throws IllegalArgumentException if the width or the height is smaller
     *                                  than 1.
     */
    public int addVideoTrack(Format format, int width, int height, int depth, int syncInterval) throws IOException {
        int tr = addVideoTrack(format.get(EncodingKey), format.get(CompressorNameKey),
                format.get(FrameRateKey).getDenominator() * format.get(FrameRateKey).getNumerator(),
                width, height, depth, syncInterval, format);
        setVideoColorTable(tr, format.get(PaletteKey));
        return tr;
    }

    /**
     * Adds an audio track, and configures it using an {@code AudioFormat}
     * object from the javax.sound API.
     * <p>
     * Use this method for writing audio data from an {@code AudioInputStream}
     * into a MP4 Movie file.
     *
     * @param format The javax.sound audio format.
     * @return Returns the track index.
     * @throws IllegalArgumentException if the sample size or encoding is not
     *                                  supported.
     */
    public int addAudioTrack(javax.sound.sampled.AudioFormat format) throws IOException {
        ensureStarted();
        String qtAudioFormat;
        double sampleRate = format.getSampleRate();
        long timeScale = (int) Math.floor(sampleRate);
        int sampleSizeInBits = format.getSampleSizeInBits();
        int numberOfChannels = format.getChannels();
        ByteOrder byteOrder = format.isBigEndian() ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN;
        int frameDuration = (int) (format.getSampleRate() / format.getFrameRate());
        int frameSize = format.getFrameSize();
        // The "vbr" property marks variable bit rate (compressed) audio.
        boolean isCompressed = format.getProperty("vbr") != null && ((Boolean) format.getProperty("vbr")).booleanValue();
        boolean signed = false;
        javax.sound.sampled.AudioFormat.Encoding enc = format.getEncoding();
        // Map the javax.sound encoding onto the four-character QuickTime
        // audio format identifier.
        if (enc.equals(javax.sound.sampled.AudioFormat.Encoding.ALAW)) {
            qtAudioFormat = "alaw";
            if (sampleSizeInBits != 8) {
                throw new IllegalArgumentException("Sample size of 8 for ALAW required:" + sampleSizeInBits);
            }
        } else if (javax.sound.sampled.AudioFormat.Encoding.PCM_SIGNED.equals(enc)) {
            qtAudioFormat = switch (sampleSizeInBits) {
                case 8 ->// Requires conversion to PCM_UNSIGNED!
                        "raw ";
                case 16 -> (byteOrder == ByteOrder.BIG_ENDIAN) ? "twos" : "sowt";
                case 24 -> "in24";
                case 32 -> "in32";
                default ->
                        throw new IllegalArgumentException("Unsupported sample size for PCM_SIGNED:" + sampleSizeInBits);
            };
        } else if (javax.sound.sampled.AudioFormat.Encoding.PCM_UNSIGNED.equals(enc)) {
            qtAudioFormat = switch (sampleSizeInBits) {
                case 8 -> "raw ";
                case 16 ->// Requires conversion to PCM_SIGNED!
                        (byteOrder == ByteOrder.BIG_ENDIAN) ? "twos" : "sowt";
                case 24 ->// Requires conversion to PCM_SIGNED!
                        "in24";
                case 32 ->// Requires conversion to PCM_SIGNED!
                        "in32";
                default ->
                        throw new IllegalArgumentException("Unsupported sample size for PCM_UNSIGNED:" + sampleSizeInBits);
            };
        } else if (javax.sound.sampled.AudioFormat.Encoding.ULAW.equals(enc)) {
            if (sampleSizeInBits != 8) {
                throw new IllegalArgumentException("Sample size of 8 for ULAW required:" + sampleSizeInBits);
            }
            qtAudioFormat = "ulaw";
        } else if ("MP3".equals(enc.toString())) {
            qtAudioFormat = ".mp3";
        } else {
            qtAudioFormat = format.getEncoding().toString();
            if (qtAudioFormat == null || qtAudioFormat.length() != 4) {
                throw new IllegalArgumentException("Unsupported encoding:" + format.getEncoding());
            }
        }

        return addAudioTrack(qtAudioFormat, timeScale, sampleRate,
                numberOfChannels, sampleSizeInBits,
                isCompressed, frameDuration, frameSize, signed, byteOrder);
    }

    @Override
    public int getTrackCount() {
        return tracks.size();
    }

    /**
     * Returns the duration of the track in seconds.
     */
    @Override
    public Rational getDuration(int track) {
        AbstractQTFFMovieStream.Track tr = tracks.get(track);
        return new Rational(tr.mediaDuration, tr.mediaTimeScale);
    }

    /**
     * Looks up an encoder for the given format in the codec registry.
     */
    private Codec createCodec(Format fmt) {
        return Registry.getInstance().getEncoder(fmt.prepend(MimeTypeKey, MIME_QUICKTIME));
    }

    /**
     * Creates and configures the codec for the given track, if one is
     * available in the registry.
     */
    private void createCodec(int track) {
        AbstractQTFFMovieStream.Track tr = tracks.get(track);
        TrackEncoder tre = getTrackEncoder(track);
        Format fmt = tr.format;
        tre.codec = createCodec(fmt);
        if (tre.codec != null) {
            if (fmt.get(MediaTypeKey) == MediaType.VIDEO) {
                tre.codec.setInputFormat(fmt.prepend(
                        MimeTypeKey, MIME_JAVA, EncodingKey, ENCODING_BUFFERED_IMAGE,
                        DataClassKey, BufferedImage.class));

                if (null == tre.codec.setOutputFormat(
                        fmt.prepend(
                                QualityKey, getCompressionQuality(track),
                                MimeTypeKey, MIME_QUICKTIME,
                                DataClassKey, byte[].class))) {
                    throw new UnsupportedOperationException("Input format not supported:" + fmt);
                }
            } else {
                tre.codec.setInputFormat(fmt.prepend(
                        MimeTypeKey, MIME_JAVA,
                        EncodingKey, fmt.containsKey(SignedKey) && fmt.get(SignedKey) ? ENCODING_PCM_SIGNED : ENCODING_PCM_UNSIGNED,
                        DataClassKey, byte[].class));
                if (tre.codec.setOutputFormat(fmt) == null) {
                    throw new UnsupportedOperationException("Codec output format not supported:" + fmt + " codec:" + tre.codec);
                } else {
                    tr.format = tre.codec.getOutputFormat();
                }
            }
        }
    }

    /**
     * Returns the codec of the specified track.
     */
    public Codec getCodec(int track) {
        return getTrackEncoder(track).codec;
    }

    /**
     * Sets the codec for the specified track.
     */
    public void setCodec(int track, Codec codec) {
        getTrackEncoder(track).codec = codec;
    }

    /**
     * Writes a sample. Does nothing if the discard-flag in the buffer is set to
     * true.
     *
     * @param track The track number.
     * @param buf   The buffer containing the sample data.
     */
    @Override
    public void write(int track, Buffer buf) throws IOException {
        ensureStarted();
        AbstractQTFFMovieStream.Track tr = tracks.get(track);
        TrackEncoder tre = getTrackEncoder(track);

        // Encode the sample data if the buffer format does not match the
        // track format (frame rate differences alone do not require encoding).
        if (tre.outputBuffer == null) {
            tre.outputBuffer = new Buffer();
            tre.outputBuffer.format = tr.format;
        }
        Buffer outBuf;
        if (tr.format.matchesWithout(buf.format, FrameRateKey)) {
            outBuf = buf;
        } else {
            outBuf = tre.outputBuffer;
            boolean isSync = tr.syncInterval != 0 && tr.sampleCount % tr.syncInterval == 0;
            buf.setFlag(KEYFRAME, isSync);
            if (tre.codec == null) {
                createCodec(track);
                if (tre.codec == null) {
                    throw new UnsupportedOperationException("No codec for this format " + tr.format);
                }
            }
            tre.codec.process(buf, outBuf);
        }
        if (outBuf.isFlag(DISCARD) || outBuf.sampleCount == 0) {
            return;
        }

        // Compute the sample duration in media time scale units.
        if (tr.startTime == null) {
            tr.startTime = buf.timeStamp;
        }
        Rational exactSampleDuration = outBuf.sampleDuration.multiply(outBuf.sampleCount);
        Rational sampleDuration = exactSampleDuration.floor(tr.mediaTimeScale);
        if (sampleDuration.compareTo(new Rational(0, 1)) <= 0) {
            // A sample must last at least one media time scale tick.
            sampleDuration = new Rational(1, tr.mediaTimeScale);
        }
        long sampleDurationInMediaTS = sampleDuration.getNumerator() * (tr.mediaTimeScale / sampleDuration.getDenominator());

        writeSamples(track, outBuf.sampleCount, (byte[]) outBuf.data, outBuf.offset, outBuf.length,
                sampleDurationInMediaTS / outBuf.sampleCount, outBuf.isFlag(KEYFRAME));

        if (outBuf.header instanceof AvcDecoderConfigurationRecord r) {
            writeAvcDecoderConfigurationRecord(track, r);
        }
    }

    /**
     * Encodes an image as a video frame and writes it into a video track.
     *
     * @param track    The track index.
     * @param image    The image of the video frame.
     * @param duration The duration of the video frame in media timescale
     *                 units.
     * @throws IOException if writing the sample data failed.
     */
    public void write(int track, BufferedImage image, long duration) throws IOException {
        Track tr = tracks.get(track);
        Buffer buf = new Buffer();
        buf.data = image;
        buf.sampleDuration = Rational.valueOf(duration, tr.mediaTimeScale);
        buf.format = new Format(FormatKeys.MediaTypeKey, FormatKeys.MediaType.VIDEO,
                VideoFormatKeys.DataClassKey, BufferedImage.class,
                WidthKey, image.getWidth(),
                HeightKey, image.getHeight()
        );
        write(track, buf);
    }

    /**
     * Returns the encoder state of the given track, lazily growing the
     * encoder list as needed.
     */
    private TrackEncoder getTrackEncoder(int track) {
        while (trackEncoders.size() <= track) {
            trackEncoders.add(new TrackEncoder());
        }
        return trackEncoders.get(track);
    }

    /**
     * Writes a sample from a byte array into a track.
     * <p>
     * This method encodes the sample if the format of the track does not match
     * the format of the media in this track.
     *
     * @param track    The track index.
     * @param data     The sample data.
     * @param off      The start offset in the data.
     * @param len      The number of bytes to write.
     * @param duration The duration of the sample in media timescale units.
     * @param isSync   Whether the sample is a sync sample (keyframe).
     * @throws IllegalArgumentException if the duration is less than 1.
     * @throws IOException              if writing the sample data failed.
     */
    @Deprecated
    public void write(int track, byte[] data, int off, int len, long duration, boolean isSync) throws IOException {
        writeSamples(track, 1, data, off, len, duration, isSync);
    }

    /**
     * Writes multiple already encoded samples from a byte array into a track.
     * <p>
     * This method does not inspect the contents of the data. The contents has
     * to match the format and dimensions of the media in this track.
     *
     * @param track          The track index.
     * @param sampleCount    The number of samples.
     * @param data           The encoded sample data.
     * @param off            The start offset in the data.
     * @param len            The number of bytes to write. Must be dividable by
     *                       sampleCount.
     * @param sampleDuration The duration of a sample. All samples must
     *                       have the same duration.
     * @param isSync         Whether the samples are sync samples. All samples must
     *                       either be sync samples or non-sync samples.
     * @throws IllegalArgumentException if the sampleDuration is less than 1.
     * @throws IOException              if writing the sample data failed.
     */
    @Deprecated
    public void write(int track, int sampleCount, byte[] data, int off, int len, long sampleDuration, boolean isSync) throws IOException {
        AbstractQTFFMovieStream.Track tr = tracks.get(track);
        TrackEncoder tre = getTrackEncoder(track);
        if (tre.codec == null) {
            // No codec installed: the data is written as-is.
            writeSamples(track, sampleCount, data, off, len, sampleDuration, isSync);
        } else {
            if (tre.outputBuffer == null) {
                tre.outputBuffer = new Buffer();
            }
            if (tre.inputBuffer == null) {
                tre.inputBuffer = new Buffer();
            }
            Buffer outb = tre.outputBuffer;
            Buffer inb = tre.inputBuffer;
            inb.data = data;
            inb.offset = off;
            inb.length = len;
            inb.sampleDuration = new Rational(sampleDuration, tr.mediaTimeScale);
            inb.sampleCount = sampleCount;
            inb.setFlag(KEYFRAME, isSync);
            tre.codec.process(inb, outb);
            if (!outb.isFlag(DISCARD)) {
                writeSample(track, (byte[]) outb.data, outb.offset, outb.length, outb.sampleCount, outb.isFlag(KEYFRAME));
            }
        }
    }

    /**
     * Returns true because MP4 supports variable frame rates.
     */
    public boolean isVFRSupported() {
        return true;
    }

    @Override
    public boolean isEmpty(int track) {
        return tracks.get(track).isEmpty();
    }
}

// ---------------------------------------------------------------------------
// File: org/monte/media/mp4/MP4WriterSpi.java
// ---------------------------------------------------------------------------
/*
 * @(#)MP4WriterSpi.java
 * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License.
 */

/**
 * Service provider interface for {@link MP4Writer}.
 *
 * @author Werner Randelshofer
 */
class MP4WriterSpi implements MovieWriterSpi {

    /** File name extensions handled by this provider. */
    private final static List<String> extensions = List.of(new String[]{"mp4x", "m4vx"});

    @Override
    public MovieWriter create(File file) throws IOException {
        return new MP4Writer(file);
    }

    @Override
    public MovieWriter create(ImageOutputStream out) throws IOException {
        return new MP4Writer(out);
    }

    @Override
    public List<String> getExtensions() {
        return extensions;
    }

    @Override
    public Format getFileFormat() {
        return MP4Writer.MP4;
    }
}
org.monte.media.exif.MPFTagSet; -import org.monte.media.io.SubImageInputStream; +import org.monte.media.io.FilterImageInputStream; import org.monte.media.jpeg.CMYKJPEGImageReader; import org.monte.media.tiff.BaselineTagSet; import org.monte.media.tiff.TIFFDirectory; @@ -127,7 +127,7 @@ public BufferedImage read(int imageIndex, ImageReadParam param) throw new IndexOutOfBoundsException("illegal imageIndex=" + imageIndex); } ImageInputStream in = (ImageInputStream) getInput(); - SubImageInputStream sin = new SubImageInputStream(in, imageOffsets[imageIndex], imageLengths[imageIndex]); + FilterImageInputStream sin = new FilterImageInputStream(in, imageOffsets[imageIndex], imageLengths[imageIndex]); sin.seek(0); ImageReader ir = new CMYKJPEGImageReader(getOriginatingProvider()); @@ -180,7 +180,8 @@ private void readHeader() throws IOException { TIFFNode imageNode = metaDataTree.getChildAt(i); for (Iterator e = imageNode.preorderIterator(); e.hasNext(); ) { TIFFNode node = e.next(); - if (node instanceof TIFFDirectory dir) { + if (node instanceof TIFFDirectory) { + TIFFDirectory dir = (TIFFDirectory) node; if ((mde = dir.getField(BaselineTagSet.ImageWidth)) != null) { width[i] = ((Number) mde.getData()).intValue(); } @@ -209,9 +210,10 @@ private void readHeader() throws IOException { int index = 0; for (Iterator e = er.getMetaDataTree().preorderIterator(); e.hasNext(); ) { TIFFNode n = e.next(); - if (n instanceof TIFFDirectory dir) { + if (n instanceof TIFFDirectory) { + TIFFDirectory dir = (TIFFDirectory) n; if (dir.getName() != null && dir.getName().equals("MPEntry")) { - long dirOffset = dir.getFileSegments().getFirst().offset(); + long dirOffset = dir.getFileSegments().get(0).offset(); TIFFField offsetField = dir.getField(MPEntryTagSet.IndividualImageDataOffset); TIFFField lengthField = dir.getField(MPEntryTagSet.IndividualImageSize); if (offsetField != null && lengthField != null) { diff --git 
a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/AbstractQuickTimeStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/AbstractQTFFMovieStream.java similarity index 59% rename from org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/AbstractQuickTimeStream.java rename to org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/AbstractQTFFMovieStream.java index d3bd7d8..4d4bed7 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/AbstractQuickTimeStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/AbstractQTFFMovieStream.java @@ -1,34 +1,29 @@ /* - * @(#)AbstractQuickTimeStream.java - * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. + * @(#)AbstractQTFFMovieStream.java + * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. */ -package org.monte.media.quicktime; +package org.monte.media.qtff; -import org.monte.media.av.Buffer; -import org.monte.media.av.Codec; import org.monte.media.av.Format; import org.monte.media.av.FormatKeys.MediaType; -import org.monte.media.io.ImageOutputStreamAdapter; import org.monte.media.math.Rational; import javax.imageio.stream.ImageOutputStream; import java.awt.image.IndexColorModel; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; -import java.util.Date; import java.util.LinkedList; /** * This is the base class for low-level QuickTime stream IO. * - *

FIXME - Separation between AbstractQuickTimeStream and - * QuickTimeOutputStream is not clean. Move write methods in the track classes - * down to QuickTimeOutputStream.

- * * @author Werner Randelshofer */ -public class AbstractQuickTimeStream { +public class AbstractQTFFMovieStream { + public static final String DEFAULT_COMPONENT_NAME = "Monte Media"; + public static final String DEFAULT_COMPONENT_MANUFACTURER = "java"; /** * Underlying output stream. */ @@ -54,13 +49,13 @@ public class AbstractQuickTimeStream { /** * Creation time of the movie. */ - protected Date creationTime; + protected Instant creationTime; /** * Modification time of the movie. */ - protected Date modificationTime; + protected Instant modificationTime; /** - * The timeScale of the movie. A time value that indicates the time scale + * The timeScale of the movie. A time value that indicates the timescale * for this media—that is, the number of time units that pass per second in * its time coordinate system. */ @@ -80,7 +75,7 @@ public class AbstractQuickTimeStream { */ protected long previewTime = 0; /** - * The duration of the movie preview in movie time scale units. + * The duration of the movie preview in movie timescale units. */ protected long previewDuration = 0; /** @@ -92,7 +87,7 @@ public class AbstractQuickTimeStream { */ protected long selectionTime = 0; /** - * The duration of the current selection in movie time scale units. + * The duration of the current selection in movie timescale units. */ protected long selectionDuration = 0; /** @@ -107,50 +102,27 @@ public class AbstractQuickTimeStream { * The transformation matrix for the entire movie. */ protected double[] movieMatrix = {1, 0, 0, 0, 1, 0, 0, 0, 1}; - - /** - * The states of the movie output stream. - */ - protected static enum States { - - REALIZED, STARTED, FINISHED, CLOSED; - } - /** * The current state of the movie output stream. 
*/ protected States state = States.REALIZED; - public void setTrackEnabled(int track, boolean newValue) { - tracks.get(track).setEnabled(newValue); - } - - public boolean isTrackEnabled(int track) { - return tracks.get(track).isEnabled(); - } - - public void setTrackInMovie(int track, boolean newValue) { - tracks.get(track).setInMovie(newValue); - } - - public boolean isTrackInMovie(int track) { - return tracks.get(track).isInMovie(); - } - - public void setTrackInPreview(int track, boolean newValue) { - tracks.get(track).setInPreview(newValue); - } - - public boolean isTrackInPreview(int track) { - return tracks.get(track).isInPreview(); - } + protected static String intToType(int id) { + char[] b = new char[4]; - public void setTrackInPoster(int track, boolean newValue) { - tracks.get(track).setInPoster(newValue); + b[0] = (char) ((id >>> 24) & 0xff); + b[1] = (char) ((id >>> 16) & 0xff); + b[2] = (char) ((id >>> 8) & 0xff); + b[3] = (char) (id & 0xff); + return String.valueOf(b); } - public boolean isTrackInPoster(int track) { - return tracks.get(track).isInPoster(); + protected static int typeToInt(String str) { + int value = ((str.charAt(0) & 0xff) << 24) |// + ((str.charAt(1) & 0xff) << 16) | // + ((str.charAt(2) & 0xff) << 8) | // + (str.charAt(3) & 0xff); + return value; } /** @@ -166,6 +138,22 @@ protected long getRelativeStreamPosition() throws IOException { return out.getStreamPosition() - streamOffset; } + public boolean isTrackEnabled(int track) { + return tracks.get(track).isEnabled(); + } + + public boolean isTrackInMovie(int track) { + return tracks.get(track).isInMovie(); + } + + public boolean isTrackInPoster(int track) { + return tracks.get(track).isInPoster(); + } + + public boolean isTrackInPreview(int track) { + return tracks.get(track).isInPreview(); + } + /** * Seeks relative to the beginning of the QuickTime stream.

Usually this * equal to seeking in the underlying ImageOutputStream, but can be @@ -175,507 +163,594 @@ protected void seekRelative(long newPosition) throws IOException { out.seek(newPosition + streamOffset); } - protected static int typeToInt(String str) { - int value = ((str.charAt(0) & 0xff) << 24) |// - ((str.charAt(1) & 0xff) << 16) | // - ((str.charAt(2) & 0xff) << 8) | // - (str.charAt(3) & 0xff); - return value; + public void setTrackEnabled(int track, boolean newValue) { + tracks.get(track).setEnabled(newValue); } - protected static String intToType(int id) { - char[] b = new char[4]; + public void setTrackInMovie(int track, boolean newValue) { + tracks.get(track).setInMovie(newValue); + } - b[0] = (char) ((id >>> 24) & 0xff); - b[1] = (char) ((id >>> 16) & 0xff); - b[2] = (char) ((id >>> 8) & 0xff); - b[3] = (char) (id & 0xff); - return String.valueOf(b); + public void setTrackInPoster(int track, boolean newValue) { + tracks.get(track).setInPoster(newValue); + } + + public void setTrackInPreview(int track, boolean newValue) { + tracks.get(track).setInPreview(newValue); } /** - * Atom base class. + * The states of the movie output stream. */ - protected abstract class Atom { + protected static enum States { + + REALIZED, STARTED, FINISHED, CLOSED; + } + + /** + * Groups consecutive samples with same characteristics. 
+ */ + protected abstract static class Group { + + protected final static long maxSampleCount = Integer.MAX_VALUE; + public long sampleCount; + protected Sample firstSample; + protected Sample lastSample; + + protected Group(Sample firstSample) { + this.firstSample = this.lastSample = firstSample; + sampleCount = 1; + } + + protected Group(Sample firstSample, Sample lastSample, long sampleCount) { + this.firstSample = firstSample; + this.lastSample = lastSample; + this.sampleCount = sampleCount; + if (sampleCount > maxSampleCount) { + throw new IllegalArgumentException("Capacity exceeded"); + } + } + + protected Group(Group group) { + this.firstSample = group.firstSample; + this.lastSample = group.lastSample; + sampleCount = group.sampleCount; + } + + public long getSampleCount() { + return sampleCount; + } /** - * The type of the atom. A String with the length of 4 characters. - */ - protected String type; - /** - * The offset of the atom relative to the start of the - * ImageOutputStream. + * Returns true, if the chunk was added to the group. If false is + * returned, the chunk must be added to a new group.

A chunk can + * only be added to a group, if the capacity of the group is not + * exceeded. */ - protected long offset; + protected boolean maybeAddChunk(Chunk chunk) { + if (sampleCount + chunk.sampleCount <= maxSampleCount) { + lastSample = chunk.lastSample; + sampleCount += chunk.sampleCount; + return true; + } + return false; + } /** - * Creates a new Atom at the current position of the ImageOutputStream. - * - * @param type The type of the atom. A string with a length of 4 - * characters. + * Returns true, if the samples was added to the group. If false is + * returned, the sample must be added to a new group.

A sample can + * only be added to a group, if the capacity of the group is not + * exceeded. */ - public Atom(String type, long offset) { - this.type = type; - this.offset = offset; + protected boolean maybeAddSample(Sample sample) { + if (sampleCount < maxSampleCount) { + lastSample = sample; + sampleCount++; + return true; + } + return false; } + } + + /** + * QuickTime stores media data in samples. A sample is a single element in a + * sequence of time-ordered data. Samples are stored in the mdat atom. + */ + protected static class Sample { /** - * Writes the atom to the ImageOutputStream and disposes it. + * Offset of the sample relative to the start of the QuickTime file. */ - public abstract void finish() throws IOException; + long offset; + /** + * Data length of the sample. + */ + long length; + /** + * The duration of the sample in media timescale units. + */ + long duration; /** - * Returns the size of the atom including the size of the atom header. + * Creates a new sample. * - * @return The size of the atom. + * @param duration + * @param offset + * @param length */ - public abstract long size(); + public Sample(long duration, long offset, long length) { + this.duration = duration; + this.offset = offset; + this.length = length; + } } /** - * A CompositeAtom contains an ordered list of Atoms. + * Groups consecutive smples of the same duration. */ - protected class CompositeAtom extends DataAtom { + protected static class TimeToSampleGroup extends Group { - protected LinkedList children; + public TimeToSampleGroup(Sample firstSample) { + super(firstSample); + } + + public TimeToSampleGroup(Group group) { + super(group); + } /** - * Creates a new CompositeAtom at the current position of the - * ImageOutputStream. - * - * @param type The type of the atom. + * Returns the duration that all samples in this group share. 
*/ - public CompositeAtom(String type) throws IOException { - super(type); - children = new LinkedList<>(); + public long getSampleDuration() { + return firstSample.duration; } - public void add(Atom child) throws IOException { - if (children.size() > 0) { - children.getLast().finish(); + @Override + public boolean maybeAddChunk(Chunk chunk) { + if (firstSample.duration == chunk.firstSample.duration) { + return super.maybeAddChunk(chunk); } - children.add(child); + return false; } /** - * Writes the atom and all its children to the ImageOutputStream and - * disposes of all resources held by the atom. - * - * @throws java.io.IOException + * Returns true, if the sample was added to the group. If false is + * returned, the sample must be added to a new group.

A sample can + * only be added to a TimeToSampleGroup, if it has the same duration as + * previously added samples, and if the capacity of the group is not + * exceeded. */ @Override - public void finish() throws IOException { - if (!finished) { - if (size() > 0xffffffffL) { - throw new IOException("CompositeAtom \"" + type + "\" is too large: " + size()); - } - - long pointer = getRelativeStreamPosition(); - seekRelative(offset); - - DataAtomOutputStream headerData = new DataAtomOutputStream(new ImageOutputStreamAdapter(out)); - headerData.writeInt((int) size()); - headerData.writeType(type); - for (Atom child : children) { - child.finish(); - } - seekRelative(pointer); - finished = true; + public boolean maybeAddSample(Sample sample) { + if (firstSample.duration == sample.duration) { + return super.maybeAddSample(sample); } - } - - @Override - public long size() { - long length = 8 + data.size(); - for (Atom child : children) { - length += child.size(); - } - return length; + return false; } } /** - * Data Atom. + * Groups consecutive samples of the same size. */ - protected class DataAtom extends Atom { - - protected DataAtomOutputStream data; - protected boolean finished; + protected static class SampleSizeGroup extends Group { - /** - * Creates a new DataAtom at the current position of the - * ImageOutputStream. - * - * @param type The type name of the atom. 
- */ - public DataAtom(String type) throws IOException { - super(type, getRelativeStreamPosition()); - out.writeLong(0); // make room for the atom header - data = new DataAtomOutputStream(new ImageOutputStreamAdapter(out)); + public SampleSizeGroup(Sample firstSample) { + super(firstSample); } - public DataAtomOutputStream getOutputStream() { - if (finished) { - throw new IllegalStateException("DataAtom is finished"); - } - return data; + public SampleSizeGroup(Group group) { + super(group); } /** - * Returns the offset of this atom to the beginning of the random access - * file + * Returns the length that all samples in this group share. */ - public long getOffset() { - return offset; + public long getSampleLength() { + return firstSample.length; } @Override - public void finish() throws IOException { - if (!finished) { - long sizeBefore = size(); - - if (size() > 0xffffffffL) { - throw new IOException("DataAtom \"" + type + "\" is too large: " + size()); - } - - long pointer = getRelativeStreamPosition(); - seekRelative(offset); - - DataAtomOutputStream headerData = new DataAtomOutputStream(new ImageOutputStreamAdapter(out)); - headerData.writeUInt(size()); - headerData.writeType(type); - seekRelative(pointer); - finished = true; - long sizeAfter = size(); - if (sizeBefore != sizeAfter) { - System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter); - } + public boolean maybeAddChunk(Chunk chunk) { + if (firstSample.length == chunk.firstSample.length) { + return super.maybeAddChunk(chunk); } + return false; } + /** + * Returns true, if the sample was added to the group. If false is + * returned, the sample must be added to a new group.

A sample can + * only be added to a SampleSizeGroup, if it has the same size as + * previously added samples, and if the capacity of the group is not + * exceeded. + */ @Override - public long size() { - return 8 + data.size(); + public boolean maybeAddSample(Sample sample) { + if (firstSample.length == sample.length) { + return super.maybeAddSample(sample); + } + return false; } } /** - * WideDataAtom can grow larger then 4 gigabytes. + * Groups consecutive samples with the same sample description Id and with + * adjacent offsets in the movie file. */ - protected class WideDataAtom extends Atom { + protected static class Chunk extends Group { - protected DataAtomOutputStream data; - protected boolean finished; + public int sampleDescriptionId; /** - * Creates a new DataAtom at the current position of the - * ImageOutputStream. + * Creates a new Chunk. * - * @param type The type of the atom. + * @param firstSample The first sample contained in this chunk. + * @param sampleDescriptionId The description Id of the sample. */ - public WideDataAtom(String type) throws IOException { - super(type, getRelativeStreamPosition()); - out.writeLong(0); // make room for the atom header - out.writeLong(0); // make room for the atom header - data = new DataAtomOutputStream(new ImageOutputStreamAdapter(out)) { - @Override - public void flush() throws IOException { - // DO NOT FLUSH UNDERLYING STREAM! - } - }; + public Chunk(Sample firstSample, int sampleDescriptionId) { + super(firstSample); + this.sampleDescriptionId = sampleDescriptionId; } - public DataAtomOutputStream getOutputStream() { - if (finished) { - throw new IllegalStateException("Atom is finished"); - } - return data; + /** + * Creates a new Chunk. + * + * @param firstSample The first sample contained in this chunk. + * @param sampleDescriptionId The description Id of the sample. 
+ */ + public Chunk(Sample firstSample, Sample lastSample, int sampleCount, int sampleDescriptionId) { + super(firstSample, lastSample, sampleCount); + this.sampleDescriptionId = sampleDescriptionId; } /** - * Returns the offset of this atom to the beginning of the random access - * file + * Returns the offset of the chunk in the movie file. */ - public long getOffset() { - return offset; + public long getChunkOffset() { + return firstSample.offset; } @Override - public void finish() throws IOException { - if (!finished) { - long pointer = getRelativeStreamPosition(); - seekRelative(offset); - - DataAtomOutputStream headerData = new DataAtomOutputStream(new ImageOutputStreamAdapter(out)); - long finishedSize = size(); - if (finishedSize <= 0xffffffffL) { - headerData.writeUInt(8); - headerData.writeType("wide"); - headerData.writeUInt(finishedSize - 8); - headerData.writeType(type); - } else { - headerData.writeInt(1); // special value for extended size atoms - headerData.writeType(type); - headerData.writeLong(finishedSize - 8); - } - - seekRelative(pointer); - finished = true; + public boolean maybeAddChunk(Chunk chunk) { + if (sampleDescriptionId == chunk.sampleDescriptionId // + && lastSample.offset + lastSample.length == chunk.firstSample.offset) { + return super.maybeAddChunk(chunk); } + return false; } - @Override - public long size() { - return 16 + data.size(); + /** + * Returns true, if the sample was added to the chunk. If false is + * returned, the sample must be added to a new chunk.

A sample can + * only be added to a chunk, if it has the same sample description Id as + * previously added samples, if the capacity of the chunk is not + * exceeded and if the sample offset is adjacent to the last sample in + * this chunk. + */ + public boolean maybeAddSample(Sample sample, int sampleDescriptionId) { + if (sampleDescriptionId == this.sampleDescriptionId + && lastSample.offset + lastSample.length == sample.offset) { + return super.maybeAddSample(sample); + } + return false; } } /** - * Groups consecutive samples with same characteristics. + * An {@code Edit} define the portions of the media that are to be used to + * build up a track for a movie. The edits themselves are stored in an edit + * list table, which consists of time offset and duration values for each + * segment.

In the absence of an edit list, the presentation of the + * track starts immediately. An empty edit is used to offset the start time + * of a track. */ - protected abstract static class Group { - - protected Sample firstSample; - protected Sample lastSample; - protected long sampleCount; - protected final static long maxSampleCount = Integer.MAX_VALUE; - - protected Group(Sample firstSample) { - this.firstSample = this.lastSample = firstSample; - sampleCount = 1; - } - - protected Group(Sample firstSample, Sample lastSample, long sampleCount) { - this.firstSample = firstSample; - this.lastSample = lastSample; - this.sampleCount = sampleCount; - if (sampleCount > maxSampleCount) { - throw new IllegalArgumentException("Capacity exceeded"); - } - } + public static class Edit { - protected Group(Group group) { - this.firstSample = group.firstSample; - this.lastSample = group.lastSample; - sampleCount = group.sampleCount; - } + /** + * A 32-bit fixed-point number (16.16) that specifies the relative rate + * at which to play the media corresponding to this edit segment. This + * rate value cannot be 0 or negative. + */ + public int mediaRate; + /** + * A 32-bit integer containing the start time within the media of this + * edit segment (in media timescale units). If this field is set to -1, + * it is an empty edit. The last edit in a track should never be an + * empty edit. Any difference between the movie's duration and the + * track's duration is expressed as an implicit empty edit. + */ + public int mediaTime; + /** + * A 32-bit integer that specifies the duration of this edit segment in + * units of the movie's timescale. + */ + public int trackDuration; /** - * Returns true, if the samples was added to the group. If false is - * returned, the sample must be added to a new group.

A sample can - * only be added to a group, if the capacity of the group is not - * exceeded. + * Creates an edit. + * + * @param trackDuration Duration of this edit in the movie's timescale. + * @param mediaTime Start time of this edit in the media's timescale. + * Specify -1 for an empty edit. The last edit in a track should never + * be an empty edit. + * @param mediaRate The relative rate at which to play this edit. */ - protected boolean maybeAddSample(Sample sample) { - if (sampleCount < maxSampleCount) { - lastSample = sample; - sampleCount++; - return true; + public Edit(int trackDuration, int mediaTime, double mediaRate) { + if (trackDuration < 0) { + throw new IllegalArgumentException("trackDuration must not be < 0:" + trackDuration); } - return false; + if (mediaTime < -1) { + throw new IllegalArgumentException("mediaTime must not be < -1:" + mediaTime); + } + if (mediaRate <= 0) { + throw new IllegalArgumentException("mediaRate must not be <= 0:" + mediaRate); + } + this.trackDuration = trackDuration; + this.mediaTime = mediaTime; + this.mediaRate = (int) (mediaRate * (1 << 16)); } /** - * Returns true, if the chunk was added to the group. If false is - * returned, the chunk must be added to a new group.

A chunk can - * only be added to a group, if the capacity of the group is not - * exceeded. + * Creates an edit.

Use this constructor only if you want to compute + * the fixed point media rate by yourself. + * + * @param trackDuration Duration of this edit in the movie's timescale. + * @param mediaTime Start time of this edit in the media's timescale. + * Specify -1 for an empty edit. The last edit in a track should never + * be an empty edit. + * @param mediaRate The relative rate at which to play this edit given + * as a 16.16 fixed point value. */ - protected boolean maybeAddChunk(Chunk chunk) { - if (sampleCount + chunk.sampleCount <= maxSampleCount) { - lastSample = chunk.lastSample; - sampleCount += chunk.sampleCount; - return true; + public Edit(int trackDuration, int mediaTime, int mediaRate) { + if (trackDuration < 0) { + throw new IllegalArgumentException("trackDuration must not be < 0:" + trackDuration); } - return false; - } - - public long getSampleCount() { - return sampleCount; + if (mediaTime < -1) { + throw new IllegalArgumentException("mediaTime must not be < -1:" + mediaTime); + } + if (mediaRate <= 0) { + throw new IllegalArgumentException("mediaRate must not be <= 0:" + mediaRate); + } + this.trackDuration = trackDuration; + this.mediaTime = mediaTime; + this.mediaRate = mediaRate; } } /** - * QuickTime stores media data in samples. A sample is a single element in a - * sequence of time-ordered data. Samples are stored in the mdat atom. + * Atom base class. */ - protected static class Sample { + protected abstract class Atom { /** - * Offset of the sample relative to the start of the QuickTime file. - */ - long offset; - /** - * Data length of the sample. + * The type of the atom. A String with the length of 4 characters. */ - long length; + protected String type; /** - * The duration of the sample in media time scale units. + * The offset of the atom relative to the start of the + * ImageOutputStream. */ - long duration; + protected long offset; /** - * Creates a new sample. + * Creates a new Atom at the current position of the ImageOutputStream. 
* - * @param duration - * @param offset - * @param length + * @param type The type of the atom. A string with a length of 4 + * characters. */ - public Sample(long duration, long offset, long length) { - this.duration = duration; + public Atom(String type, long offset) { + this.type = type; this.offset = offset; - this.length = length; } + + /** + * Writes the atom to the ImageOutputStream and disposes it. + */ + public abstract void finish() throws IOException; + + /** + * Returns the size of the atom including the size of the atom header. + * + * @return The size of the atom. + */ + public abstract long size(); } /** - * Groups consecutive smples of the same duration. + * A CompositeAtom contains an ordered list of Atoms. */ - protected static class TimeToSampleGroup extends Group { - - public TimeToSampleGroup(Sample firstSample) { - super(firstSample); - } + protected class CompositeAtom extends DataAtom { - public TimeToSampleGroup(Group group) { - super(group); - } + protected LinkedList children; /** - * Returns true, if the sample was added to the group. If false is - * returned, the sample must be added to a new group.

A sample can - * only be added to a TimeToSampleGroup, if it has the same duration as - * previously added samples, and if the capacity of the group is not - * exceeded. + * Creates a new CompositeAtom at the current position of the + * ImageOutputStream. + * + * @param type The type of the atom. */ - @Override - public boolean maybeAddSample(Sample sample) { - if (firstSample.duration == sample.duration) { - return super.maybeAddSample(sample); - } - return false; + public CompositeAtom(String type) throws IOException { + super(type); + children = new LinkedList<>(); } - @Override - public boolean maybeAddChunk(Chunk chunk) { - if (firstSample.duration == chunk.firstSample.duration) { - return super.maybeAddChunk(chunk); + public void add(Atom child) throws IOException { + if (children.size() > 0) { + children.get(children.size() - 1).finish(); } - return false; + children.add(child); } /** - * Returns the duration that all samples in this group share. + * Writes the atom and all its children to the ImageOutputStream and + * disposes of all resources held by the atom. + * + * @throws java.io.IOException */ - public long getSampleDuration() { - return firstSample.duration; + @Override + public void finish() throws IOException { + if (!finished) { + if (size() > 0xffffffffL) { + throw new IOException("CompositeAtom \"" + type + "\" is too large: " + size()); + } + + long pointer = getRelativeStreamPosition(); + seekRelative(offset); + + QTFFImageOutputStream headerData = new QTFFImageOutputStream(out); + headerData.writeInt((int) size()); + headerData.writeType(type); + for (Atom child : children) { + child.finish(); + } + seekRelative(pointer); + finished = true; + } + } + + @Override + public long size() { + long length = 8 + data.length(); + for (Atom child : children) { + length += child.size(); + } + return length; } } /** - * Groups consecutive samples of the same size. + * Data Atom. 
*/ - protected static class SampleSizeGroup extends Group { - - public SampleSizeGroup(Sample firstSample) { - super(firstSample); - } + protected class DataAtom extends Atom { - public SampleSizeGroup(Group group) { - super(group); - } + protected QTFFImageOutputStream data; + protected boolean finished; /** - * Returns true, if the sample was added to the group. If false is - * returned, the sample must be added to a new group.

A sample can - * only be added to a SampleSizeGroup, if it has the same size as - * previously added samples, and if the capacity of the group is not - * exceeded. + * Creates a new DataAtom at the current position of the + * ImageOutputStream. + * + * @param type The type name of the atom. */ - @Override - public boolean maybeAddSample(Sample sample) { - if (firstSample.length == sample.length) { - return super.maybeAddSample(sample); - } - return false; + public DataAtom(String type) throws IOException { + super(type, getRelativeStreamPosition()); + out.writeLong(0); // make room for the atom header + data = new QTFFImageOutputStream(out); } @Override - public boolean maybeAddChunk(Chunk chunk) { - if (firstSample.length == chunk.firstSample.length) { - return super.maybeAddChunk(chunk); + public void finish() throws IOException { + if (!finished) { + long sizeBefore = size(); + + if (size() > 0xffffffffL) { + throw new IOException("DataAtom \"" + type + "\" is too large: " + size()); + } + + long pointer = getRelativeStreamPosition(); + seekRelative(offset); + + QTFFImageOutputStream headerData = new QTFFImageOutputStream(out); + headerData.writeInt((int) size()); + headerData.writeType(type); + seekRelative(pointer); + finished = true; + long sizeAfter = size(); + if (sizeBefore != sizeAfter) { + System.err.println("size mismatch " + sizeBefore + ".." + sizeAfter); + } } - return false; } /** - * Returns the length that all samples in this group share. 
+ * Returns the offset of this atom to the beginning of the random access + * file */ - public long getSampleLength() { - return firstSample.length; + public long getOffset() { + return offset; + } + + public QTFFImageOutputStream getOutputStream() { + if (finished) { + throw new IllegalStateException("DataAtom is finished"); + } + return data; + } + + @Override + public long size() { + return 8 + data.length(); } } /** - * Groups consecutive samples with the same sample description Id and with - * adjacent offsets in the movie file. + * WideDataAtom can grow larger then 4 gigabytes. */ - protected static class Chunk extends Group { + protected class WideDataAtom extends Atom { - protected int sampleDescriptionId; + protected QTFFImageOutputStream data; + protected boolean finished; /** - * Creates a new Chunk. + * Creates a new DataAtom at the current position of the + * ImageOutputStream. * - * @param firstSample The first sample contained in this chunk. - * @param sampleDescriptionId The description Id of the sample. + * @param type The type of the atom. */ - public Chunk(Sample firstSample, int sampleDescriptionId) { - super(firstSample); - this.sampleDescriptionId = sampleDescriptionId; + public WideDataAtom(String type) throws IOException { + super(type, getRelativeStreamPosition()); + out.writeLong(0); // make room for the atom header + out.writeLong(0); // make room for the atom header + data = new QTFFImageOutputStream(out) { + @Override + public void flush() throws IOException { + // DO NOT FLUSH UNDERLYING STREAM! + } + }; } - /** - * Creates a new Chunk. - * - * @param firstSample The first sample contained in this chunk. - * @param sampleDescriptionId The description Id of the sample. 
- */ - public Chunk(Sample firstSample, Sample lastSample, int sampleCount, int sampleDescriptionId) { - super(firstSample, lastSample, sampleCount); - this.sampleDescriptionId = sampleDescriptionId; + @Override + public void finish() throws IOException { + if (!finished) { + long pointer = getRelativeStreamPosition(); + seekRelative(offset); + + QTFFImageOutputStream headerData = new QTFFImageOutputStream(out); + long finishedSize = size(); + if (finishedSize <= 0xffffffffL) { + headerData.writeInt(8); + headerData.writeType("wide"); + headerData.writeInt((int) (finishedSize - 8)); + headerData.writeType(type); + } else { + headerData.writeInt(1); // special value for extended size atoms + headerData.writeType(type); + headerData.writeLong(finishedSize - 8); + } + + seekRelative(pointer); + finished = true; + } } /** - * Returns true, if the sample was added to the chunk. If false is - * returned, the sample must be added to a new chunk.

A sample can - * only be added to a chunk, if it has the same sample description Id as - * previously added samples, if the capacity of the chunk is not - * exceeded and if the sample offset is adjacent to the last sample in - * this chunk. + * Returns the offset of this atom to the beginning of the random access + * file */ - public boolean maybeAddSample(Sample sample, int sampleDescriptionId) { - if (sampleDescriptionId == this.sampleDescriptionId - && lastSample.offset + lastSample.length == sample.offset) { - return super.maybeAddSample(sample); - } - return false; + public long getOffset() { + return offset; } - @Override - public boolean maybeAddChunk(Chunk chunk) { - if (sampleDescriptionId == chunk.sampleDescriptionId // - && lastSample.offset + lastSample.length == chunk.firstSample.offset) { - return super.maybeAddChunk(chunk); + public QTFFImageOutputStream getOutputStream() { + if (finished) { + throw new IllegalStateException("Atom is finished"); } - return false; + return data; } - /** - * Returns the offset of the chunk in the movie file. - */ - public long getChunkOffset() { - return firstSample.offset; + @Override + public long size() { + return 16 + data.length(); } } @@ -685,149 +760,110 @@ public long getChunkOffset() { protected abstract class Track { // Common metadata + private final static int TrackEnable = 0x1; // enabled track + private final static int TrackInMovie = 0x2;// track in playback + private final static int TrackInPreview = 0x4; // track in preview + private final static int TrackInPoster = 0x8; // track in posterTrackEnable = 0x1, // enabled track /** * The media type of the track. */ - protected final MediaType mediaType; + public final MediaType mediaType; /** - * The format of the media in the track. - */ - protected Format format; - /** - * The timeScale of the media in the track. A time value that indicates - * the time scale for this media. 
That is, the number of time units that - * pass per second in its time coordinate system. - */ - protected long mediaTimeScale = 600; - /** - * The compression type of the media. + * List of chunks. */ - protected String mediaCompressionType; + public ArrayList chunks = new ArrayList<>(); + public String componentName = DEFAULT_COMPONENT_NAME; + public String componentManufacturer = DEFAULT_COMPONENT_MANUFACTURER; /** - * The compressor name. + * The edit list of the track. */ - protected String mediaCompressorName; + public Edit[] editList; /** - * List of chunks. + * The format of the media in the track. */ - protected ArrayList chunks = new ArrayList<>(); + public Format format; /** - * List of TimeToSample entries. + *

+         * // Enumeration for track header flags
+         * set {
+         * TrackEnable = 0x1, // enabled track
+         * TrackInMovie = 0x2, // track in playback
+         * TrackInPreview = 0x4, // track in preview
+         * TrackInPoster = 0x8 // track in poster
+         * } TrackHeaderFlags;
+         * 
*/ - protected ArrayList timeToSamples = new ArrayList<>(); + public int headerFlags = TrackEnable | TrackInMovie | TrackInPreview | TrackInPoster; + public double height; /** - * List of SampleSize entries. + * The transformation matrix of the track. */ - protected ArrayList sampleSizes = new ArrayList<>(); + public double[] matrix = {// + 1, 0, 0,// + 0, 1, 0,// + 0, 0, 1 + }; /** - * List of sync samples. This list is null as long as all samples in - * this track are sync samples. + * The compression type of the media. */ - protected ArrayList syncSamples = null; + public String mediaCompressionType; /** - * The number of samples in this track. + * The compressor name. */ - protected long sampleCount = 0; + public String mediaCompressorName; /** * The duration of the media in this track in media time units. */ - protected long mediaDuration = 0; + public long mediaDuration = 0; /** - * The edit list of the track. + * The timeScale of the media in the track. A time value that indicates + * the timescale for this media. That is, the number of time units that + * pass per second in its time coordinate system. */ - protected Edit[] editList; + public long mediaTimeScale = 600; /** - * Interval between sync samples (keyframes). 0 = automatic. 1 = write - * all samples as sync samples. n = sync every n-th sample. + * The number of samples in this track. */ - protected int syncInterval; + public long sampleCount = 0; /** - * The codec. + * List of SampleSize entries. */ - protected Codec codec; - protected Buffer outputBuffer; - protected Buffer inputBuffer; + public ArrayList sampleSizes = new ArrayList<>(); /** - * Start time of the first buffer that was added to the track. + * Start time of the track. */ - protected Rational inputTime; + public Rational startTime; /** - * Current write time. + * Interval between sync samples (keyframes). 0 = automatic. 1 = write + * all samples as sync samples. n = sync every n-th sample. 
*/ - protected Rational writeTime; + public int syncInterval; /** - * The transformation matrix of the track. - */ - protected double[] matrix = {// - 1, 0, 0,// - 0, 1, 0,// - 0, 0, 1 - }; - protected double width, height; - - private final static int TrackEnable = 0x1; // enabled track - private final static int TrackInMovie = 0x2;// track in playback - private final static int TrackInPreview = 0x4; // track in preview - private final static int TrackInPoster = 0x8; // track in posterTrackEnable = 0x1, // enabled track - + * List of sync samples. This list is null as long as all samples in + * this track are sync samples. + */ + public ArrayList syncSamples = null; /** - *
-         * // Enumeration for track header flags
-         * set {
-         * TrackEnable = 0x1, // enabled track
-         * TrackInMovie = 0x2, // track in playback
-         * TrackInPreview = 0x4, // track in preview
-         * TrackInPoster = 0x8 // track in poster
-         * } TrackHeaderFlags;
-         * 
+ * List of TimeToSample entries. */ - protected int headerFlags = TrackEnable | TrackInMovie | TrackInPreview | TrackInPoster; + public ArrayList timeToSamples = new ArrayList<>(); + public double width; public Track(MediaType mediaType) { this.mediaType = mediaType; } - public void setEnabled(boolean newValue) { - headerFlags = (newValue) ? headerFlags | TrackEnable : headerFlags & (0xff ^ TrackEnable); - } - - public boolean isEnabled() { - return (headerFlags & TrackEnable) != 0; - } - - public void setInMovie(boolean newValue) { - headerFlags = (newValue) ? headerFlags | TrackInMovie : headerFlags & (0xff ^ TrackInMovie); - } - - public boolean isInMovie() { - return (headerFlags & TrackInPreview) != 0; - } - - public void setInPreview(boolean newValue) { - headerFlags = (newValue) ? headerFlags | TrackInPreview : headerFlags & (0xff ^ TrackInPreview); - } - - public boolean isInPreview() { - return (headerFlags & TrackInPreview) != 0; - } - - public void setInPoster(boolean newValue) { - headerFlags = (newValue) ? headerFlags | TrackInPoster : headerFlags & (0xff ^ TrackInPoster); - } - - public boolean isInPoster() { - return (headerFlags & TrackInPoster) != 0; - } - - public void addSample(Sample sample, int sampleDescriptionId, boolean isSyncSample) { - mediaDuration += sample.duration; - sampleCount++; + public void addChunk(Chunk chunk, boolean isSyncSample) { + mediaDuration += chunk.firstSample.duration * chunk.sampleCount; + sampleCount += chunk.sampleCount; // Keep track of sync samples. If all samples in a track are sync // samples, we do not need to create a syncSample list. 
if (isSyncSample) { if (syncSamples != null) { - syncSamples.add(sampleCount); + for (long i = sampleCount - chunk.sampleCount; i < sampleCount; i++) { + syncSamples.add(i); + } } } else { if (syncSamples == null) { @@ -840,30 +876,28 @@ public void addSample(Sample sample, int sampleDescriptionId, boolean isSyncSamp // if (timeToSamples.isEmpty()// - || !timeToSamples.getLast().maybeAddSample(sample)) { - timeToSamples.add(new TimeToSampleGroup(sample)); + || !timeToSamples.get(timeToSamples.size() - 1).maybeAddChunk(chunk)) { + timeToSamples.add(new TimeToSampleGroup(chunk)); } if (sampleSizes.isEmpty()// - || !sampleSizes.getLast().maybeAddSample(sample)) { - sampleSizes.add(new SampleSizeGroup(sample)); + || !sampleSizes.get(sampleSizes.size() - 1).maybeAddChunk(chunk)) { + sampleSizes.add(new SampleSizeGroup(chunk)); } if (chunks.isEmpty()// - || !chunks.getLast().maybeAddSample(sample, sampleDescriptionId)) { - chunks.add(new Chunk(sample, sampleDescriptionId)); + || !chunks.get(chunks.size() - 1).maybeAddChunk(chunk)) { + chunks.add(chunk); } } - public void addChunk(Chunk chunk, boolean isSyncSample) { - mediaDuration += chunk.firstSample.duration * chunk.sampleCount; - sampleCount += chunk.sampleCount; + public void addSample(Sample sample, int sampleDescriptionId, boolean isSyncSample) { + mediaDuration += sample.duration; + sampleCount++; // Keep track of sync samples. If all samples in a track are sync // samples, we do not need to create a syncSample list. 
if (isSyncSample) { if (syncSamples != null) { - for (long i = sampleCount - chunk.sampleCount; i < sampleCount; i++) { - syncSamples.add(i); - } + syncSamples.add(sampleCount); } } else { if (syncSamples == null) { @@ -876,21 +910,26 @@ public void addChunk(Chunk chunk, boolean isSyncSample) { // if (timeToSamples.isEmpty()// - || !timeToSamples.getLast().maybeAddChunk(chunk)) { - timeToSamples.add(new TimeToSampleGroup(chunk)); + || !timeToSamples.get(timeToSamples.size() - 1).maybeAddSample(sample)) { + timeToSamples.add(new TimeToSampleGroup(sample)); } if (sampleSizes.isEmpty()// - || !sampleSizes.getLast().maybeAddChunk(chunk)) { - sampleSizes.add(new SampleSizeGroup(chunk)); + || !sampleSizes.get(sampleSizes.size() - 1).maybeAddSample(sample)) { + sampleSizes.add(new SampleSizeGroup(sample)); } if (chunks.isEmpty()// - || !chunks.getLast().maybeAddChunk(chunk)) { - chunks.add(chunk); + || !chunks.get(chunks.size() - 1).maybeAddSample(sample, sampleDescriptionId)) { + chunks.add(new Chunk(sample, sampleDescriptionId)); } } - public boolean isEmpty() { - return sampleCount == 0; + /** + * Gets the time of the first sample in the movie timescale. + * + * @param movieTimeScale The timescale of the movie. + */ + public int getFirstSampleTime(long movieTimeScale) { + return startTime == null ? 0 : startTime.multiply(movieTimeScale).intValue(); } public long getSampleCount() { @@ -898,9 +937,9 @@ public long getSampleCount() { } /** - * Gets the track duration in the movie time scale. + * Gets the track duration in the movie timescale. * - * @param movieTimeScale The time scale of the movie. + * @param movieTimeScale The timescale of the movie. */ public long getTrackDuration(long movieTimeScale) { if (editList == null || editList.length == 0) { @@ -914,236 +953,81 @@ public long getTrackDuration(long movieTimeScale) { } } - /** - * Gets the time of the first sample in the movie time scale. - * - * @param movieTimeScale The time scale of the movie. 
- */ - public int getFirstSampleTime(long movieTimeScale) { - return inputTime == null ? 0 : inputTime.multiply(movieTimeScale).intValue(); + public boolean isEmpty() { + return sampleCount == 0; + } + + public boolean isEnabled() { + return (headerFlags & TrackEnable) != 0; + } + + public void setEnabled(boolean newValue) { + headerFlags = (newValue) ? headerFlags | TrackEnable : headerFlags & (0xff ^ TrackEnable); } - // protected abstract void writeMediaInformationHeaderAtom(CompositeAtom minfAtom) throws IOException; - protected abstract void writeSampleDescriptionAtom(CompositeAtom stblAtom) throws IOException; + public boolean isInMovie() { + return (headerFlags & TrackInPreview) != 0; + } + + public void setInMovie(boolean newValue) { + headerFlags = (newValue) ? headerFlags | TrackInMovie : headerFlags & (0xff ^ TrackInMovie); + } + + public boolean isInPoster() { + return (headerFlags & TrackInPoster) != 0; + } + + public void setInPoster(boolean newValue) { + headerFlags = (newValue) ? headerFlags | TrackInPoster : headerFlags & (0xff ^ TrackInPoster); + } + + public boolean isInPreview() { + return (headerFlags & TrackInPreview) != 0; + } + + public void setInPreview(boolean newValue) { + headerFlags = (newValue) ? headerFlags | TrackInPreview : headerFlags & (0xff ^ TrackInPreview); + } } protected class VideoTrack extends Track { // Video metadata /** - * The video compression quality. + * AVC decoder configuration record. */ - protected float videoQuality = 0.97f; + public AvcDecoderConfigurationRecord avcDecoderConfigurationRecord; + /** + * The color table used for rendering the video. This variable is only + * used when the video uses an index color model. + */ + public IndexColorModel videoColorTable; /** * Number of bits per ixel. All frames must have the same depth. The * value -1 is used to mark unspecified depth. */ - protected int videoDepth = -1; + public int videoDepth = -1; /** - * The color table used for rendering the video. 
This variable is only - * used when the video uses an index color model. + * The video compression quality. */ - protected IndexColorModel videoColorTable; + public float videoQuality = 0.97f; public VideoTrack() { super(MediaType.VIDEO); } - @Override - protected void writeSampleDescriptionAtom(CompositeAtom stblAtom) throws IOException { - CompositeAtom leaf; - DataAtomOutputStream d; - - /* Sample Description atom ------- */ - // The sample description atom stores information that allows you to - // decode samples in the media. The data stored in the sample - // description varies, depending on the media type. For example, in the - // case of video media, the sample descriptions are image description - // structures. The sample description information for each media type is - // explained in “Media Data Atom Types”: - // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap3/chapter_4_section_1.html#//apple_ref/doc/uid/TP40000939-CH205-SW1 - leaf = new CompositeAtom("stsd"); - stblAtom.add(leaf); - /* - typedef struct { - byte version; - byte[3] flags; - int numberOfEntries; - sampleDescriptionEntry sampleDescriptionTable[numberOfEntries]; - } sampleDescriptionAtom; - - typedef struct { - int size; - magic type; - byte[6] reserved; // six bytes that must be zero - short dataReferenceIndex; // A 16-bit integer that contains the index - //of the data reference to use to retrieve - //data associated with samples that use this - //sample description. Data references are - //stored in data reference atoms. - byte[size - 16] data; - } sampleDescriptionEntry; - */ - d = leaf.getOutputStream(); - d.write(0); // version - // A 1-byte specification of the version of this sample description atom. - - d.write(0); // flag[0] - d.write(0); // flag[1] - d.write(0); // flag[2] - // A 3-byte space for sample description flags. Set this field to 0. - - d.writeInt(1); // number of Entries - // A 32-bit integer containing the number of sample descriptions that follow. 
- - // A 32-bit integer indicating the number of bytes in the sample description. - d.writeInt(86); // sampleDescriptionTable[0].size - - d.writeType(mediaCompressionType); // sampleDescriptionTable[0].type - - // A 32-bit integer indicating the format of the stored data. - // This depends on the media type, but is usually either the - // compression format or the media type. - - d.write(new byte[6]); // sampleDescriptionTable[0].reserved - // Six bytes that must be set to 0. - - d.writeShort(1); // sampleDescriptionTable[0].dataReferenceIndex - // A 16-bit integer that contains the index of the data - // reference to use to retrieve data associated with samples - // that use this sample description. Data references are stored - // in data reference atoms. - - // Video Sample Description - // ------------------------ - // The format of the following fields is described here: - // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap3/chapter_4_section_2.html#//apple_ref/doc/uid/TP40000939-CH205-BBCGICBJ - - d.writeShort(0); // sampleDescriptionTable.videoSampleDescription.version - // A 16-bit integer indicating the version number of the - // compressed data. This is set to 0, unless a compressor has - // changed its data format. - - d.writeShort(0); // sampleDescriptionTable.videoSampleDescription.revisionLevel - // A 16-bit integer that must be set to 0. - - d.writeType("java"); // sampleDescriptionTable.videoSampleDescription.manufacturer - // A 32-bit integer that specifies the developer of the - // compressor that generated the compressed data. Often this - // field contains 'appl' to indicate Apple Computer, Inc. - - d.writeInt(0); // sampleDescriptionTable.videoSampleDescription.temporalQuality - // A 32-bit integer containing a value from 0 to 1023 indicating - // the degree of temporal compression. 
- - d.writeInt((int) (1024 * (1 - videoQuality))); // sampleDescriptionTable.videoSampleDescription.spatialQuality - // A 32-bit integer containing a value from 0 to 1024 indicating - // the degree of spatial compression. - - d.writeUShort((int) width); // sampleDescriptionTable.videoSampleDescription.width - // A 16-bit integer that specifies the width of the source image - // in pixels. - - d.writeUShort((int) height); // sampleDescriptionTable.videoSampleDescription.height - // A 16-bit integer that specifies the height of the source image in pixels. - - d.writeFixed16D16(72.0); // sampleDescriptionTable.videoSampleDescription.horizontalResolution - // A 32-bit fixed-point number containing the horizontal - // resolution of the image in pixels per inch. - - d.writeFixed16D16(72.0); // sampleDescriptionTable.videoSampleDescription.verticalResolution - // A 32-bit fixed-point number containing the vertical - // resolution of the image in pixels per inch. - - d.writeInt(0); // sampleDescriptionTable.videoSampleDescription.dataSize - // A 32-bit integer that must be set to 0. - - d.writeShort(1); // sampleDescriptionTable.videoSampleDescription.sampleCount - // A 16-bit integer that indicates how many bytes of compressed - // data are stored in each sample. Usually set to 1. - - d.writePString(mediaCompressorName, 32); // sampleDescriptionTable.videoSampleDescription.compressorName - // A 32-byte Pascal string containing the name of the compressor - // that created the image, such as "jpeg". - - d.writeShort(videoDepth); // sampleDescriptionTable.videoSampleDescription.depth - // A 16-bit integer that indicates the pixel depth of the - // compressed image. Values of 1, 2, 4, 8 ,16, 24, and 32 - // indicate the depth of color images. The value 32 should be - // used only if the image contains an alpha channel. Values of - // 34, 36, and 40 indicate 2-, 4-, and 8-bit grayscale, - // respectively, for grayscale images. - - d.writeShort(videoColorTable == null ? 
-1 : 0); // sampleDescriptionTable.videoSampleDescription.colorTableID - // A 16-bit integer that identifies which color table to use. - // If this field is set to –1, the default color table should be - // used for the specified depth. For all depths below 16 bits - // per pixel, this indicates a standard Macintosh color table - // for the specified depth. Depths of 16, 24, and 32 have no - // color table. - } - - /** - * Color table atoms define a list of preferred colors for displaying - * the movie on devices that support only 256 colors. The list may - * contain up to 256 colors. These optional atoms have a type value of - * 'ctab'. The color table atom contains a Macintosh color table data - * structure. - * - * @param stblAtom - * @throws IOException - */ - protected void writeColorTableAtom(CompositeAtom stblAtom) throws IOException { - DataAtom leaf; - DataAtomOutputStream d; - leaf = new DataAtom("ctab"); - stblAtom.add(leaf); - - d = leaf.getOutputStream(); - - d.writeUInt(0); // Color table seed. A 32-bit integer that must be set to 0. - d.writeUShort(0x8000); // Color table flags. A 16-bit integer that must be set to 0x8000. - d.writeUShort(videoColorTable.getMapSize() - 1); - // Color table size. A 16-bit integer that indicates the number of - // colors in the following color array. This is a zero-relative value; - // setting this field to 0 means that there is one color in the array. - - for (int i = 0, n = videoColorTable.getMapSize(); i < n; ++i) { - // An array of colors. Each color is made of four unsigned 16-bit integers. - // The first integer must be set to 0, the second is the red value, - // the third is the green value, and the fourth is the blue value. 
- d.writeUShort(0); - d.writeUShort((videoColorTable.getRed(i) << 8) | videoColorTable.getRed(i)); - d.writeUShort((videoColorTable.getGreen(i) << 8) | videoColorTable.getGreen(i)); - d.writeUShort((videoColorTable.getBlue(i) << 8) | videoColorTable.getBlue(i)); - } - } } protected class AudioTrack extends Track { // Audio metadata /** - * Number of sound channels used by the sound sample. - */ - protected int soundNumberOfChannels; - /** - * Number of bits per audio sample before compression. - */ - protected int soundSampleSize; - /** - * Sound compressionId. The value -1 means fixed bit rate, -2 means - * variable bit rate. - */ - protected int soundCompressionId; - /** - * Sound stsd samples per packet. The number of uncompressed samples - * generated by a compressed sample (an uncompressed sample is one - * sample from each channel). This is also the sample duration, - * expressed in the media’s timescale, where the timescale is equal to - * the sample rate. For uncompressed formats, this field is always 1. + * The number of bytes in a frame: for uncompressed audio, an + * uncompressed frame; for compressed audio, a compressed frame. This + * can be calculated by multiplying the bytes per packet field by the + * number of channels. */ - protected long soundSamplesPerPacket; + public long soundBytesPerFrame; /** * For uncompressed audio, the number of bytes in a sample for a single * channel. This replaces the older sampleSize field, which is set to @@ -1152,275 +1036,49 @@ protected class AudioTrack extends Track { * of this field for compressed audio, but the result of the calculation * is not generally meaningful for compressed audio. */ - protected long soundBytesPerPacket; - /** - * The number of bytes in a frame: for uncompressed audio, an - * uncompressed frame; for compressed audio, a compressed frame. This - * can be calculated by multiplying the bytes per packet field by the - * number of channels. 
- */ - protected long soundBytesPerFrame; + public long soundBytesPerPacket; /** * The size of an uncompressed sample in bytes. This is set to 1 for * 8-bit audio, 2 for all other cases, even if the sample size is * greater than 2 bytes. */ - protected long soundBytesPerSample; + public long soundBytesPerSample; /** - * Sound sample rate. The integer portion must match the media's time - * scale. + * Sound compressionId. The value -1 means fixed bit rate, -2 means + * variable bit rate. */ - protected double soundSampleRate; + public int soundCompressionId; /** - * Extensions to the stsd chunk. Must contain atom-based fields: ([long - * size, long type, some data], repeat) + * Number of sound channels used by the sound sample. */ - protected byte[] stsdExtensions = new byte[0]; - - public AudioTrack() { - super(MediaType.AUDIO); - } - - @Override - protected void writeSampleDescriptionAtom(CompositeAtom stblAtom) throws IOException { - // TO DO - DataAtom leaf; - DataAtomOutputStream d; - - /* Sample Description atom ------- */ - // The sample description atom stores information that allows you to - // decode samples in the media. The data stored in the sample - // description varies, depending on the media type. For example, in the - // case of video media, the sample descriptions are image description - // structures. 
The sample description information for each media type is - // explained in “Media Data Atom Types”: - // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap3/chapter_4_section_1.html#//apple_ref/doc/uid/TP40000939-CH205-SW1 - leaf = new DataAtom("stsd"); - stblAtom.add(leaf); - /* - typedef struct { - byte version; - byte[3] flags; - int numberOfEntries; - soundSampleDescriptionEntry sampleDescriptionTable[numberOfEntries]; - } soundSampleDescriptionAtom; - - typedef struct { - int size; - magic type; - byte[6] reserved; - short dataReferenceIndex; - soundSampleDescription data; - } soundSampleDescriptionEntry; - - typedef struct { - ushort version; - ushort revisionLevel; - uint vendor; - ushort numberOfChannels; - ushort sampleSize; - short compressionId; - ushort packetSize; - fixed16d16 sampleRate; - byte[] extendedData; - } soundSampleDescription; - */ - d = leaf.getOutputStream(); - - // soundSampleDescriptionAtom: - // --------------------------- - d.write(0); // version - // A 1-byte specification of the version of this sample description atom. - - d.write(0); // flag[0] - d.write(0); // flag[1] - d.write(0); // flag[2] - // A 3-byte space for sample description flags. Set this field to 0. - - d.writeInt(1); // number of Entries - // A 32-bit integer containing the number of sample descriptions that follow. - - // soundSampleDescriptionEntry: - // ---------------------------- - // A 32-bit integer indicating the number of bytes in the sample description. - d.writeUInt(4 + 12 + 20 + 16 + stsdExtensions.length); // sampleDescriptionTable[0].size - - // Common header: 12 bytes - d.writeType(mediaCompressionType); // sampleDescriptionTable[0].type - // A 32-bit integer indicating the format of the stored data. - // This depends on the media type, but is usually either the - // compression format or the media type. - - d.write(new byte[6]); // sampleDescriptionTable[0].reserved - // Six bytes that must be set to 0. 
- - d.writeUShort(1); // sampleDescriptionTable[0].dataReferenceIndex - // A 16-bit integer that contains the index of the data - // reference to use to retrieve data associated with samples - // that use this sample description. Data references are stored - // in data reference atoms. - - // Sound Sample Description (Version 0) 20 bytes - // ------------------------ - - d.writeUShort(1); // version - // A 16-bit integer that holds the sample description version (currently 0 or 1). - - d.writeUShort(0); // revisionLevel - // A 16-bit integer that must be set to 0. - - d.writeUInt(0); // vendor - // A 32-bit integer that must be set to 0. - - d.writeUShort(soundNumberOfChannels); // numberOfChannels - // A 16-bit integer that indicates the number of sound channels used by - // the sound sample. Set to 1 for monaural sounds, 2 for stereo sounds. - // Higher numbers of channels are not supported. - - d.writeUShort(soundSampleSize); // sampleSize (bits) - // A 16-bit integer that specifies the number of bits in each - // uncompressed sound sample. Allowable values are 8 or 16. Formats - // using more than 16 bits per sample set this field to 16 and use sound - // description version 1. - - d.writeUShort(soundCompressionId); // compressionId - // XXX - This must be set to -1, or the QuickTime player won't accept this file. - // A 16-bit integer that must be set to 0 for version 0 sound - // descriptions. This may be set to –2 for some version 1 sound - // descriptions; see “Redefined Sample Tables” (page 135). - - d.writeUShort(0); // packetSize - // A 16-bit integer that must be set to 0. - - d.writeFixed16D16(soundSampleRate); // sampleRate - // A 32-bit unsigned fixed-point number (16.16) that indicates the rate - // at which the sound samples were obtained. The integer portion of this - // number should match the media’s time scale. Many older version 0 - // files have values of 22254.5454 or 11127.2727, but most files have - // integer values, such as 44100. 
Sample rates greater than 2^16 are not - // supported. - - // Sound Sample Description Additional fields (only in Version 1) 16 bytes - // ------------------------ - d.writeUInt(soundSamplesPerPacket); // samplesPerPacket - // A 32-bit integer. - // The number of uncompressed samples generated by a - // compressed sample (an uncompressed sample is one sample - // from each channel). This is also the sample duration, - // expressed in the media’s timescale, where the - // timescale is equal to the sample rate. For - // uncompressed formats, this field is always 1. - // - d.writeUInt(soundBytesPerPacket); // bytesPerPacket - // A 32-bit integer. - // For uncompressed audio, the number of bytes in a - // sample for a single channel. This replaces the older - // sampleSize field, which is set to 16. - // This value is calculated by dividing the frame size - // by the number of channels. The same calculation is - // performed to calculate the value of this field for - // compressed audio, but the result of the calculation - // is not generally meaningful for compressed audio. - // - d.writeUInt(soundBytesPerFrame); // bytesPerFrame - // A 32-bit integer. - // The number of bytes in a sample: for uncompressed - // audio, an uncompressed frame; for compressed audio, a - // compressed frame. This can be calculated by - // multiplying the bytes per packet field by the number - // of channels. - // - d.writeUInt(soundBytesPerSample); // bytesPerSample - // A 32-bit integer. - // The size of an uncompressed sample in bytes. This is - // set to 1 for 8-bit audio, 2 for all other cases, even - // if the sample size is greater than 2 bytes. - - // Write stsd Extensions - // Extensions must be atom-based fields - // ------------------------------------ - d.write(stsdExtensions); - } - } - - /** - * An {@code Edit} define the portions of the media that are to be used to - * build up a track for a movie. 
The edits themselves are stored in an edit - * list table, which consists of time offset and duration values for each - * segment.

In the absence of an edit list, the presentation of the - * track starts immediately. An empty edit is used to offset the start time - * of a track. - */ - public static class Edit { - + public int soundNumberOfChannels; /** - * A 32-bit integer that specifies the duration of this edit segment in - * units of the movie's time scale. + * Sound sample rate. The integer portion must match the media's time + * scale. */ - public int trackDuration; + public double soundSampleRate; /** - * A 32-bit integer containing the start time within the media of this - * edit segment (in media time scale units). If this field is set to -1, - * it is an empty edit. The last edit in a track should never be an - * empty edit. Any differece between the movie's duration and the - * track's duration is expressed as an implicit empty edit. + * Number of bits per audio sample before compression. */ - public int mediaTime; + public int soundSampleSize; /** - * A 32-bit fixed-point number (16.16) that specifies the relative rate - * at which to play the media corresponding to this edit segment. This - * rate value cannot be 0 or negative. + * Sound stsd samples per packet. The number of uncompressed samples + * generated by a compressed sample (an uncompressed sample is one + * sample from each channel). This is also the sample duration, + * expressed in the media’s timescale, where the timescale is equal to + * the sample rate. For uncompressed formats, this field is always 1. */ - public int mediaRate; - + public long soundSamplesPerPacket; /** - * Creates an edit. - * - * @param trackDuration Duration of this edit in the movie's time scale. - * @param mediaTime Start time of this edit in the media's time scale. - * Specify -1 for an empty edit. The last edit in a track should never - * be an empty edit. - * @param mediaRate The relative rate at which to play this edit. + * Extensions to the stsd chunk. 
Must contain atom-based fields: ([long + * size, long type, some data], repeat) */ - public Edit(int trackDuration, int mediaTime, double mediaRate) { - if (trackDuration < 0) { - throw new IllegalArgumentException("trackDuration must not be < 0:" + trackDuration); - } - if (mediaTime < -1) { - throw new IllegalArgumentException("mediaTime must not be < -1:" + mediaTime); - } - if (mediaRate <= 0) { - throw new IllegalArgumentException("mediaRate must not be <= 0:" + mediaRate); - } - this.trackDuration = trackDuration; - this.mediaTime = mediaTime; - this.mediaRate = (int) (mediaRate * (1 << 16)); - } + public byte[] stsdExtensions = new byte[0]; - /** - * Creates an edit.

Use this constructor only if you want to compute - * the fixed point media rate by yourself. - * - * @param trackDuration Duration of this edit in the movie's time scale. - * @param mediaTime Start time of this edit in the media's time scale. - * Specify -1 for an empty edit. The last edit in a track should never - * be an empty edit. - * @param mediaRate The relative rate at which to play this edit given - * as a 16.16 fixed point value. - */ - public Edit(int trackDuration, int mediaTime, int mediaRate) { - if (trackDuration < 0) { - throw new IllegalArgumentException("trackDuration must not be < 0:" + trackDuration); - } - if (mediaTime < -1) { - throw new IllegalArgumentException("mediaTime must not be < -1:" + mediaTime); - } - if (mediaRate <= 0) { - throw new IllegalArgumentException("mediaRate must not be <= 0:" + mediaRate); - } - this.trackDuration = trackDuration; - this.mediaTime = mediaTime; - this.mediaRate = mediaRate; + public AudioTrack() { + super(MediaType.AUDIO); } + + } } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/AvcDecoderConfigurationRecord.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/AvcDecoderConfigurationRecord.java new file mode 100644 index 0000000..43ec3d9 --- /dev/null +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/AvcDecoderConfigurationRecord.java @@ -0,0 +1,58 @@ +/* + * @(#)AvcDecoderConfigurationRecord.java + * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. + */ + +package org.monte.media.qtff; + +import org.monte.media.util.ByteArray; + +import java.util.Set; + +/** + * Header data for an encoded H.264 frame in a {@link org.monte.media.av.Buffer}. + *

+ * The {@link org.monte.media.av.Buffer#data} contains the data for video + * coding layer (VCL). The VCL data is embedded in data for the network + * abstraction layer (NAL). The NAL describes how the stream could be split + * into packets - Network Abstraction Layer units (NALUnit). + *

+ * There are two "classes" of NAL units types defined in ITU-T Specification's + * Annex A - VCL and non-VCL NAL units. The first one holds the encoded video, + * while the other does not contain video data at all. + *

+ * VCL: + *

+ *
Coded slice of a non-IDR picture (non-IDR)
+ *
contains a part or a complete non-keyframe + * (that is: P-frame or a B-frame)
+ *
Coded slice of an IDR picture (IDR)
+ *
contains a part or a complete keyframe (also known as I-frame). The name IDR (that stands for instantaneous decoding refresh) originates from the fact that the decoder can "forget" the previous frames when the new keyframe appears, since it contains the complete information about the frame.
+ *
+ * non-VCL: + *
+ *
Sequence parameter set (SPS)
+ *
contains metadata that is applicable to one or more coded video sequences. In that NALu you will find information allowing you to calculate the video resolution or H.264 profile.
+ *
Picture parameter set (PPS)
+ *
contains metadata applicable to one or more coded pictures
+ *
Access unit delimiter (AUD)
+ *
just a separator between access units
+ *
Supplemental enhancement information (SEI)
+ *
contains some additional metadata that "assist in processes related to decoding, display or other purposes". At the same time, information stored in SEI is not required to restore the picture during the decoding process, so the decoders are not obliged to process SEI. In fact, SEI is defined as Annex D to the ITU specification
+ *
+ * References: + *
+ *
H.264 layers - VCL vs NAL
membrane.stream
+ *
NALu types
membrane.stream
+ *
+ * + * @param sequenceParameterSetNALUnit List of SPS NALUnits. + * @param pictureParameterSetNALUnit List of PPS NALUnits. + */ +public record AvcDecoderConfigurationRecord( + int avcProfileIndication, + int profileCompatibility, + int avcLevelIndication, + int nalLengthSize, + Set sequenceParameterSetNALUnit, Set pictureParameterSetNALUnit) { +} diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QTFFImageInputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/QTFFImageInputStream.java similarity index 88% rename from org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QTFFImageInputStream.java rename to org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/QTFFImageInputStream.java index 0f8272a..d3c9b80 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QTFFImageInputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/QTFFImageInputStream.java @@ -1,8 +1,8 @@ /* * @(#)QTFFImageInputStream.java - * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. + * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. 
*/ -package org.monte.media.quicktime; +package org.monte.media.qtff; import org.monte.media.io.FilterImageInputStream; @@ -10,7 +10,7 @@ import java.io.IOException; import java.nio.ByteOrder; import java.nio.charset.StandardCharsets; -import java.util.Date; +import java.time.Instant; import java.util.GregorianCalendar; import static java.lang.Math.min; @@ -23,7 +23,7 @@ public class QTFFImageInputStream extends FilterImageInputStream { protected static final long MAC_TIMESTAMP_EPOCH = new GregorianCalendar(1904, GregorianCalendar.JANUARY, 1).getTimeInMillis(); - public QTFFImageInputStream(ImageInputStream in) { + public QTFFImageInputStream(ImageInputStream in) throws IOException { super(in); setByteOrder(ByteOrder.BIG_ENDIAN); } @@ -34,9 +34,9 @@ public QTFFImageInputStream(ImageInputStream in) { * @return the date corresponding to the Mac timestamp * @throws IOException if an I/O error occurs */ - public Date readMacTimestamp() throws IOException { + public Instant readMacTimestamp() throws IOException { long timestamp = ((long) readInt()) & 0xffffffffL; - return new Date(MAC_TIMESTAMP_EPOCH + timestamp * 1000); + return Instant.ofEpochMilli(MAC_TIMESTAMP_EPOCH + timestamp * 1000); } /** @@ -79,11 +79,7 @@ public String readType() throws IOException { public String readPString() throws IOException { int size = readUnsignedByte(); if (size == 0) { - size = readUnsignedByte(); - skipBytes(2); // why do we skip two bytes here? - } - if (size < 0) { - return ""; + size = readUnsignedShort(); } byte[] b = (size <= byteBuf.length) ? 
byteBuf : new byte[size]; readFully(b, 0, size); diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/DataAtomOutputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/QTFFImageOutputStream.java similarity index 53% rename from org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/DataAtomOutputStream.java rename to org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/QTFFImageOutputStream.java index 233b093..2cd92ff 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/DataAtomOutputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/qtff/QTFFImageOutputStream.java @@ -1,17 +1,18 @@ /* - * @(#)DataAtomOutputStream.java - * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. + * @(#)QTFFImageOutputStream.java + * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. */ -package org.monte.media.quicktime; +package org.monte.media.qtff; -import org.monte.media.io.ByteArray; -import java.io.FilterOutputStream; +import org.monte.media.io.FilterImageOutputStream; +import org.monte.media.util.ByteArrays; + +import javax.imageio.stream.ImageOutputStream; import java.io.IOException; -import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.nio.charset.StandardCharsets; -import java.util.Date; +import java.time.Instant; import java.util.GregorianCalendar; /** @@ -20,17 +21,12 @@ * * @author Werner Randelshofer */ -public class DataAtomOutputStream extends FilterOutputStream { +public class QTFFImageOutputStream extends FilterImageOutputStream { protected static final long MAC_TIMESTAMP_EPOCH = new GregorianCalendar(1904, GregorianCalendar.JANUARY, 1).getTimeInMillis(); - /** - * The number of bytes written to the data output stream so far. - * If this counter overflows, it will be wrapped to Integer.MAX_VALUE. 
- */ - protected long written; private final byte[] byteBuffer = new byte[8]; - public DataAtomOutputStream(OutputStream out) { + public QTFFImageOutputStream(ImageOutputStream out) throws IOException { super(out); } @@ -40,128 +36,45 @@ public DataAtomOutputStream(OutputStream out) { * @param s A string with a length of 4 characters. */ public void writeType(String s) throws IOException { + if (s == null) { + writeInt(0); + return; + } if (s.length() != 4) { throw new IllegalArgumentException("type string must have 4 characters"); } try { - out.write(s.getBytes(StandardCharsets.US_ASCII), 0, 4); - incCount(4); + write(s.getBytes(StandardCharsets.US_ASCII), 0, 4); } catch (UnsupportedEncodingException e) { throw new InternalError(e.toString()); } } - /** - * Writes out a byte to the underlying output stream as - * a 1-byte value. If no exception is thrown, the counter - * written is incremented by 1. - * - * @param v a byte value to be written. - * @throws IOException if an I/O error occurs. - * @see FilterOutputStream#out - */ - public final void writeByte(int v) throws IOException { - out.write(v); - incCount(1); - } /** - * Writes len bytes from the specified byte array - * starting at offset off to the underlying output stream. - * If no exception is thrown, the counter written is - * incremented by len. - * - * @param b the data. - * @param off the start offset in the data. - * @param len the number of bytes to write. - * @throws IOException if an I/O error occurs. - * @see FilterOutputStream#out - */ - @Override - public synchronized void write(byte[] b, int off, int len) - throws IOException { - out.write(b, off, len); - incCount(len); - } - - /** - * Writes the specified byte (the low eight bits of the argument - * b) to the underlying output stream. If no exception - * is thrown, the counter written is incremented by - * 1. - *

- * Implements the write method of OutputStream. - * - * @param b the byte to be written. - * @throws IOException if an I/O error occurs. - * @see FilterOutputStream#out - */ - @Override - public synchronized void write(int b) throws IOException { - out.write(b); - incCount(1); - } - - /** - * Writes an int to the underlying output stream as four - * bytes, high byte first. If no exception is thrown, the counter - * written is incremented by 4. + * Writes a BCD2 (one byte) to the underlying output stream. * * @param v an int to be written. * @throws IOException if an I/O error occurs. - * @see FilterOutputStream#out - */ - public void writeInt(int v) throws IOException { - ByteArray.setIntBE(byteBuffer, 0, v); - write(byteBuffer, 0, 4); - } - - /** - * Writes an unsigned 32-bit integer value. - * - * @param v The value to be written. - * @throws IOException if an I/O error occurs. - */ - public void writeUInt(long v) throws IOException { - ByteArray.setIntBE(byteBuffer, 0, (int) v); - write(byteBuffer, 0, 4); - } - - /** - * Writes a signed 16-bit integer value. - * - * @param v The value to be written. - * @throws IOException if an I/O error occurs. - */ - public void writeShort(int v) throws IOException { - ByteArray.setShortBE(byteBuffer, 0, (short) v); - write(byteBuffer, 0, 2); - } - - /** - * Writes a BCD2 to the underlying output stream. - * - * @param v an int to be written. - * @throws IOException if an I/O error occurs. - * @see FilterOutputStream#out */ public void writeBCD2(int v) throws IOException { - out.write(((v % 100 / 10) << 4) | (v % 10)); - incCount(1); + write(((v % 100 / 10) << 4) | (v % 10)); } /** - * Writes a BCD4 to the underlying output stream. + * Writes a BCD4 (two bytes) to the underlying output stream. * * @param v an int to be written. * @throws IOException if an I/O error occurs. 
- * @see FilterOutputStream#out */ public void writeBCD4(int v) throws IOException { - out.write(((v % 10000 / 1000) << 4) | (v % 1000 / 100)); - out.write(((v % 100 / 10) << 4) | (v % 10)); - incCount(2); + writeShort( + ((v % 10000 / 1000) << 12) + | ((v % 1000 / 100) << 8) + | ((v % 100 / 10) << 4) + | (v % 10) + ); } /** @@ -170,11 +83,11 @@ public void writeBCD4(int v) throws IOException { * @param date the date to be converted to a Mac timestamp * @throws IOException if an I/O error occurs */ - public void writeMacTimestamp(Date date) throws IOException { - long millis = date.getTime(); + public void writeMacTimestamp(Instant date) throws IOException { + long millis = date.toEpochMilli(); long qtMillis = millis - MAC_TIMESTAMP_EPOCH; long qtSeconds = qtMillis / 1000; - writeUInt(qtSeconds); + writeInt((int) qtSeconds); } /** @@ -182,7 +95,6 @@ public void writeMacTimestamp(Date date) throws IOException { * * @param f an int to be written. * @throws IOException if an I/O error occurs. - * @see FilterOutputStream#out */ public void writeFixed16D16(double f) throws IOException { double v = (f >= 0) ? f : -f; @@ -202,7 +114,6 @@ public void writeFixed16D16(double f) throws IOException { * * @param f an int to be written. * @throws IOException if an I/O error occurs. - * @see FilterOutputStream#out */ public void writeFixed2D30(double f) throws IOException { double v = (f >= 0) ? f : -f; @@ -222,7 +133,6 @@ public void writeFixed2D30(double f) throws IOException { * * @param f an int to be written. * @throws IOException if an I/O error occurs. - * @see FilterOutputStream#out */ public void writeFixed8D8(double f) throws IOException { double v = (f >= 0) ? f : -f; @@ -234,7 +144,22 @@ public void writeFixed8D8(double f) throws IOException { if (f < 0) { t = t - 1; } - writeUShort(t); + writeShort(t); + } + + /** + * Writes a zero-terminated C String. 
+ * + * @param s the string to be written + * @throws IOException if an I/O error occurs + */ + public void writeCString(String s) throws IOException { + for (int i = 0; i < s.length(); i++) { + byte ch = (byte) s.charAt(i); + if (ch == 0) break; + write(ch); + } + write(0); } /** @@ -244,19 +169,23 @@ public void writeFixed8D8(double f) throws IOException { * @throws IOException if an I/O error occurs */ public void writePString(String s) throws IOException { + if (s == null) { + write(0); + writeShort(0); + return; + } if (s.length() > 0xffff) { throw new IllegalArgumentException("String too long for PString"); } if (!s.isEmpty() && s.length() < 256) { - out.write(s.length()); + write(s.length()); } else { - out.write(0); - writeShort(s.length()); // increments +2 + write(0); + writeShort(s.length()); } for (int i = 0; i < s.length(); i++) { - out.write(s.charAt(i)); + write(s.charAt(i)); } - incCount(1 + s.length()); } /** @@ -271,44 +200,21 @@ public void writePString(String s, int length) throws IOException { throw new IllegalArgumentException("String too long for PString of length " + length); } if (!s.isEmpty() && s.length() < 256) { - out.write(s.length()); + write(s.length()); } else { - out.write(0); + write(0); writeShort(s.length()); // increments +2 } for (int i = 0; i < s.length(); i++) { - out.write(s.charAt(i)); + write(s.charAt(i)); } // write pad bytes for (int i = 1 + s.length(); i < length; i++) { - out.write(0); + write(0); } - - incCount(length); - } - - public void writeLong(long v) throws IOException { - ByteArray.setLongBE(byteBuffer, 0, v); - write(byteBuffer, 0, 8); - } - - public void writeUShort(int v) throws IOException { - ByteArray.setShortBE(byteBuffer, 0, (short) v); - write(byteBuffer, 0, 2); } - /** - * Increases the written counter by the specified value - * until it reaches Long.MAX_VALUE. 
- */ - protected void incCount(int value) { - long temp = written + value; - if (temp < 0) { - temp = Long.MAX_VALUE; - } - written = temp; - } public void writeShorts(short[] s, int off, int len) throws IOException { // Fix 4430357 - if off + len < 0, overflow occurred @@ -348,7 +254,7 @@ public void writeInts(int[] i, int off, int len) throws IOException { public void writeInt24(int v) throws IOException { - ByteArray.setIntBE(byteBuffer, 0, v); + ByteArrays.setIntBE(byteBuffer, 0, v); write(byteBuffer, 0, 3); } @@ -371,14 +277,11 @@ public void writeInts24(int[] i, int off, int len) throws IOException { write(b, 0, len * 3); } - /** - * Returns the current value of the counter written, - * the number of bytes written to this data output stream so far. - * If the counter overflows, it will be wrapped to Integer.MAX_VALUE. - * - * @return the value of the written field. - */ - public final long size() { - return written; + public void writeUInt(long value) throws IOException { + writeInt((int) value); + } + + public void writeUShort(int value) throws IOException { + writeShort(value); } } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/DataAtomInputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/DataAtomInputStream.java deleted file mode 100755 index 68f6ad1..0000000 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/DataAtomInputStream.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * @(#)DataAtomInputStream.java - * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. 
- */ -package org.monte.media.quicktime; - -import org.monte.media.io.ByteArray; -import org.monte.media.io.ImageInputStreamAdapter; - -import javax.imageio.stream.ImageInputStream; -import java.io.EOFException; -import java.io.FilterInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.Date; -import java.util.GregorianCalendar; - -/** - * {@code DataAtomInputStream}. - * - * @author Werner Randelshofer - */ -public class DataAtomInputStream extends FilterInputStream { - - protected static final long MAC_TIMESTAMP_EPOCH = new GregorianCalendar(1904, GregorianCalendar.JANUARY, 1).getTimeInMillis(); - private final byte[] byteBuffer = new byte[8]; - - public DataAtomInputStream(InputStream in) { - super(in); - } - - public DataAtomInputStream(ImageInputStream in) { - super(new ImageInputStreamAdapter(in)); - } - - public final byte readByte() throws IOException { - int ch = read(); - if (ch < 0) { - throw new EOFException(); - } - return (byte) (ch); - } - - public final short readShort() throws IOException { - readFully(byteBuffer, 0, 2); - return ByteArray.getShortBE(byteBuffer, 0); - } - - public final int readInt() throws IOException { - readFully(byteBuffer, 0, 4); - return ByteArray.getIntBE(byteBuffer, 0); - } - - public final long readLong() throws IOException { - readFully(byteBuffer, 0, 8); - return ByteArray.getLongBE(byteBuffer, 0); - } - - public final int readUByte() throws IOException { - return readByte() & 0xFF; - } - - public final int readUShort() throws IOException { - return readShort() & 0xFFFF; - } - - public final long readUInt() throws IOException { - return readInt() & 0xFFFFFFFFL; - } - - public final long skipBytes(long n) throws IOException { - long total = 0; - long cur = 0; - - while ((total < n) && ((cur = (int) skip(n - total)) > 0)) { - total += cur; - } - - return total; - } - - public final void readFully(byte b[]) throws IOException { - readFully(b, 0, 
b.length); - } - - public final void readFully(byte b[], int off, int len) throws IOException { - if (len < 0) { - throw new IndexOutOfBoundsException(); - } - int n = 0; - while (n < len) { - int count = in.read(b, off + n, len - n); - if (count < 0) { - throw new EOFException(); - } - n += count; - } - } - - /** - * Reads a 32-bit Mac timestamp (seconds since 1902). - * - * @return date - * @throws java.io.IOException if an I/O error occurs - */ - public Date readMacTimestamp() throws IOException { - long timestamp = ((long) readInt()) & 0xffffffffL; - return new Date(MAC_TIMESTAMP_EPOCH + timestamp * 1000); - } - - /** - * Reads 32-bit fixed-point number divided as 16.16. - */ - public double readFixed16D16() throws IOException { - int wholePart = readUShort(); - int fractionPart = readUShort(); - - return (wholePart + fractionPart) / 65536.0; - } - - /** - * Reads 32-bit fixed-point number divided as 2.30. - */ - public double readFixed2D30() throws IOException { - int fixed = readInt(); - int wholePart = fixed >>> 30; - int fractionPart = fixed & 0x3fffffff; - - return (wholePart + fractionPart) / (double) 0x3fffffff; - } - - /** - * Reads 16-bit fixed-point number divided as 8.8. - */ - public double readFixed8D8() throws IOException { - int fixed = readUShort(); - int wholePart = fixed >>> 8; - int fractionPart = fixed & 0xff; - - return (wholePart + fractionPart) / 256.0; - } - - public String readType() throws IOException { - readFully(byteBuffer, 0, 4); - return new String(byteBuffer, 0, 4, StandardCharsets.US_ASCII); - } - - public String readPString() throws IOException { - int size = read(); - if (size == 0) { - size = read(); - skipBytes(2); // why do we skip two bytes here? - } - if (size < 0) { - return ""; - } - byte[] b = size <= byteBuffer.length ? byteBuffer : new byte[size]; - readFully(b, 0, size); - - return new String(b, 0, size, StandardCharsets.US_ASCII); - } - - /** - * Reads a Pascal String which is padded to a fixed size. 
- */ - public String readPString(int fixedSize) throws IOException { - int size = read(); - fixedSize--; - if (size == 0) { - size = read(); - skipBytes(2); // why do we skip two bytes here? - fixedSize -= 3; - } - if (size < 0) { - skipBytes(fixedSize); - return ""; - } - byte[] b = fixedSize <= byteBuffer.length ? byteBuffer : new byte[fixedSize]; - readFully(b, 0, fixedSize); - - return new String(b, 0, fixedSize, StandardCharsets.US_ASCII); - } -} diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeDeserializer.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeDeserializer.java index f587b62..aba8239 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeDeserializer.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeDeserializer.java @@ -6,6 +6,8 @@ import org.monte.media.av.FormatKeys.MediaType; import org.monte.media.io.UncachedImageInputStream; +import org.monte.media.qtff.QTFFImageInputStream; +import org.monte.media.util.MathUtil; import javax.imageio.stream.FileImageInputStream; import javax.imageio.stream.ImageInputStream; @@ -156,7 +158,7 @@ protected void parseRecursively(QTFFImageInputStream in, long remainingSize, Qui // Perform recursion: parseRecursively(in, atom.size - atom.headerSize, m); } else { - QuickTimeMeta.Track track = (m.tracks.isEmpty()) ? null : m.tracks.getLast(); + QuickTimeMeta.Track track = (m.tracks.isEmpty()) ? null : m.tracks.get(m.tracks.size() - 1); QuickTimeMeta.Media media = (track == null) ? 
null : track.media; if (null != t) { @@ -850,6 +852,8 @@ protected void parseSoundSampleDescription(QTFFImageInputStream in, long remaini d.soundBytesPerFrame = in.readUnsignedInt(); d.soundBytesPerSample = in.readUnsignedInt(); remainingEntrySize -= 16; + } else { + d.soundBytesPerFrame = ((d.soundNumberOfChannels * d.soundSampleSize) + 7) / 8; } while (remainingEntrySize > 0) { @@ -1020,10 +1024,10 @@ protected void parseVideoSampleDescription(QTFFImageInputStream in, long remaini int vendor = in.readInt(); float value1 = in.readInt() / 1024f; - d.videoTemporalQuality = Math.clamp(value1, 0.0f, 1.0f); + d.videoTemporalQuality = MathUtil.clamp(value1, 0.0f, 1.0f); float value = in.readInt() / 1024f; - d.videoSpatialQuality = Math.clamp(value, 0.0f, 1.0f); + d.videoSpatialQuality = MathUtil.clamp(value, 0.0f, 1.0f); d.videoWidth = in.readUnsignedShort(); d.videoHeight = in.readUnsignedShort(); d.videoHorizontalResolution = in.readFixed16D16(); diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeInputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeInputStream.java index d6a33db..96b0042 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeInputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeInputStream.java @@ -4,12 +4,14 @@ */ package org.monte.media.quicktime; +import org.monte.media.qtff.QTFFImageInputStream; + import javax.imageio.stream.FileImageInputStream; import javax.imageio.stream.ImageInputStream; import java.io.File; import java.io.IOException; import java.nio.ByteOrder; -import java.util.Date; +import java.time.Instant; /** * Provides low-level support for reading encoded audio and video samples from a @@ -30,6 +32,44 @@ public int getSampleCount(int track) throws IOException { return (int) tr.media.sampleCount; } + /** + * Gets the size of a sample in bytes. 
+ * + * @param track The track index. + * @param sample The sample index. + * @return the size of the sample + * @throws IOException if reading the sample data failed. + */ + public int getSampleSize(int track, int sample) throws IOException { + QuickTimeMeta.Track tr = meta.tracks.get(track); + var ts = tr.trackSamplesList.get(sample); + var ms = ts.mediaSample; + return (int) ms.length; + } + + + /** + * Reads a sample from a track into a byte array. + * + * @param track The track index. + * @param sample The sample index. + * @param data The encoded sample data. + * @param off The startTime offset in the data. + * @param len The maximal number of bytes to read + * @return the actual number of samples read + * @throws IOException if reading the sample data failed. + */ + public int readSample(int track, int sample, byte[] data, int off, int len) throws IOException { + QuickTimeMeta.Track tr = meta.tracks.get(track); + var ts = tr.trackSamplesList.get(sample); + var ms = ts.mediaSample; + in.seek(ms.offset); + if (len < ms.length) throw new IOException("len=" + len + " is too small. Should be at least len=" + ms.length); + int bytesRead = Math.min((int) ms.length, len); + in.readFully(data, off, bytesRead); + return bytesRead; + } + /** * The image input stream. */ @@ -71,7 +111,7 @@ public int getTrackCount() throws IOException { return meta.getTrackCount(); } - public long getMovieDuration() throws IOException { + public long getMovieDurationInMovieTimeScale() throws IOException { ensureRealized(); long duration = 0; long movieTimeScale = meta.getTimeScale(); @@ -84,7 +124,7 @@ public long getMovieDuration() throws IOException { /** * Gets the creation time of the movie. */ - public Date getCreationTime() throws IOException { + public Instant getCreationTime() throws IOException { ensureRealized(); return meta.getCreationTime(); } @@ -92,7 +132,7 @@ public Date getCreationTime() throws IOException { /** * Gets the modification time of the movie. 
*/ - public Date getModificationTime() throws IOException { + public Instant getModificationTime() throws IOException { ensureRealized(); return meta.getModificationTime(); } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeMeta.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeMeta.java index 7079227..7d39b8c 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeMeta.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeMeta.java @@ -13,9 +13,10 @@ import java.awt.image.IndexColorModel; import java.io.IOException; +import java.nio.ByteOrder; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; -import java.util.Date; import java.util.List; import java.util.Locale; import java.util.Map; @@ -30,6 +31,11 @@ import static org.monte.media.av.FormatKeys.MediaTypeKey; import static org.monte.media.av.FormatKeys.MimeTypeKey; import static org.monte.media.av.FormatKeys.SampleFormatKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.ByteOrderKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.ChannelsKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.FrameSizeKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.SampleRateKey; +import static org.monte.media.av.codec.audio.AudioFormatKeys.SampleSizeInBitsKey; import static org.monte.media.av.codec.video.VideoFormatKeys.CompressorNameKey; import static org.monte.media.av.codec.video.VideoFormatKeys.DepthKey; import static org.monte.media.av.codec.video.VideoFormatKeys.HeightKey; @@ -152,11 +158,11 @@ public class QuickTimeMeta extends AbstractMovie { /** * Creation time of the movie. */ - protected Date creationTime; + protected Instant creationTime; /** * Modification time of the movie. 
*/ - protected Date modificationTime; + protected Instant modificationTime; /** * The time scale of the movie. A time value that indicates the time scale * for this media—that is, the number of time units that pass per second in @@ -235,7 +241,7 @@ private void deriveTrackFormat(int trackIndex) { ); String dataFormat = (track.media != null && !track.media.sampleDescriptions.isEmpty()) - ? track.media.sampleDescriptions.getFirst().dataFormat : null; + ? track.media.sampleDescriptions.get(0).dataFormat : null; if (dataFormat != null) { format = format.append(EncodingKey, dataFormat); } @@ -248,7 +254,7 @@ private void deriveTrackFormat(int trackIndex) { if (m.sampleDescriptions.size() != 1) { throw new UnsupportedOperationException("not implemented for media with multiple sample descriptions.. " + trackIndex + " " + track.mediaType + " " + m + " " + m.sampleDescriptions); } - SampleDescription desc = m.sampleDescriptions.getFirst(); + SampleDescription desc = m.sampleDescriptions.get(0); if (desc.videoDepth == 8) { if (0 <= desc.videoColorTableId && desc.videoColorTableId < this.colorTables.size()) { @@ -266,19 +272,35 @@ private void deriveTrackFormat(int trackIndex) { DepthKey, desc.videoDepth ); if (m.timeToSamples.size() == 1) { - TimeToSampleGroup ttsg = m.timeToSamples.getFirst(); + TimeToSampleGroup ttsg = m.timeToSamples.get(0); format = format.append(FrameRateKey, new Rational(ttsg.getSampleDuration(), m.mediaTimeScale)); } else { format = format.append(FrameRateKey, new Rational(1, m.mediaTimeScale)); } break; } + case AUDIO: { + if (m.sampleDescriptions.size() != 1) { + throw new UnsupportedOperationException("not implemented for media with multiple sample descriptions.. 
" + trackIndex + " " + track.mediaType + " " + m + " " + m.sampleDescriptions); + } + + SampleDescription desc = m.sampleDescriptions.get(0); + format = format.append( + SampleFormatKey, desc.dataFormat, + SampleRateKey, Rational.valueOf(desc.soundSampleRate), + SampleSizeInBitsKey, desc.soundSampleSize, + FrameSizeKey, (int) desc.soundBytesPerFrame, + ChannelsKey, desc.soundNumberOfChannels, + ByteOrderKey, "sowt".equals(desc.dataFormat) ? ByteOrder.LITTLE_ENDIAN : ByteOrder.BIG_ENDIAN + ); + break; + } default: { if (m.sampleDescriptions.size() != 1) { throw new UnsupportedOperationException("not implemented for media with multiple sample descriptions.. " + trackIndex + " " + track.mediaType + " " + m + " " + m.sampleDescriptions); } - SampleDescription desc = m.sampleDescriptions.getFirst(); + SampleDescription desc = m.sampleDescriptions.get(0); format = format.append( SampleFormatKey, desc.dataFormat ); @@ -288,19 +310,19 @@ private void deriveTrackFormat(int trackIndex) { track.format = format; } - public Date getCreationTime() { + public Instant getCreationTime() { return creationTime; } - public void setCreationTime(Date creationTime) { + public void setCreationTime(Instant creationTime) { this.creationTime = creationTime; } - public Date getModificationTime() { + public Instant getModificationTime() { return modificationTime; } - public void setModificationTime(Date modificationTime) { + public void setModificationTime(Instant modificationTime) { this.modificationTime = modificationTime; } @@ -432,7 +454,7 @@ protected void clear() { compatibleBrands.clear(); compatibleBrands.add(brand); // Movie Header - creationTime = modificationTime = new Date(); + creationTime = modificationTime = Instant.ofEpochMilli(0); timeScale = 600; duration = 0; preferredRate = 1.0; @@ -860,11 +882,11 @@ protected static class Track { /** * Creation time of the track. */ - protected Date creationTime; + protected Instant creationTime; /** * Modification time of the track. 
*/ - protected Date modificationTime; + protected Instant modificationTime; /** * The id of the track. The value 0 cannot be used. */ @@ -1016,7 +1038,7 @@ public void buildTrackSamplesTable(long movieTimeScale) throws IOException { long mediaSampleTime = entry.getKey(); // if multiple samples have the same timestamp, then only the last one has a duration >=0 - MediaSample lastMediaSample = mediaSamples.getLast(); + MediaSample lastMediaSample = mediaSamples.get(mediaSamples.size() - 1); long mediaSampleDuration = lastMediaSample.duration; // cut duration if the media sample ends after the end time of the edit long cutStart = Math.max(0, mediaSampleTime + mediaSampleDuration - editMediaEndTime); @@ -1155,8 +1177,8 @@ public String toString() { protected static class Media { // BEGIN Media Header - protected Date mediaCreationTime; - protected Date mediaModificationTime; + protected Instant mediaCreationTime; + protected Instant mediaModificationTime; /** * The timescale of the media in the track. A time value that indicates * the timescale for this media. That is, the number of time units that diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeOutputStream.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeOutputStream.java index 1485dcd..fc9ffb3 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeOutputStream.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeOutputStream.java @@ -1,13 +1,20 @@ /* - * @(#)QuickTimeOutputStream.java + * @(#)MP4OutputStream.java * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. 
*/ package org.monte.media.quicktime; import org.monte.media.av.Format; +import org.monte.media.av.codec.video.VideoFormatKeys; +import org.monte.media.io.ByteArrayImageOutputStream; +import org.monte.media.io.IOStreams; import org.monte.media.io.ImageOutputStreamAdapter; -import org.monte.media.io.SeekableByteArrayOutputStream; import org.monte.media.math.Rational; +import org.monte.media.qtff.AbstractQTFFMovieStream; +import org.monte.media.qtff.AvcDecoderConfigurationRecord; +import org.monte.media.qtff.QTFFImageOutputStream; +import org.monte.media.util.ByteArray; +import org.monte.media.util.MathUtil; import javax.imageio.stream.FileImageOutputStream; import javax.imageio.stream.ImageOutputStream; @@ -18,12 +25,17 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; import java.nio.ByteOrder; -import java.util.Date; +import java.time.Instant; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; import java.util.zip.DeflaterOutputStream; import static java.lang.Math.max; +import static org.monte.media.av.FormatKeys.DataClassKey; import static org.monte.media.av.FormatKeys.EncodingKey; import static org.monte.media.av.FormatKeys.MIME_QUICKTIME; import static org.monte.media.av.FormatKeys.MediaType; @@ -42,7 +54,8 @@ * * @author Werner Randelshofer */ -public class QuickTimeOutputStream extends AbstractQuickTimeStream { +public class QuickTimeOutputStream extends AbstractQTFFMovieStream { + /** * Creates a new instance. 
@@ -51,7 +64,7 @@ public class QuickTimeOutputStream extends AbstractQuickTimeStream { */ public QuickTimeOutputStream(File file) throws IOException { if (file.exists()) { - file.delete(); + if (!file.delete()) throw new IOException("can not delete file"); } this.out = new FileImageOutputStream(file); this.streamOffset = 0; @@ -70,8 +83,7 @@ public QuickTimeOutputStream(ImageOutputStream out) throws IOException { } private void init() { - creationTime = new Date(); - modificationTime = new Date(); + creationTime = modificationTime = Instant.ofEpochMilli(0); } /** @@ -228,7 +240,7 @@ public int addVideoTrack(String compressionType, String compressorName, long tim if (compressionType == null || compressionType.length() != 4) { throw new IllegalArgumentException("compressionType must be 4 characters long:" + compressionType); } - if (compressorName == null || compressorName.length() < 1 || compressorName.length() > 32) { + if (compressorName == null || compressorName.isEmpty() || compressorName.length() > 32) { throw new IllegalArgumentException("compressorName must be between 1 and 32 characters long:" + (compressorName == null ? "null" : "\"" + compressorName + "\"")); } if (timeScale < 1 || timeScale > (2L << 32)) { @@ -246,7 +258,7 @@ public int addVideoTrack(String compressionType, String compressorName, long tim t.height = height; t.videoDepth = depth; t.syncInterval = syncInterval; - t.format = format; + t.format = format.prepend(VideoFormatKeys.DataClassKey, byte[].class); tracks.add(t); return tracks.size() - 1; } @@ -254,20 +266,20 @@ public int addVideoTrack(String compressionType, String compressorName, long tim /** * Adds an audio track. * - * @param compressionType The QuickTime 4-character code. A list of - * supported 4-Character codes is given in qtff, table 3-7, page 113. - * @param timeScale The media time scale between 1 and 2^32. - * @param sampleRate The sample rate. The integer portion must match the - * {@code timeScale}. 
- * @param numberOfChannels The number of channels: 1 for mono, 2 for stereo. - * @param sampleSizeInBits The number of bits in a sample: 8 or 16. - * @param isCompressed Whether the sound is compressed. - * @param frameDuration The frame duration, expressed in the media’s - * timescale, where the timescale is equal to the sample rate. For - * uncompressed formats, this field is always 1. - * @param frameSize For uncompressed audio, the number of bytes in a sample - * for a single channel (sampleSize divided by 8). For compressed audio, the - * number of bytes in a frame. + * @param compressionType The QuickTime 4-character code. A list of + * supported 4-Character codes is given in qtff, table 3-7, page 113. + * @param timeScale The media time scale between 1 and 2^32. + * @param sampleRate The sample rate. The integer portion must match the + * {@code timeScale}. + * @param numberOfChannels The number of channels: 1 for mono, 2 for stereo. + * @param sampleSizeInBits The number of bits in a sample: 8 or 16. + * @param isCompressed Whether the sound is compressed. + * @param frameDuration The frame duration, expressed in the media’s + * timescale, where the timescale is equal to the sample rate. For + * uncompressed formats, this field is always 1. + * @param soundBytesPerPacket For uncompressed audio, the number of bytes in a sample + * for a single channel (sampleSize divided by 8). For compressed audio, the + * number of bytes in a frame. * @return Returns the track index. 
* @throws IllegalArgumentException if the audioFormat is not 4 characters * long, if the time scale is not between 1 and 2^32, if the integer portion @@ -278,7 +290,7 @@ public int addAudioTrack(String compressionType, // long timeScale, double sampleRate, // int numberOfChannels, int sampleSizeInBits, // boolean isCompressed, // - int frameDuration, int frameSize, boolean signed, ByteOrder byteOrder) throws IOException { + int frameDuration, int soundBytesPerPacket, boolean signed, ByteOrder byteOrder) throws IOException { ensureStarted(); if (compressionType == null || compressionType.length() != 4) { throw new IllegalArgumentException("audioFormat must be 4 characters long:" + compressionType); @@ -305,22 +317,23 @@ public int addAudioTrack(String compressionType, // t.soundSampleSize = sampleSizeInBits; t.soundSamplesPerPacket = frameDuration; if (isCompressed) { - t.soundBytesPerPacket = frameSize; - t.soundBytesPerFrame = frameSize * numberOfChannels; + t.soundBytesPerPacket = soundBytesPerPacket; + t.soundBytesPerFrame = soundBytesPerPacket * numberOfChannels; } else { - t.soundBytesPerPacket = frameSize / numberOfChannels; - t.soundBytesPerFrame = frameSize; + t.soundBytesPerPacket = soundBytesPerPacket; + t.soundBytesPerFrame = soundBytesPerPacket * numberOfChannels; } t.soundBytesPerSample = sampleSizeInBits / 8; t.format = new Format( + DataClassKey, byte[].class, MediaTypeKey, MediaType.AUDIO, MimeTypeKey, MIME_QUICKTIME, EncodingKey, compressionType, SampleRateKey, Rational.valueOf(sampleRate), SampleSizeInBitsKey, sampleSizeInBits, ChannelsKey, numberOfChannels, - FrameSizeKey, frameSize, + FrameSizeKey, soundBytesPerPacket, SampleRateKey, Rational.valueOf(sampleRate), SignedKey, signed, ByteOrderKey, byteOrder); @@ -374,28 +387,28 @@ public int getSyncInterval(int track) { /** * Sets the creation time of the movie. 
*/ - public void setCreationTime(Date creationTime) { + public void setCreationTime(Instant creationTime) { this.creationTime = creationTime; } /** * Gets the creation time of the movie. */ - public Date getCreationTime() { + public Instant getCreationTime() { return creationTime; } /** * Sets the modification time of the movie. */ - public void setModificationTime(Date modificationTime) { + public void setModificationTime(Instant modificationTime) { this.modificationTime = modificationTime; } /** * Gets the modification time of the movie. */ - public Date getModificationTime() { + public Instant getModificationTime() { return modificationTime; } @@ -578,7 +591,7 @@ public double[] getTransformationMatrix(int track) { } /** - * Sets the state of the QuickTimeWriter to started.

If the state is + * Sets the state of the MP4Writer to started.

If the state is * changed by this method, the prolog is written. */ protected void ensureStarted() throws IOException { @@ -620,7 +633,7 @@ public void writeSample(int track, File file, long duration, boolean isSync) thr /** * Writes an already encoded sample from an input stream into a track.

* This method does not inspect the contents of the samples. The contents - * has to match the format and dimensions of the media in this track. + * have to match the format and dimensions of the media in this track. * * @param track The track index. * @param in The input stream which holds the encoded sample data. @@ -639,8 +652,8 @@ public void writeSample(int track, InputStream in, long duration, boolean isSync ensureOpen(); ensureStarted(); long offset = getRelativeStreamPosition(); - OutputStream mdatOut = mdatAtom.getOutputStream(); - in.transferTo(mdatOut); + QTFFImageOutputStream mdatOut = mdatAtom.getOutputStream(); + IOStreams.copy(in, mdatOut); long length = getRelativeStreamPosition() - offset; t.addSample(new Sample(duration, offset, length), 1, isSync); } @@ -684,7 +697,7 @@ public void writeSample(int track, byte[] data, int off, int len, long duration, ensureOpen(); ensureStarted(); long offset = getRelativeStreamPosition(); - OutputStream mdatOut = mdatAtom.getOutputStream(); + ImageOutputStream mdatOut = mdatAtom.getOutputStream(); mdatOut.write(data, off, len); t.addSample(new Sample(duration, offset, len), 1, isSync); } @@ -761,7 +774,7 @@ public void writeSamples(int track, int sampleCount, byte[] data, int off, int l ensureOpen(); ensureStarted(); long offset = getRelativeStreamPosition(); - OutputStream mdatOut = mdatAtom.getOutputStream(); + ImageOutputStream mdatOut = mdatAtom.getOutputStream(); mdatOut.write(data, off, len); @@ -771,6 +784,34 @@ public void writeSamples(int track, int sampleCount, byte[] data, int off, int l t.addChunk(new Chunk(first, last, sampleCount, 1), isSync); } + /** + * Writes an {@link AvcDecoderConfigurationRecord} into the track. 
+ * + * @param track the track index + * @param r the record + */ + public void writeAvcDecoderConfigurationRecord(int track, AvcDecoderConfigurationRecord r) { + Track t = tracks.get(track); // throws index out of bounds exception if illegal track index + if (t instanceof VideoTrack) { + VideoTrack vt = (VideoTrack) t; + AvcDecoderConfigurationRecord record = vt.avcDecoderConfigurationRecord; + if (record == null) { + record = new AvcDecoderConfigurationRecord(r.avcProfileIndication(), + r.profileCompatibility(), r.avcLevelIndication(), r.nalLengthSize(), + r.sequenceParameterSetNALUnit(), r.pictureParameterSetNALUnit()); + } else { + var pps = new LinkedHashSet<>(record.pictureParameterSetNALUnit()); + pps.addAll(r.pictureParameterSetNALUnit()); + var sps = new LinkedHashSet<>(record.sequenceParameterSetNALUnit()); + pps.addAll(r.sequenceParameterSetNALUnit()); + record = new AvcDecoderConfigurationRecord(r.avcProfileIndication(), + r.profileCompatibility(), r.avcLevelIndication(), r.nalLengthSize(), + pps, sps); + } + vt.avcDecoderConfigurationRecord = record; + } + } + /** * Returns true if the limit for media samples has been reached. If this * limit is reached, no more samples should be added to the movie.

@@ -862,15 +903,12 @@ private void writeProlog() throws IOException { } ftypAtom; */ DataAtom ftypAtom = new DataAtom("ftyp"); - DataAtomOutputStream d = ftypAtom.getOutputStream(); + QTFFImageOutputStream d = ftypAtom.getOutputStream(); d.writeType("qt "); // brand - d.writeBCD4(2005); // versionYear - d.writeBCD2(3); // versionMonth + d.writeBCD4(0); // versionYear + d.writeBCD2(0); // versionMonth d.writeBCD2(0); // versionMinor d.writeType("qt "); // compatibleBrands - d.writeInt(0); // compatibleBrands (0 is used to denote no value) - d.writeInt(0); // compatibleBrands (0 is used to denote no value) - d.writeInt(0); // compatibleBrands (0 is used to denote no value) ftypAtom.finish(); } @@ -917,7 +955,7 @@ private void writeEpilog() throws IOException { */ leaf = new DataAtom("mvhd"); moovAtom.add(leaf); - DataAtomOutputStream d = leaf.getOutputStream(); + QTFFImageOutputStream d = leaf.getOutputStream(); d.writeByte(0); // version // A 1-byte specification of the version of this movie header atom. @@ -974,7 +1012,7 @@ private void writeEpilog() throws IOException { // The matrix structure associated with this movie. A matrix shows how // to map points from one coordinate space into another. See “Matrices” // for a discussion of how display matrices are used in QuickTime: - // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap4/chapter_5_section_4.html#//apple_ref/doc/uid/TP40000939-CH206-18737 + // https://developer.apple.com/documentation/quicktime-file-format/movie_header_atom/matrix_structure d.writeUInt(previewTime); // previewTime // The time value in the movie at which the preview begins. 
@@ -1006,9 +1044,10 @@ private void writeEpilog() throws IOException { // Optional color table atom for (Track t : tracks) { - if (t instanceof VideoTrack vt) { + if (t instanceof VideoTrack) { + VideoTrack vt = (VideoTrack) t; if (vt.videoColorTable != null) { - vt.writeColorTableAtom(moovAtom); + writeVideoColorTableAtom(vt, moovAtom); break; } } @@ -1019,11 +1058,11 @@ private void writeEpilog() throws IOException { moovAtom.finish(); } - protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Date modificationTime) throws IOException { + protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Instant modificationTime) throws IOException { Track t = tracks.get(trackIndex); DataAtom leaf; - DataAtomOutputStream d; + QTFFImageOutputStream d; /* Track Atom ======== */ CompositeAtom trakAtom = new CompositeAtom("trak"); @@ -1140,8 +1179,11 @@ protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Date modi d.writeFixed16D16(m[7]); // matrix[7] d.writeFixed2D30(m[8]); // matrix[8] // The matrix structure associated with this track. - // See Figure 2-8 for an illustration of a matrix structure: - // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap2/chapter_3_section_3.html#//apple_ref/doc/uid/TP40000939-CH204-32967 + // [ 0 1 2 ] + // [ 3 4 5 ] + // [ 6 7 8 ] + // See + // https://developer.apple.com/documentation/quicktime-file-format/movie_header_atom/matrix_structure d.writeFixed16D16(t.mediaType == MediaType.VIDEO ? t.width : 0); // width // A 32-bit fixed-point number that specifies the width of this track in pixels. @@ -1149,6 +1191,49 @@ protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Date modi d.writeFixed16D16(t.mediaType == MediaType.VIDEO ? t.height : 0); // height // A 32-bit fixed-point number that indicates the height of this track in pixels. 
+ if (t instanceof VideoTrack) { + VideoTrack vt = (VideoTrack) t; + writeTaptAtoms(trakAtom, vt); + } + writeEditAtoms(trakAtom, t); + writeMediaAtoms(trakAtom, trackIndex, modificationTime, t); + } + + private void writeTaptAtoms(CompositeAtom trakAtom, VideoTrack t) throws IOException { + DataAtom leaf; + QTFFImageOutputStream d; + /* Edit Atom ========= */ + CompositeAtom taptAtom = new CompositeAtom("tapt"); + trakAtom.add(taptAtom); + + /* Track Clean Aperture Dimensions, + Track Production Aperture Dimensions, + Track Encoded Pixels Dimensions + */ + /* + typedef struct { + byte version; + byte[3] flags; + fixed16d16 width; + fixed16d16 height; + } clefAtom; + */ + for (String id : List.of("clef", "prof", "enof")) { + leaf = new DataAtom(id); + taptAtom.add(leaf); + d = leaf.getOutputStream(); + d.write(0); // version + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + d.writeFixed16D16(t.width); + d.writeFixed16D16(t.height); + } + } + + private void writeEditAtoms(CompositeAtom trakAtom, Track t) throws IOException { + DataAtom leaf; + QTFFImageOutputStream d; /* Edit Atom ========= */ CompositeAtom edtsAtom = new CompositeAtom("edts"); trakAtom.add(edtsAtom); @@ -1161,7 +1246,7 @@ protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Date modi int numberOfEntries; editListTable editListTable[numberOfEntries]; } editListAtom; - + typedef struct { int trackDuration; int mediaTime; @@ -1173,8 +1258,6 @@ protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Date modi d = leaf.getOutputStream(); d.write(0); // version - // One byte that specifies the version of this header atom. 
- d.write(0); // flag[0] d.write(0); // flag[1] d.write(0); // flag[2] @@ -1193,9 +1276,12 @@ protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Date modi d.writeUInt(edit.mediaRate); // mediaRate } } + } - - /* Media Atom ========= */ + /* Media Atom ========= */ + private void writeMediaAtoms(CompositeAtom trakAtom, int trackIndex, Instant modificationTime, Track t) throws IOException { + DataAtom leaf; + QTFFImageOutputStream d; CompositeAtom mdiaAtom = new CompositeAtom("mdia"); trakAtom.add(mdiaAtom); @@ -1244,7 +1330,7 @@ protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Date modi d.writeShort(0); // language; // A 16-bit integer that specifies the language code for this media. // See “Language Code Values” for valid language codes: - // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap4/chapter_5_section_2.html#//apple_ref/doc/uid/TP40000939-CH206-27005 + // https://developer.apple.com/documentation/quicktime-file-format/language_code_values d.writeShort(0); // quality // A 16-bit integer that specifies the media’s playback quality—that is, @@ -1288,13 +1374,7 @@ protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Date modi // For data handlers, this field defines the data reference type—for // example, a component subtype value of 'alis' identifies a file alias. - if (t.mediaType == MediaType.AUDIO) { - d.writeType("appl"); - } else { - d.writeUInt(0); - } - // componentManufacturer - // Reserved. Set to 0. + d.writeType(t.componentManufacturer); // componentManufacturer d.writeUInt(t.mediaType == MediaType.AUDIO ? 268435456L : 0); // componentFlags // Reserved. Set to 0. @@ -1302,7 +1382,7 @@ protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Date modi d.writeUInt(t.mediaType == MediaType.AUDIO ? 65941 : 0); // componentFlagsMask // Reserved. Set to 0. - d.writePString(t.mediaType == MediaType.AUDIO ? 
"Apple Sound Media Handler" : ""); // componentName (empty string) + d.writePString(t.componentName); // componentName (empty string) // A (counted) string that specifies the name of the component—that is, // the media handler used when this media was created. This field may // contain a zero-length (empty) string. @@ -1314,7 +1394,7 @@ protected void writeTrackAtoms(int trackIndex, CompositeAtom moovAtom, Date modi protected void writeMediaInformationAtoms(int trackIndex, CompositeAtom mdiaAtom) throws IOException { Track t = tracks.get(trackIndex); DataAtom leaf; - DataAtomOutputStream d; + QTFFImageOutputStream d; /* Media Information atom ========= */ CompositeAtom minfAtom = new CompositeAtom("minf"); mdiaAtom.add(minfAtom); @@ -1366,13 +1446,8 @@ protected void writeMediaInformationAtoms(int trackIndex, CompositeAtom mdiaAtom // For data handlers, this field defines the data reference type—for // example, a component subtype value of 'alis' identifies a file alias. - if (t.mediaType == MediaType.AUDIO) { - d.writeType("appl"); - } else { - d.writeUInt(0); - } - // componentManufacturer - // Reserved. Set to 0. + d.writeType(t.componentManufacturer); // componentManufacturer + d.writeUInt(t.mediaType == MediaType.AUDIO ? 268435457L : 0); // componentFlags // Reserved. Set to 0. @@ -1380,7 +1455,7 @@ protected void writeMediaInformationAtoms(int trackIndex, CompositeAtom mdiaAtom d.writeInt(t.mediaType == MediaType.AUDIO ? 65967 : 0); // componentFlagsMask // Reserved. Set to 0. - d.writePString("Apple Alias Data Handler"); // componentName (empty string) + d.writePString(t.componentName); // componentName (empty string) // A (counted) string that specifies the name of the component—that is, // the media handler used when this media was created. This field may // contain a zero-length (empty) string. 
@@ -1458,7 +1533,7 @@ protected void writeMediaInformationAtoms(int trackIndex, CompositeAtom mdiaAtom protected void writeVideoMediaInformationHeaderAtom(int trackIndex, CompositeAtom minfAtom) throws IOException { DataAtom leaf; - DataAtomOutputStream d; + QTFFImageOutputStream d; /* Video media information atom -------- */ leaf = new DataAtom("vmhd"); @@ -1502,7 +1577,7 @@ protected void writeVideoMediaInformationHeaderAtom(int trackIndex, CompositeAto protected void writeSoundMediaInformationHeaderAtom(int trackIndex, CompositeAtom minfAtom) throws IOException { DataAtom leaf; - DataAtomOutputStream d; + QTFFImageOutputStream d; /* Sound media information header atom -------- */ leaf = new DataAtom("smhd"); @@ -1543,14 +1618,22 @@ protected void writeSoundMediaInformationHeaderAtom(int trackIndex, CompositeAto protected void writeSampleTableAtoms(int trackIndex, CompositeAtom minfAtom) throws IOException { Track t = tracks.get(trackIndex); DataAtom leaf; - DataAtomOutputStream d; + QTFFImageOutputStream d; /* Sample Table atom ========= */ CompositeAtom stblAtom = new CompositeAtom("stbl"); minfAtom.add(stblAtom); /* Sample Description atom ------- */ - t.writeSampleDescriptionAtom(stblAtom); + if (Objects.requireNonNull(t) instanceof VideoTrack) { + VideoTrack vt = (VideoTrack) Objects.requireNonNull(t); + writeVideoSampleDescriptionAtom(vt, stblAtom); + } else if (t instanceof AudioTrack) { + AudioTrack at = (AudioTrack) t; + writeAudioSampleDescriptionAtom(at, stblAtom); + } else { + writeGenericSampleDescriptionAtom(t, stblAtom); + } /* Time to Sample atom ---- */ @@ -1736,13 +1819,13 @@ protected void writeSampleTableAtoms(int trackIndex, CompositeAtom minfAtom) thr ? 
((AudioTrack) t).soundSampleSize / 8 * ((AudioTrack) t).soundNumberOfChannels// : 1; if (t.sampleSizes.size() == 1) { - d.writeUInt(t.sampleSizes.getFirst().getSampleLength() / sampleUnit); // sample size + d.writeUInt(t.sampleSizes.get(0).getSampleLength() / sampleUnit); // sample size // A 32-bit integer specifying the sample size. If all the samples are // the same size, this field contains that size value. If this field is // set to 0, then the samples have different sizes, and those sizes are // stored in the sample size table. - d.writeUInt(t.sampleSizes.getFirst().getSampleCount()); // number of entries + d.writeUInt(t.sampleSizes.get(0).getSampleCount()); // number of entries // A 32-bit integer containing the count of entries in the sample size // table. @@ -1780,7 +1863,7 @@ protected void writeSampleTableAtoms(int trackIndex, CompositeAtom minfAtom) thr // 32-bit or 64-bit offsets. The latter is useful when managing very // large movies. Only one of these variants occurs in any single // instance of a sample table atom. - if (t.chunks.isEmpty() || t.chunks.getLast().getChunkOffset() <= 0xffffffffL) { + if (t.chunks.isEmpty() || t.chunks.get(t.chunks.size() - 1).getChunkOffset() <= 0xffffffffL) { /* 32-bit chunk offset atom -------- */ leaf = new DataAtom("stco"); stblAtom.add(leaf); @@ -1854,6 +1937,10 @@ protected void writeSampleTableAtoms(int trackIndex, CompositeAtom minfAtom) thr } } + private void writeGenericSampleDescriptionAtom(Track t, CompositeAtom stblAtom) { + // empty, for now + } + /** * Writes a version of the movie which is optimized for the web into the * specified output file.

This method finishes the movie and then copies @@ -1874,7 +1961,7 @@ public void toWebOptimizedMovie(File outputFile, boolean compressHeader) throws out = null; if (compressHeader) { - SeekableByteArrayOutputStream buf = new SeekableByteArrayOutputStream(); + ByteArrayImageOutputStream buf = new ByteArrayImageOutputStream(); int maxIteration = 5; long compressionHeadersSize = 40 + 8; long headerSize = 0; @@ -1882,7 +1969,7 @@ public void toWebOptimizedMovie(File outputFile, boolean compressHeader) throws while (true) { mdatOffset = compressionHeadersSize + headerSize + freeSize; buf.reset(); - DeflaterOutputStream deflater = new DeflaterOutputStream(buf); + DeflaterOutputStream deflater = new DeflaterOutputStream(new ImageOutputStreamAdapter(buf)); out = new MemoryCacheImageOutputStream(deflater); writeEpilog(); out.close(); @@ -1902,13 +1989,13 @@ public void toWebOptimizedMovie(File outputFile, boolean compressHeader) throws if (buf.size() == 0) { compressHeader = false; - System.err.println("WARNING QuickTimeWriter failed to compress header."); + System.err.println("WARNING MP4Writer failed to compress header."); } else { out = new FileImageOutputStream(outputFile); writeProlog(); // 40 bytes compression headers - DataAtomOutputStream daos = new DataAtomOutputStream(new ImageOutputStreamAdapter(out)); + QTFFImageOutputStream daos = new QTFFImageOutputStream(out); daos.writeUInt(headerSize + 40); daos.writeType("moov"); @@ -1955,4 +2042,467 @@ public void toWebOptimizedMovie(File outputFile, boolean compressHeader) throws out = originalOut; } } + + protected void writeVideoSampleDescriptionAtom(VideoTrack t, CompositeAtom stblAtom) throws IOException { + CompositeAtom leaf; + QTFFImageOutputStream d; + + /* Sample Description atom ------- */ + // The sample description atom stores information that allows you to + // decode samples in the media. The data stored in the sample + // description varies, depending on the media type. 
For example, in the + // case of video media, the sample descriptions are image description + // structures. The sample description information for each media type is + // explained in “Media Data Atom Types”: + // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap3/chapter_4_section_1.html#//apple_ref/doc/uid/TP40000939-CH205-SW1 + leaf = new CompositeAtom("stsd"); + stblAtom.add(leaf); + /* + typedef struct { + byte version; + byte[3] flags; + int numberOfEntries; + sampleDescriptionEntry sampleDescriptionTable[numberOfEntries]; + } sampleDescriptionAtom; + + typedef struct { + int size; + magic type; + byte[6] reserved; // six bytes that must be zero + short dataReferenceIndex; // A 16-bit integer that contains the index + //of the data reference to use to retrieve + //data associated with samples that use this + //sample description. Data references are + //stored in data reference atoms. + byte[size - 16] data; + } sampleDescriptionEntry; + */ + d = leaf.getOutputStream(); + d.write(0); // version + // A 1-byte specification of the version of this sample description atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for sample description flags. Set this field to 0. + + d.writeInt(1); // number of Entries + // A 32-bit integer containing the number of sample descriptions that follow. + + // A 32-bit integer indicating the number of bytes in the sample description. + long sizeStreamPosition = d.getStreamPosition(); + d.writeInt(0); // sampleDescriptionTable[0].size + + d.writeType(t.mediaCompressionType); // sampleDescriptionTable[0].type + + // A 32-bit integer indicating the format of the stored data. + // This depends on the media type, but is usually either the + // compression format or the media type. + + d.write(new byte[6]); // sampleDescriptionTable[0].reserved + // Six bytes that must be set to 0. 
+ + d.writeShort(1); // sampleDescriptionTable[0].dataReferenceIndex + // A 16-bit integer that contains the index of the data + // reference to use to retrieve data associated with samples + // that use this sample description. Data references are stored + // in data reference atoms. + + // Video Sample Description + // ------------------------ + // The format of the following fields is described here: + // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap3/chapter_4_section_2.html + + d.writeShort(0); // sampleDescriptionTable.videoSampleDescription.version + // A 16-bit integer indicating the version number of the + // compressed data. This is set to 0, unless a compressor has + // changed its data format. + + d.writeShort(0); // sampleDescriptionTable.videoSampleDescription.revisionLevel + // A 16-bit integer that must be set to 0. + + d.writeType(t.componentManufacturer); // sampleDescriptionTable.videoSampleDescription.manufacturer + // A 32-bit integer that specifies the developer of the + // compressor that generated the compressed data. Often this + // field contains 'appl' to indicate Apple Computer, Inc. + + d.writeInt(0); // sampleDescriptionTable.videoSampleDescription.temporalQuality + // A 32-bit integer containing a value from 0 to 1023 indicating + // the degree of temporal compression. + + d.writeInt(MathUtil.clamp((int) (1024 * (1 - t.videoQuality)), 0, 1024)); // sampleDescriptionTable.videoSampleDescription.spatialQuality + // A 32-bit integer containing a value from 0 to 1024 indicating + // the degree of spatial compression. + + d.writeUShort((int) t.width); // sampleDescriptionTable.videoSampleDescription.width + // A 16-bit integer that specifies the width of the source image + // in pixels. + + d.writeUShort((int) t.height); // sampleDescriptionTable.videoSampleDescription.height + // A 16-bit integer that specifies the height of the source image in pixels. 
+ + d.writeFixed16D16(72.0); // sampleDescriptionTable.videoSampleDescription.horizontalResolution + // A 32-bit fixed-point number containing the horizontal + // resolution of the image in pixels per inch. + + d.writeFixed16D16(72.0); // sampleDescriptionTable.videoSampleDescription.verticalResolution + // A 32-bit fixed-point number containing the vertical + // resolution of the image in pixels per inch. + + d.writeInt(0); // sampleDescriptionTable.videoSampleDescription.dataSize + // A 32-bit integer that must be set to 0. + + d.writeShort(1); // sampleDescriptionTable.videoSampleDescription.sampleCount + // A 16-bit integer that indicates how many bytes of compressed + // data are stored in each sample. Usually set to 1. + + d.writePString(t.mediaCompressorName, 32); // sampleDescriptionTable.videoSampleDescription.compressorName + // A 32-byte Pascal string containing the name of the compressor + // that created the image, such as "jpeg". + + d.writeShort(t.videoDepth); // sampleDescriptionTable.videoSampleDescription.depth + // A 16-bit integer that indicates the pixel depth of the + // compressed image. Values of 1, 2, 4, 8 ,16, 24, and 32 + // indicate the depth of color images. The value 32 should be + // used only if the image contains an alpha channel. Values of + // 34, 36, and 40 indicate 2-, 4-, and 8-bit grayscale, + // respectively, for grayscale images. + + d.writeShort(t.videoColorTable == null ? -1 : 0); // sampleDescriptionTable.videoSampleDescription.colorTableID + // A 16-bit integer that identifies which color table to use. + // If this field is set to –1, the default color table should be + // used for the specified depth. For all depths below 16 bits + // per pixel, this indicates a standard Macintosh color table + // for the specified depth. Depths of 16, 24, and 32 have no + // color table. 
+ + if (t.avcDecoderConfigurationRecord != null) { + writeMandatoryAvcAtoms(t, leaf); + } + + long size = (d.getStreamPosition() - sizeStreamPosition); + d.mark(); + d.seek(sizeStreamPosition); + d.writeInt((int) size); + d.reset(); + } + + /** + * Writes the avcC atom. + * + * @param t the track + * @param parent the composite atom + * @throws IOException on IO failure + */ + private void writeMandatoryAvcAtoms(VideoTrack t, CompositeAtom parent) throws IOException { + DataAtom leaf = new DataAtom("avcC"); + parent.add(leaf); + /* + typedef struct { + ubyte configurationVersion; // always = 1 + ubyte AVCProfileIndication; // Contains the profile code as defined in ISO/IEC 14496-10. + // profile_compatibility is a byte defined exactly the same as the byte which occurs + // between the profile_IDC and level_IDC in a sequence parameter set (SPS), + // as defined in ISO/IEC 14496-10 + ubyte profile_compatibility; // + ubyte AVCLevelIndication; // Contains the level code as defined in ISO/IEC 14496-10. + uint6 reserved1; // always 111111 + uint2 lengthSizeMinusOne; // Indicates the length in bytes of the NALUnitLength field in an AVC + // video sample or AVC parameter set sample of the associated stream minus one. + // For example, a size of one byte is indicated with a value of 0. The value of this field + // shall be one of 0, 1, or 3 corresponding to a length encoded with 1, 2, or 4 bytes, + // respectively. + uint3 reserved2; // always 111 + uint5 numOfSequenceParameterSets; // numOfSequenceParameterSets indicates the number of SPSs that are used as the + // initial set of SPSs for decoding the AVC elementary stream. + AvcSequenceParameterSet[numOfSequenceParameterSets] sequenceParameterSet; + uint8 numOfPictureParameterSets; // Indicates the number of picture parameter sets (PPSs) that are used as the + // initial set of PPSs for decoding the AVC elementary stream. 
+ AvcPictureParameterSet[numOfPictureParameterSets] pictureParameterSet + } AvcDecoderConfigurationRecord; + */ + var d = leaf.getOutputStream(); + AvcDecoderConfigurationRecord r = t.avcDecoderConfigurationRecord; + d.writeByte(1);//version + d.writeByte(r.avcProfileIndication()); + d.writeByte(r.profileCompatibility()); + d.writeByte(r.avcLevelIndication()); + d.writeByte(0b111111_00 | (r.nalLengthSize() - 1)); + + Set spsList = r.sequenceParameterSetNALUnit(); + int n = Math.min(spsList.size(), (1 << 5) - 1); + d.writeByte(0b111_00000 | n); + Iterator it = spsList.iterator(); + for (int i = 0; i < n; i++) { + byte[] sps = it.next().getArray(); + d.writeShort((short) (sps.length + 1)); + d.writeByte((byte) 0x67); + d.write(sps); + } + + Set ppsList = r.pictureParameterSetNALUnit(); + n = Math.min(ppsList.size(), (1 << 8) - 1); + d.writeByte(n); + it = ppsList.iterator(); + for (int i = 0; i < n; i++) { + byte[] pps = it.next().getArray(); + d.writeShort((short) (pps.length + 1)); + d.writeByte((byte) 0x68); + d.write(pps); + } + + /* colr atom */ + /*---------*/ + /* + typedef struct { + magic colorParameterType; // An unsigned 32-bit field. + // The currently defined types are 'nclc' for video, and 'prof' for print. + uint16 primariesIndex; // A 16-bit unsigned integer containing an index into a table specifying the + // CIE 1931 xy chromaticity coordinates of the white point and the red, green, + // and blue primaries. + // Index 1 + // Recommendation ITU-R BT.709 white x = 0.3127 y = 0.3290 (CIE III. D65) red x = 0.640 y = 0.330 green x = 0.300 y = 0.600 blue x = 0.150 y = 0.060 + + uint16 transferFunctionIndex; // A 16-bit unsigned integer containing an index into a table specifying the + // nonlinear transfer function coefficients used to translate between RGB color space + // values and Y´CbCr values. 
+ // Index 1 + // Recommendation ITU-R BT.709-2, SMPTE 274M-1995, 296M-1997, 293M-1996, 170M-1994 An image that shows two formulas for transfer functions for index 1. The first formula is E’ with subscript W is equal to four point five zero zero for zero is less than or equal to W is less than zero point zero one eight. The second formula is E’ with subscript W is equal to one point zero nine nine W raised to the power zero point four five, minus zero point zero nine nine for zero point zero one eight is less than or equal to W is less than or equal to one. + + uint16 matrixIndex; // A 16-bit unsigned integer containing an index into a table specifying the + // transformation matrix coefficients used to translate between RGB color space values + // and Y´CbCr values. + // Index 1 + // Recommendation ITU-R BT.709-2 (1125/60/2:1 only), SMPTE 274M-1995, 296M-1997 An image that shows the formula for matrix index 1. The formula is E’ with subscript Y is equal to zero point seven one five two E’ with subscript G, plus zero point zero seven two two E’ with subscript B, plus zero point two one two six E’ with subscript R. + // https://developer.apple.com/documentation/quicktime-file-format/color_parameter_atom + + } videoColrSampleDescriptionExtensionAtom; + */ + /* + leaf = new DataAtom("colr"); + parent.add(leaf); + d = leaf.getOutputStream(); + d.writeType("nclc"); + d.writeUShort(1); + d.writeUShort(1); + d.writeUShort(1); + */ + + /* pasp atom */ + /*---------*/ + /* + typedef struct { + uint32 hSpacing; // An unsigned 32-bit integer specifying the horizontal spacing of pixels, + // such as luma sampling instants for Y´CbCr or YUV video. + uint32 vSpacing; // An unsigned 32-bit integer specifying the vertical spacing of pixels, + // such as video picture lines. 
+ } videoPaspSampleDescriptionExtensionAtom; + https://developer.apple.com/documentation/quicktime-file-format/pixel_aspect_ratio + */ + Rational pixelAspectRatio = t.format.get(VideoFormatKeys.PixelAspectRatioKey, Rational.ONE); + if (!pixelAspectRatio.equals(Rational.ONE)) { + leaf = new DataAtom("pasp"); + parent.add(leaf); + d = leaf.getOutputStream(); + d.writeUInt(pixelAspectRatio.getNumerator()); + d.writeUInt(pixelAspectRatio.getDenominator()); + } + } + + /** + * Color table atoms define a list of preferred colors for displaying + * the movie on devices that support only 256 colors. The list may + * contain up to 256 colors. These optional atoms have a type value of + * 'ctab'. The color table atom contains a Macintosh color table data + * structure. + * + * @param stblAtom + * @throws IOException + */ + protected void writeVideoColorTableAtom(VideoTrack t, CompositeAtom stblAtom) throws IOException { + DataAtom leaf; + QTFFImageOutputStream d; + leaf = new DataAtom("ctab"); + stblAtom.add(leaf); + + d = leaf.getOutputStream(); + + d.writeUInt(0); // Color table seed. A 32-bit integer that must be set to 0. + d.writeUShort(0x8000); // Color table flags. A 16-bit integer that must be set to 0x8000. + IndexColorModel videoColorTable = t.videoColorTable; + d.writeUShort(videoColorTable.getMapSize() - 1); + // Color table size. A 16-bit integer that indicates the number of + // colors in the following color array. This is a zero-relative value; + // setting this field to 0 means that there is one color in the array. + + for (int i = 0, n = videoColorTable.getMapSize(); i < n; ++i) { + // An array of colors. Each color is made of four unsigned 16-bit integers. + // The first integer must be set to 0, the second is the red value, + // the third is the green value, and the fourth is the blue value. 
+ d.writeUShort(0); + d.writeUShort((videoColorTable.getRed(i) << 8) | videoColorTable.getRed(i)); + d.writeUShort((videoColorTable.getGreen(i) << 8) | videoColorTable.getGreen(i)); + d.writeUShort((videoColorTable.getBlue(i) << 8) | videoColorTable.getBlue(i)); + } + } + + protected void writeAudioSampleDescriptionAtom(AudioTrack t, CompositeAtom stblAtom) throws IOException { + // TO DO + DataAtom leaf; + QTFFImageOutputStream d; + + /* Sample Description atom ------- */ + // The sample description atom stores information that allows you to + // decode samples in the media. The data stored in the sample + // description varies, depending on the media type. For example, in the + // case of video media, the sample descriptions are image description + // structures. The sample description information for each media type is + // explained in “Media Data Atom Types”: + // http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap3/chapter_4_section_1.html#//apple_ref/doc/uid/TP40000939-CH205-SW1 + leaf = new DataAtom("stsd"); + stblAtom.add(leaf); + /* + typedef struct { + byte version; + byte[3] flags; + int numberOfEntries; + soundSampleDescriptionEntry sampleDescriptionTable[numberOfEntries]; + } soundSampleDescriptionAtom; + + typedef struct { + int size; + magic type; + byte[6] reserved; + short dataReferenceIndex; + soundSampleDescription data; + } soundSampleDescriptionEntry; + + typedef struct { + ushort version; + ushort revisionLevel; + uint vendor; + ushort numberOfChannels; + ushort sampleSize; + short compressionId; + ushort packetSize; + fixed16d16 sampleRate; + byte[] extendedData; + } soundSampleDescription; + */ + d = leaf.getOutputStream(); + + // soundSampleDescriptionAtom: + // --------------------------- + d.write(0); // version + // A 1-byte specification of the version of this sample description atom. + + d.write(0); // flag[0] + d.write(0); // flag[1] + d.write(0); // flag[2] + // A 3-byte space for sample description flags. 
Set this field to 0. + + d.writeInt(1); // number of Entries + // A 32-bit integer containing the number of sample descriptions that follow. + + // soundSampleDescriptionEntry: + // ---------------------------- + // A 32-bit integer indicating the number of bytes in the sample description. + d.writeUInt(4 + 12 + 20 + 16 + t.stsdExtensions.length); // sampleDescriptionTable[0].size + + // Common header: 12 bytes + d.writeType(t.mediaCompressionType); // sampleDescriptionTable[0].type + // A 32-bit integer indicating the format of the stored data. + // This depends on the media type, but is usually either the + // compression format or the media type. + + d.write(new byte[6]); // sampleDescriptionTable[0].reserved + // Six bytes that must be set to 0. + + d.writeUShort(1); // sampleDescriptionTable[0].dataReferenceIndex + // A 16-bit integer that contains the index of the data + // reference to use to retrieve data associated with samples + // that use this sample description. Data references are stored + // in data reference atoms. + + // Sound Sample Description (Version 0) 20 bytes + // ------------------------ + + d.writeUShort(1); // version + // A 16-bit integer that holds the sample description version (currently 0 or 1). + + d.writeUShort(0); // revisionLevel + // A 16-bit integer that must be set to 0. + + d.writeUInt(0); // vendor + // A 32-bit integer that must be set to 0. + + d.writeUShort(t.soundNumberOfChannels); // numberOfChannels + // A 16-bit integer that indicates the number of sound channels used by + // the sound sample. Set to 1 for monaural sounds, 2 for stereo sounds. + // Higher numbers of channels are not supported. + + d.writeUShort(t.soundSampleSize); // sampleSize (bits) + // A 16-bit integer that specifies the number of bits in each + // uncompressed sound sample. Allowable values are 8 or 16. Formats + // using more than 16 bits per sample set this field to 16 and use sound + // description version 1. 
+ + d.writeUShort(t.soundCompressionId); // compressionId + // XXX - This must be set to -1, or the QuickTime player won't accept this file. + // A 16-bit integer that must be set to 0 for version 0 sound + // descriptions. This may be set to –2 for some version 1 sound + // descriptions; see “Redefined Sample Tables” (page 135). + + d.writeUShort(0); // packetSize + // A 16-bit integer that must be set to 0. + + d.writeFixed16D16(t.soundSampleRate); // sampleRate + // A 32-bit unsigned fixed-point number (16.16) that indicates the rate + // at which the sound samples were obtained. The integer portion of this + // number should match the media’s time scale. Many older version 0 + // files have values of 22254.5454 or 11127.2727, but most files have + // integer values, such as 44100. Sample rates greater than 2^16 are not + // supported. + + // Sound Sample Description Additional fields (only in Version 1) 16 bytes + // ------------------------ + d.writeUInt(t.soundSamplesPerPacket); // samplesPerPacket + // A 32-bit integer. + // The number of uncompressed samples generated by a + // compressed sample (an uncompressed sample is one sample + // from each channel). This is also the sample duration, + // expressed in the media’s timescale, where the + // timescale is equal to the sample rate. For + // uncompressed formats, this field is always 1. + // + d.writeUInt(t.soundBytesPerPacket); // bytesPerPacket + // A 32-bit integer. + // For uncompressed audio, the number of bytes in a + // sample for a single channel. This replaces the older + // sampleSize field, which is set to 16. + // This value is calculated by dividing the frame size + // by the number of channels. The same calculation is + // performed to calculate the value of this field for + // compressed audio, but the result of the calculation + // is not generally meaningful for compressed audio. + // + d.writeUInt(t.soundBytesPerFrame); // bytesPerFrame + // A 32-bit integer. 
+ // The number of bytes in a sample: for uncompressed + // audio, an uncompressed frame; for compressed audio, a + // compressed frame. This can be calculated by + // multiplying the bytes per packet field by the number + // of channels. + // + d.writeUInt(t.soundBytesPerSample); // bytesPerSample + // A 32-bit integer. + // The size of an uncompressed sample in bytes. This is + // set to 1 for 8-bit audio, 2 for all other cases, even + // if the sample size is greater than 2 bytes. + + // Write stsd Extensions + // Extensions must be atom-based fields + // ------------------------------------ + d.write(t.stsdExtensions); + } } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeReader.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeReader.java index 3a73656..a624f72 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeReader.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeReader.java @@ -13,6 +13,7 @@ import org.monte.media.av.Registry; import org.monte.media.math.Rational; import org.monte.media.util.ArrayUtil; +import org.monte.media.util.MathUtil; import javax.imageio.stream.ImageInputStream; import java.awt.image.BufferedImage; @@ -65,26 +66,26 @@ public QuickTimeReader(ImageInputStream in) throws IOException { } @Override - public long timeToSample(int track, Rational seconds) throws IOException { + public long findSampleAtTime(int track, Rational seconds) throws IOException { ensureRealized(); QuickTimeMeta.Track tr = meta.tracks.get(track); long timeStamp = seconds.multiply(meta.timeScale).longValue(); QuickTimeMeta.TrackSample key = new QuickTimeMeta.TrackSample(null, timeStamp, 0, timeStamp, 0); int result = Collections.binarySearch(tr.trackSamplesList, key, Comparator.comparingLong(a -> a.timeStamp)); if (result < 0) result = ~result - 1; - result = Math.clamp(result, 0, 
tr.trackSamplesList.size() - 1); + result = MathUtil.clamp(result, 0, tr.trackSamplesList.size() - 1); return result; } @Override - public Rational sampleToTime(int track, long sample) throws IOException { + public Rational getSampleTime(int track, long sample) throws IOException { ensureRealized(); QuickTimeMeta.Track tr = meta.tracks.get(track); return new Rational(tr.trackSamplesList.get((int) sample).timeStamp, meta.timeScale); } @Override - public Rational getDuration(int track, long sample) throws IOException { + public Rational getSampleDuration(int track, long sample) throws IOException { ensureRealized(); QuickTimeMeta.Track tr = meta.tracks.get(track); return new Rational(tr.trackSamplesList.get((int) sample).duration, meta.timeScale); @@ -162,7 +163,6 @@ public void read(int track, Buffer buffer) throws IOException { var ts = tr.trackSamplesList.get((int) tr.readIndex); var ms = ts.mediaSample; - // FIXME - This should be done using QuickTimeInputStream.readSample() in.seek(ms.offset); byte[] b; buffer.data = b = ArrayUtil.reuseByteArray(buffer.data, (int) ms.length); @@ -199,7 +199,7 @@ public void setMovieReadTime(Rational newValue) throws IOException { ensureRealized(); for (int t = 0, n = meta.tracks.size(); t < n; t++) { QuickTimeMeta.Track tr = meta.tracks.get(t); - int sample = (int) min(timeToSample(t, newValue), tr.media.sampleCount - 1); + int sample = (int) min(findSampleAtTime(t, newValue), tr.media.sampleCount - 1); for (; sample > 0 && !tr.trackSamplesList.get(sample).mediaSample.isKeyframe; sample--) ; tr.readIndex = sample; } @@ -208,16 +208,16 @@ public void setMovieReadTime(Rational newValue) throws IOException { @Override public Rational getReadTime(int track) throws IOException { QuickTimeMeta.Track tr = meta.tracks.get(track); - return sampleToTime(track, tr.readIndex); + return getSampleTime(track, tr.readIndex); } @Override - public Rational getDuration() throws IOException { - return new Rational(getMovieDuration(), 
getMovieTimeScale()); + public Rational getMovieDuration() throws IOException { + return new Rational(getMovieDurationInMovieTimeScale(), getMovieTimeScale()); } @Override - public Rational getDuration(int track) throws IOException { + public Rational getTrackDuration(int track) throws IOException { ensureRealized(); QuickTimeMeta.Track tr = meta.tracks.get(track); return new Rational(tr.duration, meta.timeScale); diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeSerializer.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeSerializer.java deleted file mode 100755 index 55da3e5..0000000 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeSerializer.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * @(#)QuickTimeSerializer.java - * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. - */ -package org.monte.media.quicktime; - -/** - * {@code QuickTimeSerializer} takes a {@code QuickTimeMovie} and flattens - * it into an {@code ImageOutputStream}. - *

- * This is in internal class of QuickTimeOutputStream. - *

- * FIXME - Implement me. - * - * @author Werner Randelshofer - */ -public class QuickTimeSerializer { - -} diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeWriter.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeWriter.java index f0920bf..0159081 100755 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeWriter.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeWriter.java @@ -1,5 +1,5 @@ /* - * @(#)QuickTimeWriter.java + * @(#)MP4Writer.java * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. */ package org.monte.media.quicktime; @@ -7,17 +7,22 @@ import org.monte.media.av.Buffer; import org.monte.media.av.Codec; import org.monte.media.av.Format; +import org.monte.media.av.FormatKeys; import org.monte.media.av.FormatKeys.MediaType; import org.monte.media.av.MovieWriter; import org.monte.media.av.Registry; +import org.monte.media.av.codec.video.VideoFormatKeys; import org.monte.media.math.Rational; +import org.monte.media.qtff.AbstractQTFFMovieStream; +import org.monte.media.qtff.AvcDecoderConfigurationRecord; import javax.imageio.stream.ImageOutputStream; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; -import java.io.OutputStream; import java.nio.ByteOrder; +import java.util.ArrayList; +import java.util.List; import static org.monte.media.av.BufferFlag.DISCARD; import static org.monte.media.av.BufferFlag.KEYFRAME; @@ -56,8 +61,8 @@ * Supports writing of time-based video and audio data into a QuickTime movie * file (.MOV) without the need of native code. *

- * {@code QuickTimeWriter} works with tracks and samples. After creating a - * {@code QuickTimeWriter} one or more video and audio tracks can be added to + * {@code MP4Writer} works with tracks and samples. After creating a + * {@code MP4Writer} one or more video and audio tracks can be added to * it. Then samples can be written into the track(s). A sample is a single * element in a sequence of time-ordered data. For video data a sample typically * consists of a single video frame, for uncompressed stereo audio data a sample @@ -86,14 +91,14 @@ * For convenience, this class has built-in encoders for video frames in the * following formats: RAW, ANIMATION, JPEG and PNG. Media data in other formats, * including all audio data, must be encoded before it can be written with - * {@code QuickTimeWriter}. Alternatively, you can plug in your own codec. + * {@code MP4Writer}. Alternatively, you can plug in your own codec. *

* Example: Writing 10 seconds of a movie with 640x480 pixel, 30 fps, * PNG-encoded video and 16-bit stereo, 44100 Hz, PCM-encoded audio. *

- * QuickTimeWriter w = new QuickTimeWriter(new File("mymovie.mov"));
+ * MP4Writer w = new MP4Writer(new File("mymovie.mov"));
  * w.addAudioTrack(new AudioFormat(AudioFormat.Encoding.PCM_SIGNED), 44100, 2, 16, 2, 44100, true)); // audio in track 0
- * w.addVideoTrack(QuickTimeWriter.VIDEO_PNG, 30, 640, 480);  // video in track 1
+ * w.addVideoTrack(MP4Writer.VIDEO_PNG, 30, 640, 480);  // video in track 1
  *
  * // calculate total movie sampleDuration in media time units for each track
  * long atmax = w.getMediaTimeScale(0) * 10;
@@ -149,6 +154,16 @@
  * @author Werner Randelshofer
  */
 public class QuickTimeWriter extends QuickTimeOutputStream implements MovieWriter {
+    private static class TrackEncoder {
+        /**
+         * The codec.
+         */
+        public Codec codec;
+        public Buffer outputBuffer;
+        public Buffer inputBuffer;
+    }
+
+    private List trackEncoders = new ArrayList<>();
 
     public final static Format QUICKTIME = new Format(MediaTypeKey, MediaType.FILE, MimeTypeKey, MIME_QUICKTIME);
     public final static Format VIDEO_RAW = new Format(
@@ -171,6 +186,7 @@ public class QuickTimeWriter extends QuickTimeOutputStream implements MovieWrite
             MimeTypeKey, MIME_QUICKTIME,
             EncodingKey, ENCODING_QUICKTIME_PNG, //
             CompressorNameKey, COMPRESSOR_NAME_QUICKTIME_PNG);
+
     /**
      * Creates a new QuickTime writer.
      *
@@ -190,7 +206,6 @@ public QuickTimeWriter(ImageOutputStream out) throws IOException {
     }
 
 
-
     @Override
     public Format getFileFormat() throws IOException {
         return QUICKTIME;
@@ -211,7 +226,7 @@ public Format getFormat(int track) {
     public int addTrack(Format fmt) throws IOException {
         if (fmt.get(MediaTypeKey) == MediaType.VIDEO) {
             int t = addVideoTrack(fmt.get(EncodingKey),
-                    fmt.get(CompressorNameKey, fmt.get(EncodingKey)),
+                    fmt.get(CompressorNameKey, AbstractQTFFMovieStream.DEFAULT_COMPONENT_NAME),
                     Math.min(6000, fmt.get(FrameRateKey).getNumerator() * fmt.get(FrameRateKey).getDenominator()),
                     fmt.get(WidthKey), fmt.get(HeightKey), fmt.get(DepthKey, 24),
                     (int) fmt.get(FrameRateKey).getDenominator(), fmt);
@@ -225,7 +240,7 @@ public int addTrack(Format fmt) throws IOException {
             String encoding = fmt.get(EncodingKey, null);
             Rational frameRate = fmt.get(FrameRateKey, fmt.get(SampleRateKey));
             int channels = fmt.get(ChannelsKey, 1);
-            int frameSize = fmt.get(FrameSizeKey, (sampleSizeInBits + 7) / 8 * sampleSizeInBits);
+            int frameSize = fmt.get(FrameSizeKey, (sampleSizeInBits + 7) / 8);
             if (encoding == null || encoding.length() != 4) {
                 if (signed) {
                     encoding = bo == ByteOrder.BIG_ENDIAN ? "twos" : "sowt";
@@ -367,7 +382,7 @@ public int getTrackCount() {
      */
     @Override
     public Rational getDuration(int track) {
-        AbstractQuickTimeStream.Track tr = tracks.get(track);
+        AbstractQTFFMovieStream.Track tr = tracks.get(track);
         return new Rational(tr.mediaDuration, tr.mediaTimeScale);
     }
 
@@ -376,36 +391,32 @@ private Codec createCodec(Format fmt) {
     }
 
     private void createCodec(int track) {
-        AbstractQuickTimeStream.Track tr = tracks.get(track);
+        AbstractQTFFMovieStream.Track tr = tracks.get(track);
+        TrackEncoder tre = getTrackEncoder(track);
         Format fmt = tr.format;
-        tr.codec = createCodec(fmt);
-        String enc = fmt.get(EncodingKey);
-        if (tr.codec != null) {
+        tre.codec = createCodec(fmt);
+        if (tre.codec != null) {
             if (fmt.get(MediaTypeKey) == MediaType.VIDEO) {
-                Format vf = fmt;
-                tr.codec.setInputFormat(fmt.prepend(
+                tre.codec.setInputFormat(fmt.prepend(
                         MimeTypeKey, MIME_JAVA, EncodingKey, ENCODING_BUFFERED_IMAGE,
                         DataClassKey, BufferedImage.class));
 
-                if (null == tr.codec.setOutputFormat(
+                if (null == tre.codec.setOutputFormat(
                         fmt.prepend(
                                 QualityKey, getCompressionQuality(track),
                                 MimeTypeKey, MIME_QUICKTIME,
                                 DataClassKey, byte[].class))) {
                     throw new UnsupportedOperationException("Input format not supported:" + fmt);
                 }
-                //tr.codec.setQuality(tr.videoQuality);
             } else {
-                Format vf = fmt;
-                tr.codec.setInputFormat(fmt.prepend(
+                tre.codec.setInputFormat(fmt.prepend(
                         MimeTypeKey, MIME_JAVA, EncodingKey, fmt.containsKey(SignedKey) && fmt.get(SignedKey) ? ENCODING_PCM_SIGNED : ENCODING_PCM_UNSIGNED,
                         DataClassKey, byte[].class));
-                if (tr.codec.setOutputFormat(fmt) == null) {
-                    throw new UnsupportedOperationException("Codec output format not supported:" + fmt + " codec:" + tr.codec);
+                if (tre.codec.setOutputFormat(fmt) == null) {
+                    throw new UnsupportedOperationException("Codec output format not supported:" + fmt + " codec:" + tre.codec);
                 } else {
-                    tr.format = tr.codec.getOutputFormat();
+                    tr.format = tre.codec.getOutputFormat();
                 }
-                //tr.codec.setQuality(tr.dwQuality);
             }
         }
     }
@@ -414,14 +425,14 @@ QualityKey, getCompressionQuality(track),
      * Returns the codec of the specified track.
      */
     public Codec getCodec(int track) {
-        return tracks.get(track).codec;
+        return getTrackEncoder(track).codec;
     }
 
     /**
      * Sets the codec for the specified track.
      */
     public void setCodec(int track, Codec codec) {
-        tracks.get(track).codec = codec;
+        getTrackEncoder(track).codec = codec;
     }
 
     /**
@@ -434,54 +445,63 @@ public void setCodec(int track, Codec codec) {
     @Override
     public void write(int track, Buffer buf) throws IOException {
         ensureStarted();
-        AbstractQuickTimeStream.Track tr = tracks.get(track);
+        AbstractQTFFMovieStream.Track tr = tracks.get(track);
+        TrackEncoder tre = getTrackEncoder(track);
 
         // Encode sample data
-        {
-            if (tr.outputBuffer == null) {
-                tr.outputBuffer = new Buffer();
-                tr.outputBuffer.format = tr.format;
-            }
-            Buffer outBuf;
-            if (tr.format.matchesWithout(buf.format, FrameRateKey)) {
-                outBuf = buf;
-            } else {
-                outBuf = tr.outputBuffer;
-                boolean isSync = tr.syncInterval == 0 ? false : tr.sampleCount % tr.syncInterval == 0;
-                buf.setFlag(KEYFRAME, isSync);
-                if (tr.codec == null) {
-                    createCodec(track);
-                    if (tr.codec == null) {
-                        throw new UnsupportedOperationException("No codec for this format " + tr.format);
-                    }
-                }
 
-                tr.codec.process(buf, outBuf);
-            }
-            if (outBuf.isFlag(DISCARD) || outBuf.sampleCount == 0) {
-                return;
+        if (tre.outputBuffer == null) {
+            tre.outputBuffer = new Buffer();
+        }
+        Buffer outBuf;
+        if (tr.format.matchesWithout(buf.format, FrameRateKey)) {
+            outBuf = buf;
+        } else {
+            outBuf = tre.outputBuffer;
+            if (tre.codec == null) {
+                createCodec(track);
+                if (tre.codec == null) {
+                    throw new UnsupportedOperationException("No codec for this format " + tr.format);
+                }
             }
+            //FIXME we assume a single-step encoding process
+            tre.codec.process(buf, outBuf);
+        }
+        if (outBuf.isFlag(DISCARD) || outBuf.sampleCount == 0) {
+            return;
+        }
 
-            // Compute sample sampleDuration in media time scale
-            Rational sampleDuration;
-            if (tr.inputTime == null) {
-                tr.inputTime = buf.timeStamp;
-                tr.writeTime = buf.timeStamp;
-            }
-            Rational exactSampleDuration = outBuf.sampleDuration.multiply(outBuf.sampleCount);
-            sampleDuration = exactSampleDuration.floor(tr.mediaTimeScale);
-            if (sampleDuration.compareTo(new Rational(0, 1)) <= 0) {
-                sampleDuration = new Rational(1, tr.mediaTimeScale);
-            }
-            tr.writeTime = tr.writeTime.add(sampleDuration);
-            long sampleDurationInMediaTS = sampleDuration.getNumerator() * (tr.mediaTimeScale / sampleDuration.getDenominator());
+        if (tr.startTime == null) {
+            tr.startTime = buf.timeStamp;
+        }
 
-            writeSamples(track, buf.sampleCount, (byte[]) outBuf.data, outBuf.offset, outBuf.length,
-                    sampleDurationInMediaTS / buf.sampleCount, outBuf.isFlag(KEYFRAME));
+
+        // Compute sample sampleDuration in media time scale
+        Rational sampleDuration;
+        Rational exactSampleDuration = outBuf.sampleDuration.multiply(outBuf.sampleCount);
+        sampleDuration = exactSampleDuration.floor(tr.mediaTimeScale);
+        if (sampleDuration.compareTo(new Rational(0, 1)) <= 0) {
+            sampleDuration = new Rational(1, tr.mediaTimeScale);
+        }
+        long sampleDurationInMediaTS = sampleDuration.getNumerator() * (tr.mediaTimeScale / sampleDuration.getDenominator());
+
+        writeSamples(track, outBuf.sampleCount, (byte[]) outBuf.data, outBuf.offset, outBuf.length,
+                sampleDurationInMediaTS / outBuf.sampleCount, outBuf.isFlag(KEYFRAME));
+
+        if (outBuf.header instanceof AvcDecoderConfigurationRecord) {
+            AvcDecoderConfigurationRecord r = (AvcDecoderConfigurationRecord) outBuf.header;
+            writeAvcDecoderConfigurationRecord(track, r);
         }
     }
 
 
+    private TrackEncoder getTrackEncoder(int track) {
+        while (trackEncoders.size() <= track) {
+            trackEncoders.add(new TrackEncoder());
+        }
+        return trackEncoders.get(track);
+    }
+
 
     /**
      * Encodes an image as a video frame and writes it into a video track.
@@ -493,61 +513,17 @@ public void write(int track, Buffer buf) throws IOException {
      * @throws IOException if writing the sample data failed.
      */
     public void write(int track, BufferedImage image, long duration) throws IOException {
-        if (duration <= 0) {
-            throw new IllegalArgumentException("Duration must be greater 0.");
-        }
-        AbstractQuickTimeStream.VideoTrack vt = (AbstractQuickTimeStream.VideoTrack) tracks.get(track); // throws index out of bounds exception if illegal track index
-        if (vt.mediaType != MediaType.VIDEO) {
-            throw new IllegalArgumentException("Track " + track + " is not a video track");
-        }
-        if (vt.codec == null) {
-            createCodec(track);
-        }
-        if (vt.codec == null) {
-            throw new UnsupportedOperationException("No codec for this format: " + vt.format);
-        }
-        ensureStarted();
-
-        // Get the dimensions of the first image
-        if (vt.width == -1) {
-            vt.width = image.getWidth();
-            vt.height = image.getHeight();
-        } else {
-            // The dimension of the image must match the dimension of the video track
-            if (vt.width != image.getWidth() || vt.height != image.getHeight()) {
-                throw new IllegalArgumentException("Dimensions of frame[" + tracks.get(track).getSampleCount()
-                        + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
-                        + ") differs from video dimension (width="
-                        + vt.width + ", height=" + vt.height + ") in track " + track + ".");
-            }
-        }
-
-        // Encode pixel data
-        {
-
-            if (vt.outputBuffer == null) {
-                vt.outputBuffer = new Buffer();
-            }
-
-            boolean isSync = vt.syncInterval == 0 ? false : vt.sampleCount % vt.syncInterval == 0;
-
-            Buffer inputBuffer = new Buffer();
-            inputBuffer.setFlag(KEYFRAME, isSync);
-            inputBuffer.data = image;
-            vt.codec.process(inputBuffer, vt.outputBuffer);
-            if (vt.outputBuffer.isFlag(DISCARD)) {
-                return;
-            }
-
-            isSync = vt.outputBuffer.isFlag(KEYFRAME);
-
-            long offset = getRelativeStreamPosition();
-            OutputStream mdatOut = mdatAtom.getOutputStream();
-            mdatOut.write((byte[]) vt.outputBuffer.data, vt.outputBuffer.offset, vt.outputBuffer.length);
-
-            long length = getRelativeStreamPosition() - offset;
-            vt.addSample(new AbstractQuickTimeStream.Sample(duration, offset, length), 1, isSync);
-        }
+        Track tr = tracks.get(track);
+
+        Buffer buf = new Buffer();
+        buf.data = image;
+        buf.sampleDuration = Rational.valueOf(duration, tr.mediaTimeScale);
+        buf.format = new Format(FormatKeys.MediaTypeKey, FormatKeys.MediaType.VIDEO,
+                VideoFormatKeys.DataClassKey, BufferedImage.class,
+                WidthKey, image.getWidth(),
+                HeightKey, image.getHeight()
+        );
+        write(track, buf);
     }
 
     /**
@@ -591,25 +567,26 @@ public void write(int track, byte[] data, int off, int len, long duration, boole
      */
     @Deprecated
     public void write(int track, int sampleCount, byte[] data, int off, int len, long sampleDuration, boolean isSync) throws IOException {
-        AbstractQuickTimeStream.Track tr = tracks.get(track);
-        if (tr.codec == null) {
+        AbstractQTFFMovieStream.Track tr = tracks.get(track);
+        TrackEncoder tre = getTrackEncoder(track);
+        if (tre.codec == null) {
             writeSamples(track, sampleCount, data, off, len, sampleDuration, isSync);
         } else {
-            if (tr.outputBuffer == null) {
-                tr.outputBuffer = new Buffer();
+            if (tre.outputBuffer == null) {
+                tre.outputBuffer = new Buffer();
             }
-            if (tr.inputBuffer == null) {
-                tr.inputBuffer = new Buffer();
+            if (tre.inputBuffer == null) {
+                tre.inputBuffer = new Buffer();
             }
-            Buffer outb = tr.outputBuffer;
-            Buffer inb = tr.inputBuffer;
+            Buffer outb = tre.outputBuffer;
+            Buffer inb = tre.inputBuffer;
             inb.data = data;
             inb.offset = off;
             inb.length = len;
             inb.sampleDuration = new Rational(sampleDuration, tr.mediaTimeScale);
             inb.sampleCount = sampleCount;
             inb.setFlag(KEYFRAME, isSync);
-            tr.codec.process(inb, outb);
+            tre.codec.process(inb, outb);
             if (!outb.isFlag(DISCARD)) {
                 writeSample(track, (byte[]) outb.data, outb.offset, outb.length, outb.sampleCount, outb.isFlag(KEYFRAME));
             }
diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeWriterSpi.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeWriterSpi.java
index 035393f..17577d2 100755
--- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeWriterSpi.java
+++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/QuickTimeWriterSpi.java
@@ -1,5 +1,5 @@
 /*
- * @(#)QuickTimeWriterSpi.java
+ * @(#)MP4WriterSpi.java
  * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License.
  */
 package org.monte.media.quicktime;
@@ -14,7 +14,7 @@
 import java.util.List;
 
 /**
- * QuickTimeWriterSpi.
+ * MP4WriterSpi.
  *
  * @author Werner Randelshofer
  */
diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/codec/text/AppleClosedCaptionCodec.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/codec/text/AppleClosedCaptionCodec.java
index fbc5d19..e344692 100644
--- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/codec/text/AppleClosedCaptionCodec.java
+++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/codec/text/AppleClosedCaptionCodec.java
@@ -10,7 +10,7 @@
 import org.monte.media.av.FormatKeys;
 import org.monte.media.av.codec.text.AbstractTextCodec;
 import org.monte.media.io.ByteArrayImageInputStream;
-import org.monte.media.quicktime.QTFFImageInputStream;
+import org.monte.media.qtff.QTFFImageInputStream;
 import org.monte.media.quicktime.codec.text.cta608.Cta608Memory;
 import org.monte.media.quicktime.codec.text.cta608.Cta608Parser;
 import org.monte.media.quicktime.codec.text.cta608.Cta608Token;
diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/codec/text/cta608/Cta608Parser.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/codec/text/cta608/Cta608Parser.java
index 819e404..c21f128 100644
--- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/codec/text/cta608/Cta608Parser.java
+++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/quicktime/codec/text/cta608/Cta608Parser.java
@@ -66,7 +66,7 @@ public class Cta608Parser {
      * 

*/ public List parse(InputStream in) throws IOException { - ImageInputStream iis = in instanceof ImageInputStream s ? s : new UncachedImageInputStream(in); + ImageInputStream iis = in instanceof ImageInputStream ? (ImageInputStream) in : new UncachedImageInputStream(in); return parse(iis); } @@ -124,11 +124,13 @@ public String toStringWithOpCodes(List tokens) throws IOException { StringBuilder buf = new StringBuilder(); for (Cta608Token token : tokens) { if (!buf.isEmpty()) buf.append(' '); - if (token instanceof TextToken tx) { + if (token instanceof TextToken) { + TextToken tx = (TextToken) token; buf.append('"'); buf.append(tx.getText().replaceAll("\"", "\\\"")); buf.append('"'); - } else if (token instanceof CmdToken ct) { + } else if (token instanceof CmdToken) { + CmdToken ct = (CmdToken) token; buf.append("{Cmd:C"); buf.append(ct.getChannel()); buf.append(':'); @@ -138,7 +140,8 @@ public String toStringWithOpCodes(List tokens) throws IOException { buf.append(ct.getOperation()); } buf.append('}'); - } else if (token instanceof PacToken ct) { + } else if (token instanceof PacToken) { + PacToken ct = (PacToken) token; buf.append("{Pac:C"); buf.append(ct.getChannel()); buf.append(":R"); @@ -294,85 +297,84 @@ public void updateMemory(List tokens, Cta608Memory memory) throws I Point pos = new Point(0, 0); Cta608CharAttr attr = Cta608Screen.DEFAULT_ATTR; for (Cta608Token token : tokens) { - switch (token) { - case TextToken tx -> { - pos = memory.nonDisplayed.write(pos, attr, tx.getText()); - } - case CmdToken cmd -> { - CmdToken.Command op = cmd.getOperation(); - attr = attr.withBackground(bgCommandMap.get(op)); - attr = attr.withForeground(fgCommandMap.get(op)); - attr = attr.withUnderline(underlineCommandMap.get(op)); - attr = attr.withItalics(italicsCommandMap.get(op)); + if (Objects.requireNonNull(token) instanceof TextToken) { + TextToken tx = (TextToken) Objects.requireNonNull(token); + pos = memory.nonDisplayed.write(pos, attr, tx.getText()); + } else if 
(token instanceof CmdToken) { + CmdToken cmd = (CmdToken) token; + CmdToken.Command op = cmd.getOperation(); + attr = attr.withBackground(bgCommandMap.get(op)); + attr = attr.withForeground(fgCommandMap.get(op)); + attr = attr.withUnderline(underlineCommandMap.get(op)); + attr = attr.withItalics(italicsCommandMap.get(op)); - switch (op) { - case RCL -> { - memory.nonDisplayed.style = Cta608Style.POP_ON; - } - case BS -> { - pos.x = Math.max(pos.x - 1, 0); - memory.nonDisplayed.write(pos, attr, "\0"); - } - case DER -> { - memory.nonDisplayed.deleteToEndOfRow(pos); - } - case RU2 -> { - memory.nonDisplayed.rollUp(2); - } - case RU3 -> { - memory.nonDisplayed.rollUp(3); - } - case RU4 -> { - memory.nonDisplayed.rollUp(4); - } - case RDC -> { - memory.nonDisplayed.style = Cta608Style.PAINT_ON; - } - case TR -> { - memory.nonDisplayed.textRestart(); - } - case RTD -> { - pos.x = 0; - pos.y = 0; - } - case EDM -> { - memory.displayed.erase(); - } - case CR -> { - pos.x = 0; - if (pos.y == Cta608Screen.HEIGHT - 1) { - memory.nonDisplayed.rollUp(1); - } else { - pos.y = pos.y + 1; - } - } - case ENM -> { - memory.nonDisplayed.erase(); - } - case EOC -> { - memory.flipMemories(); - } - case TO1 -> { - pos.x = Math.min(Cta608Screen.WIDTH - 1, pos.x + 1); - } - case TO2 -> { - pos.x = Math.min(Cta608Screen.WIDTH - 1, pos.x + 2); - } - case TO3 -> { - pos.x = Math.min(Cta608Screen.WIDTH - 1, pos.x + 3); + switch (op) { + case RCL -> { + memory.nonDisplayed.style = Cta608Style.POP_ON; + } + case BS -> { + pos.x = Math.max(pos.x - 1, 0); + memory.nonDisplayed.write(pos, attr, "\0"); + } + case DER -> { + memory.nonDisplayed.deleteToEndOfRow(pos); + } + case RU2 -> { + memory.nonDisplayed.rollUp(2); + } + case RU3 -> { + memory.nonDisplayed.rollUp(3); + } + case RU4 -> { + memory.nonDisplayed.rollUp(4); + } + case RDC -> { + memory.nonDisplayed.style = Cta608Style.PAINT_ON; + } + case TR -> { + memory.nonDisplayed.textRestart(); + } + case RTD -> { + pos.x = 0; + pos.y = 0; + } 
+ case EDM -> { + memory.displayed.erase(); + } + case CR -> { + pos.x = 0; + if (pos.y == Cta608Screen.HEIGHT - 1) { + memory.nonDisplayed.rollUp(1); + } else { + pos.y = pos.y + 1; } } - } - case PacToken pac -> { - pos.y = pac.getRow() - 1; - PacToken.Attributes ta = pac.getTextAttributes(); - attr = attr.withForeground(pacFgCommandMap.get(ta)); - attr = attr.withItalics(pacItalicsCommandMap.get(ta)); - Integer indent = pacIndentCommandMap.get(ta); - if (indent != null) { - pos.x = indent; + case ENM -> { + memory.nonDisplayed.erase(); + } + case EOC -> { + memory.flipMemories(); + } + case TO1 -> { + pos.x = Math.min(Cta608Screen.WIDTH - 1, pos.x + 1); + } + case TO2 -> { + pos.x = Math.min(Cta608Screen.WIDTH - 1, pos.x + 2); + } + case TO3 -> { + pos.x = Math.min(Cta608Screen.WIDTH - 1, pos.x + 3); } } + } else if (token instanceof PacToken) { + PacToken pac = (PacToken) token; + pos.y = pac.getRow() - 1; + PacToken.Attributes ta = pac.getTextAttributes(); + attr = attr.withForeground(pacFgCommandMap.get(ta)); + attr = attr.withItalics(pacItalicsCommandMap.get(ta)); + Integer indent = pacIndentCommandMap.get(ta); + if (indent != null) { + pos.x = indent; + } } } } @@ -393,7 +395,9 @@ public String toHtml(Cta608Memory memory) throws IOException { int minX = screen.getMinX(y); int indent; if (textBox != null && y == textBox.y) { - buf.repeat(" ", textBox.x); + for (int i = 0; i < textBox.x; i++) { + buf.append(" "); + } buf.append("

tagName) { } buf.append(d[i]); } - } else if (data instanceof Object[] d) { + } else if (data instanceof Object[]) { + Object[] d = (Object[]) data; for (int i = 0; i < d.length; i++) { if (i != 0) { buf.append(','); diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ArrayUtil.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ArrayUtil.java index 80e392a..4801b76 100644 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ArrayUtil.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ArrayUtil.java @@ -24,7 +24,7 @@ private ArrayUtil() { * @return the object if it is an array of the desired type with the specified minimal length, or a new array */ public static byte[] reuseByteArray(Object obj, int minLength) { - return (obj instanceof byte[] b && b.length >= minLength) ? b : new byte[minLength]; + return (obj instanceof byte[] && ((byte[]) obj).length >= minLength) ? (byte[]) obj : new byte[minLength]; } /** @@ -36,7 +36,7 @@ public static byte[] reuseByteArray(Object obj, int minLength) { * @return the object if it is an array of the desired type with the specified minimal length, or a new array */ public static short[] reuseShortArray(Object obj, int minLength) { - return (obj instanceof short[] b && b.length >= minLength) ? b : new short[minLength]; + return (obj instanceof short[] && ((short[]) obj).length >= minLength) ? (short[]) obj : new short[minLength]; } /** @@ -48,6 +48,29 @@ public static short[] reuseShortArray(Object obj, int minLength) { * @return the object if it is an array of the desired type with the specified minimal length, or a new array */ public static int[] reuseIntArray(Object obj, int minLength) { - return (obj instanceof int[] b && b.length >= minLength) ? b : new int[minLength]; + return (obj instanceof int[] && ((int[]) obj).length >= minLength) ? 
(int[]) obj : new int[minLength]; + } + + /** + * Copies the specified array, truncating or padding with zeros (if necessary) + * so the copy has the specified length. + * + * @param original the array to be copied + * @param offset the offset in the original array + * @param newLength the length of the copy to be returned + * @return a copy of the original array, truncated or padded with zeros + * to obtain the specified length + * @throws NegativeArraySizeException if {@code newLength} is negative + * @throws NullPointerException if {@code original} is null + * @since 1.6 + */ + public static byte[] copyOf(byte[] original, int offset, int newLength) { + if (offset == 0 && newLength == original.length) { + return original.clone(); + } + byte[] copy = new byte[newLength]; + System.arraycopy(original, offset, copy, 0, + Math.min(original.length, newLength)); + return copy; } } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ByteArray.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ByteArray.java new file mode 100644 index 0000000..c9d7503 --- /dev/null +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ByteArray.java @@ -0,0 +1,39 @@ +/* + * @(#)ByteArray.java + * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. + */ + +package org.monte.media.util; + +import java.util.Arrays; + +/** + * Wrapper for a byte array. + */ +public class ByteArray { + private final byte[] array; + + public ByteArray(byte[] array) { + this.array = array; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ByteArray byteArray = (ByteArray) o; + return Arrays.equals(array, byteArray.array); + } + + @Override + public int hashCode() { + return Arrays.hashCode(array); + } + + /** + * Gets the underlying array. 
+ */ + public byte[] getArray() { + return array; + } +} diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArray.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ByteArrays.java similarity index 97% rename from org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArray.java rename to org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ByteArrays.java index f8c1431..9819dae 100644 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/io/ByteArray.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/ByteArrays.java @@ -1,9 +1,9 @@ /* - * @(#)ByteArray.java - * Copyright © 2023 Werner Randelshofer, Switzerland. MIT License. + * @(#)ByteArrays.java + * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. */ -package org.monte.media.io; +package org.monte.media.util; import java.lang.invoke.MethodHandles; import java.lang.invoke.VarHandle; @@ -12,7 +12,7 @@ /** * Utility methods for reading/writing primitive values into byte arrays. */ -public class ByteArray { +public class ByteArrays { private static final VarHandle SHORT_LE = MethodHandles.byteArrayViewVarHandle(short[].class, ByteOrder.LITTLE_ENDIAN); private static final VarHandle SHORT_BE = MethodHandles.byteArrayViewVarHandle(short[].class, ByteOrder.BIG_ENDIAN); private static final VarHandle INT_LE = MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.LITTLE_ENDIAN); @@ -25,7 +25,7 @@ public class ByteArray { /** * Don't let anyone instantiate this class. 
*/ - private ByteArray() { + private ByteArrays() { } diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/MathUtil.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/MathUtil.java new file mode 100644 index 0000000..5acedec --- /dev/null +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/util/MathUtil.java @@ -0,0 +1,33 @@ +/* + * @(#)MathUtil.java + * Copyright © 2024 Werner Randelshofer, Switzerland. MIT License. + */ + +package org.monte.media.util; + +/** + * Math utilities. + */ +public class MathUtil { + /** + * Don't let anyone instantiate this class. + */ + private MathUtil() { + + } + + @SuppressWarnings("ManualMinMaxCalculation") + public static double clamp(double v, double min, double max) { + return v < min ? min : v > max ? max : v; + } + + @SuppressWarnings("ManualMinMaxCalculation") + public static float clamp(float v, float min, float max) { + return v < min ? min : v > max ? max : v; + } + + @SuppressWarnings("ManualMinMaxCalculation") + public static int clamp(int v, int min, int max) { + return v < min ? min : v > max ? 
max : v; + } +} diff --git a/org.monte.media/src/main/java/org.monte.media/org/monte/media/zipmovie/ZipMovieWriter.java b/org.monte.media/src/main/java/org.monte.media/org/monte/media/zipmovie/ZipMovieWriter.java index 0022da4..49c360c 100644 --- a/org.monte.media/src/main/java/org.monte.media/org/monte/media/zipmovie/ZipMovieWriter.java +++ b/org.monte.media/src/main/java/org.monte.media/org/monte/media/zipmovie/ZipMovieWriter.java @@ -14,6 +14,7 @@ import org.monte.media.av.Registry; import org.monte.media.av.codec.video.VideoFormatKeys; import org.monte.media.math.Rational; +import org.monte.media.util.MathUtil; import java.awt.image.BufferedImage; import java.io.File; @@ -166,7 +167,7 @@ public boolean isEmpty(int track) { public void setCompressionQuality(int track, float newValue) { Track t = tracks.get(track); - t.quality = Math.clamp((int) (newValue * 10_000f), 0, 10_000); + t.quality = MathUtil.clamp((int) (newValue * 10_000f), 0, 10_000); } /** @@ -176,7 +177,7 @@ public void setCompressionQuality(int track, float newValue) { */ public float getCompressionQuality(int track) { Track t = tracks.get(track); - return t.quality == -1 ? 0.97f : Math.clamp(t.quality / 10_000f, 0f, 1f); + return t.quality == -1 ? 
0.97f : MathUtil.clamp(t.quality / 10_000f, 0f, 1f); } private Codec createCodec(Format fmt) { diff --git a/org.monte.media/src/test/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodecTest.java b/org.monte.media/src/test/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodecTest.java index f166cf9..0777a43 100644 --- a/org.monte.media/src/test/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodecTest.java +++ b/org.monte.media/src/test/java/org.monte.media/org/monte/media/av/codec/video/TechSmithCodecTest.java @@ -8,7 +8,7 @@ import org.junit.jupiter.api.Test; import org.monte.media.av.Buffer; import org.monte.media.av.Format; -import org.monte.media.io.SeekableByteArrayOutputStream; +import org.monte.media.io.ByteArrayImageOutputStream; import java.awt.BasicStroke; import java.awt.Color; @@ -25,11 +25,11 @@ public void shouldEncodeDecode24BitKeyFrame() throws IOException { int width = 40, height = 30; int[] rgb24 = toRgb24(createFrame(width, height, 0, BufferedImage.TYPE_INT_RGB), true); TechSmithCodecCore codec = new TechSmithCodecCore(); - SeekableByteArrayOutputStream encoded = new SeekableByteArrayOutputStream(); + ByteArrayImageOutputStream encoded = new ByteArrayImageOutputStream(); codec.encodeKey24(encoded, rgb24, width, height, 0, width); byte[] encodedBytes = encoded.getBuffer(); int[] actualPixels = new int[width * height]; - codec.decode24(encodedBytes, 0, encoded.size(), actualPixels, null, width, height, true); + codec.decode24(encodedBytes, 0, (int) encoded.length(), actualPixels, null, width, height, true); assertArrayEquals(rgb24, actualPixels); } @@ -42,11 +42,11 @@ public void shouldEncodeDecode16BitKeyFrame() throws IOException { int[] rgb24 = toRgb24(frame24, true); short[] rgb16 = toRgb16(frame16); TechSmithCodecCore codec = new TechSmithCodecCore(); - SeekableByteArrayOutputStream encoded = new SeekableByteArrayOutputStream(); + ByteArrayImageOutputStream encoded = new ByteArrayImageOutputStream(); 
codec.encodeKey16(encoded, rgb16, width, height, 0, width); byte[] encodedBytes = encoded.getBuffer(); int[] actualPixels = new int[width * height]; - codec.decode16(encodedBytes, 0, encoded.size(), actualPixels, null, width, height, true); + codec.decode16(encodedBytes, 0, (int) encoded.length(), actualPixels, null, width, height, true); assertArrayEquals(rgb24, actualPixels); } @@ -58,14 +58,14 @@ public void shouldEncodeDecode8BitKeyFrame() throws IOException { int[] rgb24 = toRgb24(frame24, false); byte[] rgb8 = toRgb8(frame8); TechSmithCodecCore codec = new TechSmithCodecCore(); - SeekableByteArrayOutputStream encoded = new SeekableByteArrayOutputStream(); + ByteArrayImageOutputStream encoded = new ByteArrayImageOutputStream(); codec.encodeKey8(encoded, rgb8, width, height, 0, width); byte[] encodedBytes = encoded.getBuffer(); int[] actualPixels = new int[width * height]; int[] palette = new int[256]; ((IndexColorModel) frame8.getColorModel()).getRGBs(palette); codec.setPalette(palette); - codec.decode8(encodedBytes, 0, encoded.size(), actualPixels, null, width, height, true); + codec.decode8(encodedBytes, 0, (int) encoded.length(), actualPixels, null, width, height, true); assertArrayEquals(rgb24, actualPixels); } diff --git a/pom.xml b/pom.xml index 31db9f0..d8037ed 100644 --- a/pom.xml +++ b/pom.xml @@ -81,8 +81,8 @@ - 21 - 21 + 17 + 17 UTF-8 UTF-8 ${git.commit.time}