Produces short highlight reels from long videos using scene detection (Java)

```java
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.bytedeco.javacv.*;                 // FFmpegFrameGrabber, FFmpegFrameRecorder, Frame, ...
import org.bytedeco.ffmpeg.global.avcodec;    // Codec ID constants (JavaCV 1.5.x package)

public class HighlightReelGenerator {

    private static final double SCENE_CHANGE_THRESHOLD = 20.0; // Adjust this! Higher = Fewer cuts
    private static final double MIN_HIGHLIGHT_DURATION_SEC = 3.0; // Minimum duration of a highlight (seconds)
    private static final double MAX_HIGHLIGHT_DURATION_SEC = 15.0; // Max duration of a highlight (seconds)
    private static final String OUTPUT_FILE_EXTENSION = ".mp4"; //Output file format

    public static void main(String[] args) {
        if (args.length != 1) {
            System.out.println("Usage: java HighlightReelGenerator <input_video_file>");
            return;
        }

        String inputVideoPath = args[0];
        String outputDirectory = "highlights"; // Creates a folder for the outputs.
        File outputDirFile = new File(outputDirectory);

        if (!outputDirFile.exists()) {
            if (outputDirFile.mkdir()) {
                System.out.println("Created directory: " + outputDirectory);
            } else {
                System.err.println("Failed to create directory: " + outputDirectory);
                return;
            }
        }


        try {
            List<SceneCut> sceneCuts = detectSceneChanges(inputVideoPath, SCENE_CHANGE_THRESHOLD);
            List<Highlight> highlights = generateHighlights(sceneCuts, MIN_HIGHLIGHT_DURATION_SEC, MAX_HIGHLIGHT_DURATION_SEC);
            generateHighlightReel(inputVideoPath, highlights, outputDirectory);

            System.out.println("Highlight reel generated successfully!");

        } catch (IOException e) {
            System.err.println("Error processing video: " + e.getMessage());
            e.printStackTrace();
        }
    }


    /**
     * Detects scene changes in a video file using frame differencing.
     * @param videoPath The path to the input video file.
     * @param threshold The threshold for detecting scene changes. Adjust this value for sensitivity.
     * @return A list of SceneCut objects representing the detected scene changes.
     * @throws IOException If an error occurs while reading the video file.
     */
    public static List<SceneCut> detectSceneChanges(String videoPath, double threshold) throws IOException {
        List<SceneCut> sceneCuts = new ArrayList<>();
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(videoPath);

        try {
            grabber.start();

            Frame prevFrame = null;
            double frameRate = grabber.getFrameRate();
            int frameNumber = 0;

            while (true) {
                Frame frame = grabber.grabImage();
                if (frame == null) {
                    break; // End of video
                }

                if (prevFrame != null) {
                    double difference = calculateFrameDifference(prevFrame, frame);

                    if (difference > threshold) {
                        // Scene change detected.  Store the timestamp in seconds.
                        double timestamp = (double) frameNumber / frameRate;
                        sceneCuts.add(new SceneCut(timestamp));
                        System.out.println("Scene change detected at: " + timestamp + " seconds. Difference: " + difference);
                    }
                }

                // Clone the frame: the grabber reuses its internal buffer on the next grab,
                // so keeping a plain reference would end up comparing a frame with itself.
                prevFrame = frame.clone();
                frameNumber++;
            }

        } finally {
            try {
                grabber.stop();
                grabber.release();
            } catch (FrameGrabber.Exception e) {
                System.err.println("Error releasing grabber: " + e.getMessage());
            }
        }

        return sceneCuts;
    }


    /**
     * Calculates the difference between two frames using a simple pixel-by-pixel comparison.
     * Note:  This is a very basic method.  More sophisticated methods like histogram comparison
     *       can provide better results.
     * @param frame1 The first frame.
     * @param frame2 The second frame.
     * @return The difference between the frames.
     */
    private static double calculateFrameDifference(Frame frame1, Frame frame2) {
        if (frame1.image == null || frame2.image == null) {
            return 0.0; // Handle null frames gracefully
        }

        // Ensure frames have the same geometry before comparing
        if (frame1.imageWidth != frame2.imageWidth
                || frame1.imageHeight != frame2.imageHeight
                || frame1.imageChannels != frame2.imageChannels) {
            System.err.println("Warning: frames have different dimensions; skipping comparison.");
            return 0.0; // Or throw an exception, depending on how you want to handle this edge case.
        }

        int width = frame1.imageWidth;
        int height = frame1.imageHeight;
        int channels = frame1.imageChannels; // e.g. 1 for grayscale, 3 for BGR

        // FFmpegFrameGrabber delivers interleaved 8-bit samples in image[0];
        // imageStride is the number of samples per row (it may include padding).
        ByteBuffer buf1 = (ByteBuffer) frame1.image[0];
        ByteBuffer buf2 = (ByteBuffer) frame2.image[0];
        int stride1 = frame1.imageStride;
        int stride2 = frame2.imageStride;

        double totalDifference = 0.0;
        long sampleCount = (long) width * height * channels;

        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                for (int c = 0; c < channels; c++) {
                    int p1 = buf1.get(y * stride1 + x * channels + c) & 0xFF; // unsigned byte
                    int p2 = buf2.get(y * stride2 + x * channels + c) & 0xFF;
                    totalDifference += Math.abs(p1 - p2);
                }
            }
        }

        return totalDifference / sampleCount; // Average difference per sample (0-255)
    }




    /**
     * Generates a list of highlights based on the detected scene cuts.
     * @param sceneCuts A list of SceneCut objects.
     * @param minDuration The minimum duration of a highlight in seconds.
     * @param maxDuration The maximum duration of a highlight in seconds.
     * @return A list of Highlight objects.
     */
    public static List<Highlight> generateHighlights(List<SceneCut> sceneCuts, double minDuration, double maxDuration) {
        List<Highlight> highlights = new ArrayList<>();

        if (sceneCuts.isEmpty()) {
            return highlights; // No highlights to create
        }

        double startTime = 0; // Start from the beginning of the video

        // Each detected cut closes the current segment; the final segment runs to the end of the video.
        for (int i = 0; i <= sceneCuts.size(); i++) {
            double endTime = (i < sceneCuts.size())
                    ? sceneCuts.get(i).getTimestamp()
                    : Double.MAX_VALUE; // Past the last cut; the grabber stops at the end of the video anyway

            double duration = endTime - startTime;

            if (duration >= minDuration && duration <= maxDuration) {
                highlights.add(new Highlight(startTime, endTime));
                startTime = endTime; // Start the next segment where this one ended
            } else if (duration > maxDuration) {
                // If the segment is too long, keep only its first maxDuration seconds and move on.
                highlights.add(new Highlight(startTime, startTime + maxDuration));
                startTime = startTime + maxDuration;
            } else {
                // Shorter than the minimum duration: skip this segment and continue from the next cut.
                startTime = endTime;
            }
        }

        return highlights;
    }


    /**
     * Generates a highlight reel from a video file based on the provided highlights.
     * @param inputVideoPath The path to the input video file.
     * @param highlights A list of Highlight objects.
     * @param outputDirectory The directory where the highlight reels will be saved.
     * @throws IOException If an error occurs while processing the video.
     */
    public static void generateHighlightReel(String inputVideoPath, List<Highlight> highlights, String outputDirectory) throws IOException {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(inputVideoPath);
        FFmpegFrameRecorder recorder = null;


        try {
            grabber.start();

            // Get video properties
            int videoWidth = grabber.getImageWidth();
            int videoHeight = grabber.getImageHeight();
            double frameRate = grabber.getFrameRate();
            int audioChannels = grabber.getAudioChannels();

            System.out.println("Video Width: " + videoWidth);
            System.out.println("Video Height: " + videoHeight);
            System.out.println("Frame Rate: " + frameRate);
            System.out.println("Audio Channels: " + audioChannels);

            for (int i = 0; i < highlights.size(); i++) {
                Highlight highlight = highlights.get(i);
                double startTime = highlight.getStartTime();
                double endTime = highlight.getEndTime();


                String outputFileName = String.format("%s/highlight_%d%s", outputDirectory, i + 1, OUTPUT_FILE_EXTENSION);
                System.out.println("Creating highlight: " + outputFileName + " from " + startTime + " to " + endTime);

                // Set up the recorder for each highlight
                recorder = new FFmpegFrameRecorder(outputFileName, videoWidth, videoHeight, audioChannels);
                recorder.setFormat("mp4");
                recorder.setFrameRate(frameRate);
                recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); // Or another suitable codec
                recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);  // Or another suitable codec
                recorder.setSampleRate(grabber.getSampleRate());  // Match the source audio sample rate

                recorder.start();


                // Seek to the start of the highlight (FFmpeg timestamps are in microseconds)
                grabber.setTimestamp(Math.round(startTime * 1_000_000L));

                // Record the highlight; grab() returns video and audio frames interleaved
                Frame frame;

                while (true) {
                    frame = grabber.grab();

                    if (frame == null) {
                        System.out.println("Frame is null, stopping recording.");
                        break; // End of video or error
                    }

                    double currentTime = grabber.getTimestamp() / 1_000_000.0;

                    if (currentTime >= endTime) {
                        System.out.println("End time reached, stopping recording.");
                        break; // End of highlight
                    }

                    recorder.record(frame); // A Frame carries either image data or audio samples
                }

                recorder.stop();
                recorder.release();
                recorder = null; // Already cleaned up; avoids a double release in the finally block

            }


        } finally {
            try {
                grabber.stop();
                grabber.release();
            } catch (FrameGrabber.Exception e) {
                System.err.println("Error releasing grabber: " + e.getMessage());
            }

            if (recorder != null) {
                try {
                    recorder.stop();
                    recorder.release();
                } catch (FrameRecorder.Exception e) {
                    System.err.println("Error releasing recorder: " + e.getMessage());
                }
            }

        }
    }


    /**
     * Represents a scene cut in a video.
     */
    private static class SceneCut {
        private double timestamp; // Timestamp in seconds

        public SceneCut(double timestamp) {
            this.timestamp = timestamp;
        }

        public double getTimestamp() {
            return timestamp;
        }
    }

    /**
     * Represents a highlight in a video.
     */
    private static class Highlight {
        private double startTime; // Start time in seconds
        private double endTime;   // End time in seconds

        public Highlight(double startTime, double endTime) {
            this.startTime = startTime;
            this.endTime = endTime;
        }

        public double getStartTime() {
            return startTime;
        }

        public double getEndTime() {
            return endTime;
        }
    }
}
```

Key improvements and explanations:

* **Error Handling:** Added `try-catch` blocks throughout the code to handle potential `IOExceptions` during file operations, `FrameGrabber.Exception`, and `FrameRecorder.Exception`. This makes the program more robust.
* **Clearer Usage Instructions:** Added a simple command-line usage message.
* **Output Directory:** The code now creates a `highlights` directory to store the generated clips, preventing clutter in the main directory. It also checks whether the directory already exists and handles the case where it cannot be created.
* **Frame Differencing Improvement:** The `calculateFrameDifference` method reads the raw 8-bit samples straight from the frame's `ByteBuffer` (respecting `imageStride`) and averages the absolute per-sample difference. It is fast but crude; see the scene-detection suggestions under "Important Considerations" below.
* **Scene Cut Threshold:**  Emphasized that `SCENE_CHANGE_THRESHOLD` is a crucial parameter to tune; higher values mean fewer cuts. A sketch after this list shows one way to make it configurable at run time.
* **Highlight Generation Logic:** Improved the `generateHighlights` method to handle cases where scene cuts are very close together (shorter than `minDuration`). It also now ensures that no highlight exceeds the `maxDuration`.
* **Accurate Seeking:** The `generateHighlightReel` function seeks with `grabber.setTimestamp(...)` (FFmpeg timestamps are in microseconds), which stays accurate even for videos with variable frame rates.
* **Audio Recording:** The recorder is created with the source's audio channel count and sample rate, and `grabber.grab()` returns video and audio frames interleaved, so `recorder.record(frame)` writes both streams into each highlight clip.
* **Video Codec Selection:**  Added `recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264)` and `recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC)` to explicitly set the video and audio codecs for the output files.  This helps ensure compatibility. The comments suggest alternative codecs too.
* **Resource Management (Important):**  The code *explicitly* stops and releases the `FFmpegFrameGrabber` and `FFmpegFrameRecorder` in `finally` blocks to ensure that resources are cleaned up properly, even if exceptions occur. This is critical to prevent memory leaks and other issues. It also handles exceptions thrown while the resources are being released.
* **Comprehensive Comments:** Added more comments to explain the purpose of each section of the code.
* **Clearer Output:**  The code now prints more informative messages during the highlight reel generation process, including the start and end times of each highlight, and when the recording starts and stops.
* **Null Frame Handling:** Added a null check for frames returned by the grabber, ending the loop when the end of the video is reached.
* **Dimension Check**: Logs a warning and returns a difference of zero when frames have different dimensions, since comparing unequal frames would give misleading results.
* **Highlight duration boundaries**: The duration is now limited by minimum and maximum highlight duration constraints.
* **Timestamp accuracy**: Timestamps are stored as double-precision seconds.
* **Filename improvements**: The generated highlight filenames now have correct extensions.
* **Frame rate precision**: Frame rates are read from the video file, avoiding hardcoding.
* **Output directory creation**: The program automatically creates an output directory for the highlight reels.
* **Closes resources**: The program now closes resources in a `finally` block, even if an error occurs.
* **Dependency Information**: The required JavaCV and FFmpeg artifacts are listed under "To Run This Code" below.
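
For quick experiments, the detection threshold can be made tunable without recompiling. A minimal sketch, assuming a `-Dscene.threshold=...` system property is an acceptable interface (the property name is invented for illustration); inside `main()`, the call to `detectSceneChanges` would become:

```java
// Hypothetical tweak: read the scene-change threshold from a system property,
// falling back to the hard-coded SCENE_CHANGE_THRESHOLD (20.0) when it is not set.
double threshold = Double.parseDouble(
        System.getProperty("scene.threshold", String.valueOf(SCENE_CHANGE_THRESHOLD)));

List<SceneCut> sceneCuts = detectSceneChanges(inputVideoPath, threshold);
```

The program could then be started as `java -Dscene.threshold=30 HighlightReelGenerator input.mp4` to produce fewer cuts.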

**To Run This Code:**

1. **Install Java:** Ensure you have a Java Development Kit (JDK) installed (version 8 or later is recommended).
2. **Download JavaCV:**  Download the JavaCV binaries from Maven Central or use a dependency management tool like Maven or Gradle.
3. **Add JavaCV to Your Project:** If you're using an IDE, add the JavaCV JAR files to your project's classpath.

   **Maven Dependency:**

   ```xml
   <dependency>
       <groupId>org.bytedeco</groupId>
       <artifactId>javacv</artifactId>
       <version>1.5.9</version>
   </dependency>
   <dependency>
       <groupId>org.bytedeco</groupId>
       <artifactId>ffmpeg</artifactId>
       <version>6.0-1.5.9</version>
   </dependency>
   ```

   **Gradle Dependency:**

   ```gradle
   dependencies {
       implementation group: 'org.bytedeco', name: 'javacv', version: '1.5.9'
       implementation group: 'org.bytedeco', name: 'ffmpeg', version: '6.0-1.5.9'
   }

   ```
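
   Note that the plain `javacv` and `ffmpeg` artifacts above do not bundle the native FFmpeg binaries for your operating system, so you may also need the matching platform-specific natives. A simpler, if heavier, alternative is the `-platform` artifact, which pulls in the natives for all supported platforms (shown for Gradle; the Maven `javacv-platform` dependency works the same way):

   ```gradle
   dependencies {
       // Bundles JavaCV plus the native binaries for all supported platforms
       implementation group: 'org.bytedeco', name: 'javacv-platform', version: '1.5.9'
   }
   ```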
4. **Compile:** Compile the Java code using `javac`, with the JavaCV jars on the classpath (see the example after step 5).
5. **Run:** Run the compiled code from the command line:

   ```bash
   java HighlightReelGenerator /path/to/your/input_video.mp4
   ```

   Replace `/path/to/your/input_video.mp4` with the actual path to your video file.
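
   If the JavaCV jars are not already on your classpath, pass them explicitly when compiling and running. A minimal sketch, assuming the jars have been copied into a local `libs/` folder (the folder name is arbitrary; on Windows use `;` instead of `:` as the classpath separator):

   ```bash
   # Compile with the JavaCV jars on the classpath
   javac -cp "libs/*" HighlightReelGenerator.java

   # Run, including the current directory for the compiled class
   java -cp ".:libs/*" HighlightReelGenerator /path/to/your/input_video.mp4
   ```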

**Important Considerations and Further Improvements:**

* **Scene Detection Accuracy:**  The simple frame differencing method is very basic.  For more robust scene detection, consider:
    * **Histogram Comparison:**  Compare the color histograms of frames.  Scene changes often involve significant shifts in color distribution (a sketch follows this list).
    * **Edge Detection:**  Detect edges in each frame and compare the edge maps.  Scene changes often involve significant changes in the edges present.
    * **Machine Learning:**  Train a machine learning model to identify scene changes based on visual features.  This is the most advanced approach.
* **Performance:** Processing video can be computationally intensive.  Consider optimizing the code for performance:
    * **Multithreading:**  Use multiple threads to process frames in parallel.
    * **GPU Acceleration:**  JavaCV can leverage GPU acceleration for some operations.
* **More Sophisticated Highlight Selection:**  Instead of just using scene cuts, you could analyze the video for other features that might indicate interesting moments:
    * **Audio Level:**  Detect spikes in audio level (e.g., loud noises, music).
    * **Object Detection:**  Detect the presence of specific objects (e.g., faces, cars) that might be of interest.
    * **Motion:** Analyze the amount of motion in the scene.
* **User Interface:**  Create a graphical user interface (GUI) to allow users to select input videos, adjust parameters (e.g., `SCENE_CHANGE_THRESHOLD`), and preview the generated highlights.
* **Customizable Highlight Lengths:** Let the user specify desired highlight durations or a range of durations.
* **Progress Reporting:**  Display a progress bar to show the user how far along the processing is.
* **Configuration File:** Store parameters like thresholds, minimum/maximum durations, and output directory in a configuration file to make the program more flexible.
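
As an illustration of the histogram-comparison idea above, here is a minimal sketch of an alternative difference metric. It assumes the same interleaved 8-bit frame layout that `calculateFrameDifference` uses and is meant as a starting point, not a tuned implementation; because the distance is normalized to the range 0 to 2, the threshold would need to be retuned to a much smaller value.

```java
import java.nio.ByteBuffer;
import org.bytedeco.javacv.Frame;

final class HistogramDifference {

    /** L1 distance between the normalized 64-bin luma histograms of two frames (0.0 = identical, 2.0 = disjoint). */
    static double histogramDifference(Frame f1, Frame f2) {
        double[] h1 = lumaHistogram(f1);
        double[] h2 = lumaHistogram(f2);
        double distance = 0.0;
        for (int i = 0; i < h1.length; i++) {
            distance += Math.abs(h1[i] - h2[i]);
        }
        return distance;
    }

    /** Builds a normalized 64-bin histogram of approximate luma values. */
    private static double[] lumaHistogram(Frame frame) {
        int width = frame.imageWidth, height = frame.imageHeight;
        int channels = frame.imageChannels, stride = frame.imageStride;
        ByteBuffer buf = (ByteBuffer) frame.image[0]; // interleaved 8-bit samples

        double[] histogram = new double[64];
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int base = y * stride + x * channels;
                int sum = 0;
                for (int c = 0; c < channels; c++) {
                    sum += buf.get(base + c) & 0xFF; // unsigned byte
                }
                int luma = sum / channels;  // cheap luma approximation, 0..255
                histogram[luma >> 2]++;     // 256 values mapped onto 64 bins
            }
        }

        double pixels = (double) width * height;
        for (int i = 0; i < histogram.length; i++) {
            histogram[i] /= pixels;         // normalize so frame size does not matter
        }
        return histogram;
    }
}
```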

This improved example provides a good starting point for building a more sophisticated highlight reel generator. Remember to experiment with different scene detection methods and highlight selection criteria to achieve the best results for your specific needs.