diff --git a/mediainfo/src/main/cpp/CMakeLists.txt b/mediainfo/src/main/cpp/CMakeLists.txt
index e8afeda..9e14e70 100644
--- a/mediainfo/src/main/cpp/CMakeLists.txt
+++ b/mediainfo/src/main/cpp/CMakeLists.txt
@@ -16,7 +16,7 @@ set(
         # List variable name
         ffmpeg_libs_names
         # Values in the list
-        avcodec avformat avutil)
+        avcodec avformat avutil swscale)
 
 foreach (ffmpeg_lib_name ${ffmpeg_libs_names})
     add_library(
@@ -35,7 +35,9 @@ add_library(${CMAKE_PROJECT_NAME} SHARED
         # List C/C++ source files with relative paths to this CMakeLists.txt.
         main.cpp
         mediainfo.cpp
-        utils.cpp)
+        utils.cpp
+        frame_loader_context.cpp
+        frame_extractor.cpp)
 
 # Specifies libraries CMake should link to your target library. You
 # can link libraries from various origins, such as libraries defined in this
@@ -43,4 +45,5 @@ add_library(${CMAKE_PROJECT_NAME} SHARED
 target_link_libraries(${CMAKE_PROJECT_NAME}
         # List libraries link to the target library
         log
+        jnigraphics
        ${ffmpeg_libs_names})
\ No newline at end of file
diff --git a/mediainfo/src/main/cpp/frame_extractor.cpp b/mediainfo/src/main/cpp/frame_extractor.cpp
new file mode 100644
index 0000000..10790b6
--- /dev/null
+++ b/mediainfo/src/main/cpp/frame_extractor.cpp
@@ -0,0 +1,145 @@
+extern "C" {
+#include <libavcodec/avcodec.h>
+#include <libavutil/imgutils.h>
+#include <libswscale/swscale.h>
+#include <android/bitmap.h>
+}
+
+#include <jni.h>
+#include "frame_loader_context.h"
+#include "log.h"
+
+bool frame_extractor_load_frame(JNIEnv *env, int64_t jFrameLoaderContextHandle, int64_t time_millis, jobject jBitmap) {
+    AndroidBitmapInfo bitmapMetricInfo;
+    AndroidBitmap_getInfo(env, jBitmap, &bitmapMetricInfo);
+
+    auto *videoStream = frame_loader_context_from_handle(jFrameLoaderContextHandle);
+
+    auto pixelFormat = static_cast<AVPixelFormat>(videoStream->parameters->format);
+    if (pixelFormat == AV_PIX_FMT_NONE) {
+        // With the pipe protocol some files fail to provide pixel format info.
+        // In that case we can set up neither scaling nor frame extraction.
+        return false;
+    }
+
+    bool resultValue = true;
+
+    SwsContext *scalingContext =
+            sws_getContext(
+                    // srcW
+                    videoStream->parameters->width,
+                    // srcH
+                    videoStream->parameters->height,
+                    // srcFormat
+                    pixelFormat,
+                    // dstW
+                    bitmapMetricInfo.width,
+                    // dstH
+                    bitmapMetricInfo.height,
+                    // dstFormat
+                    AV_PIX_FMT_RGBA,
+                    SWS_BICUBIC, nullptr, nullptr, nullptr);
+
+    AVStream *avVideoStream = videoStream->avFormatContext->streams[videoStream->videoStreamIndex];
+
+    int64_t videoDuration = avVideoStream->duration;
+    // In some cases the duration of a video stream is set to Long.MIN_VALUE and we need to compute it in another way
+    if (videoDuration == LONG_LONG_MIN && avVideoStream->time_base.den != 0) {
+        videoDuration = videoStream->avFormatContext->duration / avVideoStream->time_base.den;
+    }
+
+
+    AVPacket *packet = av_packet_alloc();
+    AVFrame *frame = av_frame_alloc();
+
+    int64_t seekPosition = videoDuration / 3;
+
+    if (time_millis != -1) {
+        int64_t seek_time = av_rescale_q(time_millis, AV_TIME_BASE_Q, avVideoStream->time_base);
+        if (seek_time < videoDuration) {
+            seekPosition = seek_time;
+        }
+    }
+
+    av_seek_frame(videoStream->avFormatContext,
+                  videoStream->videoStreamIndex,
+                  seekPosition,
+                  0);
+
+    AVCodecContext *videoCodecContext = avcodec_alloc_context3(videoStream->avVideoCodec);
+    avcodec_parameters_to_context(videoCodecContext, videoStream->parameters);
+    avcodec_open2(videoCodecContext, videoStream->avVideoCodec, nullptr);
+
+    while (true) {
+        if (av_read_frame(videoStream->avFormatContext, packet) < 0) {
+            // Couldn't read a packet, so we skip the whole frame
+            resultValue = false;
+            break;
+        }
+
+        if (packet->stream_index == videoStream->videoStreamIndex) {
+            avcodec_send_packet(videoCodecContext, packet);
+            int response = avcodec_receive_frame(videoCodecContext, frame);
+            if (response == AVERROR(EAGAIN)) {
+                // A frame can be split across several packets, so continue reading in this case
+                continue;
+            }
+
+            if (response >= 0) {
+                AVFrame *frameForDrawing = av_frame_alloc();
+                void *bitmapBuffer;
+                AndroidBitmap_lockPixels(env, jBitmap, &bitmapBuffer);
+
+                // Prepare an FFmpeg frame to use the Android Bitmap's buffer
+                av_image_fill_arrays(
+                        frameForDrawing->data,
+                        frameForDrawing->linesize,
+                        static_cast<uint8_t *>(bitmapBuffer),
+                        AV_PIX_FMT_RGBA,
+                        bitmapMetricInfo.width,
+                        bitmapMetricInfo.height,
+                        1);
+
+                // Scale the frame read from the media into the frame that wraps the Android Bitmap's buffer
+                sws_scale(
+                        scalingContext,
+                        frame->data,
+                        frame->linesize,
+                        0,
+                        videoStream->parameters->height,
+                        frameForDrawing->data,
+                        frameForDrawing->linesize);
+
+                av_frame_free(&frameForDrawing);
+
+                AndroidBitmap_unlockPixels(env, jBitmap);
+                break;
+            }
+        }
+        av_packet_unref(packet);
+    }
+
+    av_packet_free(&packet);
+    av_frame_free(&frame);
+    avcodec_free_context(&videoCodecContext);
+
+    sws_freeContext(scalingContext);
+
+    return resultValue;
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_io_github_anilbeesetti_nextlib_mediainfo_FrameLoader_nativeRelease(JNIEnv *env, jclass clazz,
+                                                                        jlong jFrameLoaderContextHandle) {
+    frame_loader_context_free(jFrameLoaderContextHandle);
+}
+extern "C"
+JNIEXPORT jboolean JNICALL
+Java_io_github_anilbeesetti_nextlib_mediainfo_FrameLoader_nativeLoadFrame(JNIEnv *env, jclass clazz,
+                                                                          jlong jFrameLoaderContextHandle,
+                                                                          jlong time_millis,
+                                                                          jobject jBitmap) {
+    bool successfullyLoaded = frame_extractor_load_frame(env, jFrameLoaderContextHandle, time_millis, jBitmap);
+    return static_cast<jboolean>(successfullyLoaded);
+}
\ No newline at end of file
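
Usage note (a sketch, not part of the patch): frame_extractor_load_frame writes RGBA pixels into whatever Bitmap it is given and scales the decoded frame to that Bitmap's dimensions, so the JVM caller chooses the output size. A minimal Kotlin illustration of that contract; loadThumbnail is a hypothetical helper, and FrameLoader's constructor is internal, so outside this module frames normally come from MediaInfo.getFrame.

import android.graphics.Bitmap

// Hypothetical helper: fills a thumbnail-sized ARGB_8888 bitmap; the native side
// scales the decoded frame with swscale to match the bitmap's dimensions.
fun loadThumbnail(loader: FrameLoader, width: Int, height: Int, timeMillis: Long = -1L): Bitmap? {
    val bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
    return if (loader.loadFrameInto(bitmap, timeMillis)) bitmap else null
}
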
diff --git a/mediainfo/src/main/cpp/frame_loader_context.cpp b/mediainfo/src/main/cpp/frame_loader_context.cpp
new file mode 100644
index 0000000..fe48fa9
--- /dev/null
+++ b/mediainfo/src/main/cpp/frame_loader_context.cpp
@@ -0,0 +1,17 @@
+#include "frame_loader_context.h"
+
+FrameLoaderContext *frame_loader_context_from_handle(int64_t handle) {
+    return reinterpret_cast<FrameLoaderContext *>(handle);
+}
+
+int64_t frame_loader_context_to_handle(FrameLoaderContext *frameLoaderContext) {
+    return reinterpret_cast<int64_t>(frameLoaderContext);
+}
+
+void frame_loader_context_free(int64_t handle) {
+    auto *frameLoaderContext = frame_loader_context_from_handle(handle);
+    auto *avFormatContext = frameLoaderContext->avFormatContext;
+
+    avformat_close_input(&avFormatContext);
+    free(frameLoaderContext);
+}
\ No newline at end of file
diff --git a/mediainfo/src/main/cpp/frame_loader_context.h b/mediainfo/src/main/cpp/frame_loader_context.h
new file mode 100644
index 0000000..c0f7343
--- /dev/null
+++ b/mediainfo/src/main/cpp/frame_loader_context.h
@@ -0,0 +1,49 @@
+#ifndef NEXTPLAYER_FRAME_LOADER_CONTEXT_H
+#define NEXTPLAYER_FRAME_LOADER_CONTEXT_H
+
+
+#include <jni.h>
+
+extern "C" {
+#include <libavformat/avformat.h>
+#include <libavcodec/avcodec.h>
+}
+
+/**
+ * A struct that is stored (as a handle) in a MediaInfo object.
+ * Aggregates necessary pointers to FFmpeg structs.
+ */
+struct FrameLoaderContext {
+    // Root FFmpeg object for the given media.
+    AVFormatContext *avFormatContext;
+    // Parameters of the video stream.
+    AVCodecParameters *parameters;
+    // Codec of the video stream.
+    const AVCodec *avVideoCodec;
+    // Index of the video stream in the avFormatContext.
+    int videoStreamIndex;
+};
+
+/**
+ * Converts an int64_t handle back to a pointer to a FrameLoaderContext struct.
+ *
+ * @param handle a handle that stores a pointer to an actual FrameLoaderContext struct
+ * @return a pointer to the FrameLoaderContext struct
+ */
+FrameLoaderContext *frame_loader_context_from_handle(int64_t handle);
+
+/**
+ * Converts a pointer to a FrameLoaderContext struct to a long value to be stored in the JVM part.
+ *
+ * @param frameLoaderContext a pointer to convert
+ * @return the pointer converted to a handle
+ */
+int64_t frame_loader_context_to_handle(FrameLoaderContext *frameLoaderContext);
+
+/**
+ * Frees the FrameLoaderContext struct.
+ *
+ * @param handle a handle to the FrameLoaderContext struct to free
+ */
+void frame_loader_context_free(int64_t handle);
+
+#endif //NEXTPLAYER_FRAME_LOADER_CONTEXT_H
diff --git a/mediainfo/src/main/cpp/mediainfo.cpp b/mediainfo/src/main/cpp/mediainfo.cpp
index 88f242a..23ffa9a 100644
--- a/mediainfo/src/main/cpp/mediainfo.cpp
+++ b/mediainfo/src/main/cpp/mediainfo.cpp
@@ -2,6 +2,7 @@
 #include <jni.h>
 #include "utils.h"
 #include "log.h"
+#include "frame_loader_context.h"
 
 extern "C" {
 #include "libavformat/avformat.h"
@@ -38,7 +39,7 @@ void onMediaInfoFound(JNIEnv *env, jobject jMediaInfoBuilder, AVFormatContext *a
             jMediaInfoBuilder,
             fields.MediaInfoBuilder.onMediaInfoFoundID,
             jFileFormatName,
-            avFormatContext->duration / 1000);
+            avFormatContext->duration);
 }
 
 void onVideoStreamFound(JNIEnv *env, jobject jMediaInfoBuilder, AVFormatContext *avFormatContext, int index) {
@@ -47,6 +48,18 @@ void onVideoStreamFound(JNIEnv *env, jobject jMediaInfoBuilder, AVFormatContext
 
     auto codecDescriptor = avcodec_descriptor_get(parameters->codec_id);
 
+    int64_t frameLoaderContextHandle = -1;
+    auto *decoder = avcodec_find_decoder(parameters->codec_id);
+    if (decoder != nullptr) {
+        auto *frameLoaderContext = (FrameLoaderContext *) malloc(sizeof(FrameLoaderContext));
+        frameLoaderContext->avFormatContext = avFormatContext;
+        frameLoaderContext->parameters = parameters;
+        frameLoaderContext->avVideoCodec = decoder;
+        frameLoaderContext->videoStreamIndex = index;
+        frameLoaderContextHandle = frame_loader_context_to_handle(frameLoaderContext);
+    }
+
+
     AVRational guessedFrameRate = av_guess_frame_rate(avFormatContext, avFormatContext->streams[index], nullptr);
     double resultFrameRate = guessedFrameRate.den == 0 ? 0.0 : guessedFrameRate.num / (double) guessedFrameRate.den;
 
@@ -66,7 +79,8 @@ void onVideoStreamFound(JNIEnv *env, jobject jMediaInfoBuilder, AVFormatContext
             parameters->bit_rate,
             resultFrameRate,
             parameters->width,
-            parameters->height);
+            parameters->height,
+            frameLoaderContextHandle);
 }
 
 void onAudioStreamFound(JNIEnv *env, jobject jMediaInfoBuilder, AVFormatContext *avFormatContext, int index) {
@@ -141,7 +155,7 @@ void media_info_build(JNIEnv *env, jobject jMediaInfoBuilder, const char *uri) {
         AVMediaType type = parameters->codec_type;
         switch (type) {
             case AVMEDIA_TYPE_VIDEO:
-                onVideoStreamFound(env, jMediaInfoBuilder, avFormatContext, pos);
+                onVideoStreamFound(env, jMediaInfoBuilder, avFormatContext, pos);
                 break;
             case AVMEDIA_TYPE_AUDIO:
                 onAudioStreamFound(env, jMediaInfoBuilder, avFormatContext, pos);
@@ -151,7 +165,6 @@ void media_info_build(JNIEnv *env, jobject jMediaInfoBuilder, const char *uri) {
                 break;
         }
     }
-    avformat_free_context(avFormatContext);
 }
 
 extern "C"
diff --git a/mediainfo/src/main/cpp/utils.cpp b/mediainfo/src/main/cpp/utils.cpp
index f22a8f1..8a52d85 100644
--- a/mediainfo/src/main/cpp/utils.cpp
+++ b/mediainfo/src/main/cpp/utils.cpp
@@ -61,7 +61,7 @@ int utils_fields_init(JavaVM *vm) {
     GET_ID(GetMethodID,
            fields.MediaInfoBuilder.onVideoStreamFoundID,
            fields.MediaInfoBuilder.clazz,
-           "onVideoStreamFound", "(ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;IJDII)V"
+           "onVideoStreamFound", "(ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;IJDIIJ)V"
     );
 
     GET_ID(GetMethodID,
diff --git a/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/FrameLoader.kt b/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/FrameLoader.kt
new file mode 100644
index 0000000..34ba5d8
--- /dev/null
+++ b/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/FrameLoader.kt
@@ -0,0 +1,24 @@
+package io.github.anilbeesetti.nextlib.mediainfo
+
+import android.graphics.Bitmap
+
+class FrameLoader internal constructor(private var frameLoaderContextHandle: Long) {
+
+    fun loadFrameInto(bitmap: Bitmap, durationMillis: Long): Boolean {
+        require(frameLoaderContextHandle != -1L)
+        return nativeLoadFrame(frameLoaderContextHandle, durationMillis, bitmap)
+    }
+
+    fun release() {
+        nativeRelease(frameLoaderContextHandle)
+        frameLoaderContextHandle = -1
+    }
+
+    companion object {
+        @JvmStatic
+        private external fun nativeRelease(handle: Long)
+
+        @JvmStatic
+        private external fun nativeLoadFrame(handle: Long, durationMillis: Long, bitmap: Bitmap): Boolean
+    }
+}
\ No newline at end of file
diff --git a/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/MediaInfo.kt b/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/MediaInfo.kt
index 645e66f..8d25acf 100644
--- a/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/MediaInfo.kt
+++ b/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/MediaInfo.kt
@@ -1,9 +1,35 @@
 package io.github.anilbeesetti.nextlib.mediainfo
 
+import android.graphics.Bitmap
+
 data class MediaInfo(
     val format: String,
     val duration: Long,
     val videoStream: VideoStream?,
     val audioStreams: List<AudioStream>,
-    val subtitleStreams: List<SubtitleStream>
-)
+    val subtitleStreams: List<SubtitleStream>,
+    private val frameLoaderContext: Long?
+) {
+
+    private var frameLoader = frameLoaderContext?.let { FrameLoader(frameLoaderContext) }
+
+    val supportsFrameLoading: Boolean = frameLoader != null
+
+    /**
+     * Retrieves a video frame from the video stream as a Bitmap at a specific timestamp in milliseconds.
+     *
+     * @param durationMillis The timestamp in milliseconds at which to retrieve the video frame.
+     *                       If set to -1, the frame will be retrieved at one-third of the video's duration.
+     * @return A Bitmap containing the video frame if retrieval is successful, or null if an error occurs.
+     */
+    fun getFrame(durationMillis: Long = -1): Bitmap? {
+        val bitmap = Bitmap.createBitmap(videoStream!!.frameWidth, videoStream.frameHeight, Bitmap.Config.ARGB_8888)
+        val result = frameLoader?.loadFrameInto(bitmap, durationMillis)
+        return if (result == true) bitmap else null
+    }
+
+    fun release() {
+        frameLoader?.release()
+        frameLoader = null
+    }
+}
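
Usage note (a sketch, not part of the patch): how a caller might use the new MediaInfo API. The way the MediaInfo instance is obtained is outside this diff; extractPoster is a hypothetical caller.

import android.graphics.Bitmap

// Assumes a MediaInfo obtained elsewhere (e.g. from MediaInfoBuilder; its entry point is not shown in this diff).
fun extractPoster(mediaInfo: MediaInfo): Bitmap? {
    if (!mediaInfo.supportsFrameLoading) return null
    // durationMillis = -1 makes the native side seek to one third of the stream's duration.
    val frame = mediaInfo.getFrame(durationMillis = -1)
    mediaInfo.release() // frees the native FrameLoaderContext; the returned Bitmap stays valid
    return frame
}
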
diff --git a/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/MediaInfoBuilder.kt b/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/MediaInfoBuilder.kt
index eb05acd..681bd76 100644
--- a/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/MediaInfoBuilder.kt
+++ b/mediainfo/src/main/java/io/github/anilbeesetti/nextlib/mediainfo/MediaInfoBuilder.kt
@@ -13,6 +13,7 @@ class MediaInfoBuilder(private val context: Context) {
 
     private var fileFormatName: String? = null
     private var duration: Long? = null
+    private var frameLoaderContextHandle: Long? = null
     private var videoStream: VideoStream? = null
     private var audioStreams = mutableListOf<AudioStream>()
     private var subtitleStreams = mutableListOf<SubtitleStream>()
@@ -54,7 +55,8 @@ class MediaInfoBuilder(private val context: Context) {
                 duration!!,
                 videoStream,
                 audioStreams,
-                subtitleStreams
+                subtitleStreams,
+                frameLoaderContextHandle
             )
         } else null
     }
@@ -94,6 +96,7 @@ class MediaInfoBuilder(private val context: Context) {
         frameRate: Double,
         frameWidth: Int,
         frameHeight: Int,
+        frameLoaderContext: Long
     ) {
         if (videoStream == null) {
             videoStream = VideoStream(
@@ -107,6 +110,9 @@ class MediaInfoBuilder(private val context: Context) {
                 frameWidth = frameWidth,
                 frameHeight = frameHeight
             )
+            if (frameLoaderContext != -1L) {
+                frameLoaderContextHandle = frameLoaderContext
+            }
         }
     }
 
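
Ownership note (a sketch, not part of the patch): media_info_build no longer calls avformat_free_context, so once a frame loader handle is handed to the JVM side the AVFormatContext appears to stay open until MediaInfo.release() frees the FrameLoaderContext. A hedged sketch of a call pattern that always releases the native context; useFrame is a hypothetical extension, not part of the library.

import android.graphics.Bitmap

// Hypothetical wrapper: guarantees the native FrameLoaderContext (and its AVFormatContext)
// is freed even if frame handling throws.
inline fun <T> MediaInfo.useFrame(timeMillis: Long = -1, block: (Bitmap?) -> T): T {
    return try {
        block(if (supportsFrameLoading) getFrame(timeMillis) else null)
    } finally {
        release()
    }
}
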