Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Added initial version of transcoding #114

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 10 additions & 1 deletion common/include/VkVideoCore/VkVideoCoreProfile.h
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,12 @@ class VkVideoCoreProfile
VkVideoComponentBitDepthFlagsKHR lumaBitDepth = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR,
VkVideoComponentBitDepthFlagsKHR chromaBitDepth = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR,
uint32_t videoH26xProfileIdc = 0,
VkVideoEncodeTuningModeKHR tuningMode = VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR)
#if (_TRANSCODING)
VkVideoEncodeUsageInfoKHR InEncodeUsage = {}
#else
VkVideoEncodeTuningModeKHR tuningMode = VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR
#endif // _TRANSCODING
)
: m_profile({VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR, NULL,
videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth}),
m_profileList({VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR, NULL, 1, &m_profile})
Expand All @@ -210,8 +215,12 @@ class VkVideoCoreProfile
VkVideoEncodeAV1ProfileInfoKHR encodeAV1ProfilesRequest;
VkBaseInStructure* pVideoProfileExt = NULL;

#if (_TRANSCODING)
VkVideoEncodeUsageInfoKHR encodeUsageInfoRequest = InEncodeUsage;
#else
VkVideoEncodeUsageInfoKHR encodeUsageInfoRequest{VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR,
NULL, 0, 0, tuningMode};
#endif // _TRANSCODING
VkBaseInStructure* pEncodeUsageInfo = (VkBaseInStructure*)&encodeUsageInfoRequest;

if (videoCodecOperation == VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR) {
Expand Down
16 changes: 16 additions & 0 deletions common/libs/VkCodecUtils/FrameProcessor.h
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,12 @@

#include "VkCodecUtils/VkVideoRefCountBase.h"
#include "VkCodecUtils/ProgramConfig.h"
#if (_TRANSCODING)
#include "VkCodecUtils/VulkanDecodedFrame.h"
#include "VkCodecUtils/VulkanEncoderFrameProcessor.h"
#include "VkCodecUtils/ProgramConfig.h"
#include "VkVideoEncoder/VkEncoderConfig.h"
#endif // _TRANSCODING

class Shell;

Expand Down Expand Up @@ -65,6 +71,16 @@ class FrameProcessor : public VkVideoRefCountBase {
const VkSemaphore* pWaitSemaphores = nullptr,
uint32_t signalSemaphoreCount = 0,
const VkSemaphore* pSignalSemaphores = nullptr) = 0;
#if (_TRANSCODING)
virtual bool OnFrameTranscoding( int32_t renderIndex,
ProgramConfig* programConfig,
VkSharedBaseObj<EncoderConfig>& encoderConfig,
uint32_t waitSemaphoreCount = 0,
const VkSemaphore* pWaitSemaphores = nullptr,
uint32_t signalSemaphoreCount = 0,
const VkSemaphore* pSignalSemaphores = nullptr,
VulkanDecodedFrame* pLastFrameDecoded = nullptr) = 0;
#endif // _TRANSCODING

uint64_t GetTimeDiffNanoseconds(bool updateStartTime = true)
{
Expand Down
3 changes: 3 additions & 0 deletions common/libs/VkCodecUtils/HelpersDispatchTable.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,9 @@ void InitDispatchTableMiddle(VkInstance instance, bool include_bottom, VkInterfa
#ifdef VK_USE_VIDEO_QUEUE
pVkFunctions->GetPhysicalDeviceVideoCapabilitiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR>(getInstanceProcAddrFunc(instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR"));
#endif
#ifdef VK_USE_VIDEO_QUEUE
pVkFunctions->GetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR>(getInstanceProcAddrFunc(instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR"));
#endif

if (!include_bottom)
return;
Expand Down
1 change: 1 addition & 0 deletions common/libs/VkCodecUtils/HelpersDispatchTable.h
Original file line number Diff line number Diff line change
Expand Up @@ -277,6 +277,7 @@ struct VkInterfaceFunctions {
// VK_KHR_video_queue
PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR GetPhysicalDeviceVideoFormatPropertiesKHR;
PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR GetPhysicalDeviceVideoCapabilitiesKHR;
PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR GetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR;
PFN_vkCreateVideoSessionKHR CreateVideoSessionKHR;
PFN_vkDestroyVideoSessionKHR DestroyVideoSessionKHR;
PFN_vkCreateVideoSessionParametersKHR CreateVideoSessionParametersKHR;
Expand Down
14 changes: 13 additions & 1 deletion common/libs/VkCodecUtils/ProgramConfig.h
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,9 @@ struct ProgramConfig {
{"--help", nullptr, 0, "Show this help",
[argv](const char **, const ProgramArgs &a) {
int rtn = showHelp(argv, a);
#if (!_TRANSCODING) // transcoding: should print encode info as well
exit(EXIT_SUCCESS);
#endif // !_TRANSCODING
return rtn;
}},
{"--enableStrDemux", nullptr, 0, "Enable stream demuxing",
Expand All @@ -122,7 +124,12 @@ struct ProgramConfig {
enableStreamDemuxing = false;
return true;
}},
{"--codec", nullptr, 1, "Codec to decode",
{
#if (!_TRANSCODING) // transcoding: to prevent overlap with encoder's codec option
"--codec", nullptr, 1, "Codec to decode",
#else
"--codec-input", nullptr, 1, "Codec to decode",
#endif // !_TRANSCODING
[this](const char **args, const ProgramArgs &a) {
if ((strcmp(args[0], "hevc") == 0) ||
(strcmp(args[0], "h265") == 0)) {
Expand Down Expand Up @@ -330,10 +337,15 @@ struct ProgramConfig {
(a.short_flag != nullptr && strcmp(argv[i], a.short_flag) == 0);
});
if (flag == spec.end()) {
#if (!_TRANSCODING) // transcoding: should parse encode info after decode info as well
std::cerr << "Unknown argument \"" << argv[i] << "\"" << std::endl;
std::cout << std::endl;
continue;
showHelp(argv, spec);
exit(EXIT_FAILURE);
#else
continue;
#endif // !_TRANSCODING
}

if (i + flag->numArgs >= argc) {
Expand Down
2 changes: 1 addition & 1 deletion common/libs/VkCodecUtils/VkImageResource.h
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ class VkImageResourceView : public VkVideoRefCountBase
}

operator VkImageView() const { return m_imageViews[0]; }
VkImageView GetImageView() const { return m_imageViews[0]; }
    // Returns the image view at index i; i = 0 preserves the previous
    // behavior (the primary/combined view).
    // NOTE(review): no bounds check and a signed index — caller must ensure
    // i is a valid index into m_imageViews (range not visible here; confirm
    // against how GetPlaneImageView() sizes the array).
    VkImageView GetImageView(int i = 0) const { return m_imageViews[i]; }
uint32_t GetNumberOfPlanes() const { return m_numPlanes; }
VkImageView GetPlaneImageView(uint32_t planeIndex = 0) const { assert(planeIndex < m_numPlanes); return m_imageViews[planeIndex + 1]; }
VkDevice GetDevice() const { return *m_vkDevCtx; }
Expand Down
10 changes: 9 additions & 1 deletion common/libs/VkCodecUtils/VkVideoQueue.h
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,10 @@
#define _VKCODECUTILS_VKVIDEOQUEUE_H_

#include "VkCodecUtils/VkVideoRefCountBase.h"
#if (_TRANSCODING)
#include "VkCodecUtils/ProgramConfig.h"
#include "VkVideoEncoder/VkEncoderConfig.h"
#endif // _TRANSCODING

template<class FrameDataType>
class VkVideoQueue : public VkVideoRefCountBase {
Expand All @@ -30,7 +34,11 @@ class VkVideoQueue : public VkVideoRefCountBase {
virtual VkFormat GetFrameImageFormat(int32_t* pWidth = nullptr,
int32_t* pHeight = nullptr,
int32_t* pBitDepth = nullptr) const = 0;
virtual int32_t GetNextFrame(FrameDataType* pFrame, bool* endOfStream) = 0;
virtual int32_t GetNextFrame(FrameDataType* pFrame, bool* endOfStream
#if (_TRANSCODING)
, ProgramConfig* programConfig = nullptr, VkSharedBaseObj<EncoderConfig>* encoderConfig = nullptr
#endif // _TRANSCODING
) = 0;
virtual int32_t ReleaseFrame(FrameDataType* pDisplayedFrame) = 0;
public:
virtual ~VkVideoQueue() {};
Expand Down
172 changes: 172 additions & 0 deletions common/libs/VkCodecUtils/VulkanFrame.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,9 @@
#include "VulkanFrame.h"
#include "vk_enum_string_helper.h"
#include "VkVideoCore/DecodeFrameBufferIf.h"
#if (_TRANSCODING)
#include "VkVideoEncoder/VkVideoEncoder.h"
#endif // _TRANSCODING

template<class FrameDataType>
VulkanFrame<FrameDataType>::VulkanFrame(const VulkanDeviceContext* vkDevCtx,
Expand Down Expand Up @@ -270,6 +273,37 @@ bool VulkanFrame<FrameDataType>::OnKey(Key key)
return true;
}

#if (_TRANSCODING)
// Applies a tuning preset to the encoder configuration based on the selected
// encoding profile before transcoding starts.
//
// LOW_LATENCY_STREAMING: CBR, no B-frames, open-ended GOP (no IDR period) to
// minimize end-to-end latency.
// ARCHIVING: VBR with a full GOP structure, favoring compression efficiency.
//
// @param encoderConfig  Encoder configuration mutated in place.
// @return 0 (currently always succeeds).
static int InitPreset(VkSharedBaseObj<EncoderConfig>& encoderConfig)
{
    // H.265 encode path forces quality level 0 here.
    // NOTE(review): presumably an implementation limitation — confirm.
    if (encoderConfig->codec == VkVideoCodecOperationFlagBitsKHR::VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR) {
        encoderConfig->qualityLevel = 0;
    }

    if (encoderConfig->encodingProfile == EncoderConfig::LOW_LATENCY_STREAMING)
    {
        encoderConfig->rateControlMode = VkVideoEncodeRateControlModeFlagBitsKHR::VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR;
        encoderConfig->gopStructure.SetConsecutiveBFrameCount(0);
        // Max uint16_t value: effectively an infinite GOP.
        encoderConfig->gopStructure.SetGopFrameCount(static_cast<uint16_t>(-1));
        encoderConfig->gopStructure.SetIdrPeriod(0);
        encoderConfig->gopStructure.SetLastFrameType(VkVideoGopStructure::FrameType::FRAME_TYPE_P); // TODO(review): confirm the correct last-frame type
        encoderConfig->encodeUsageHints = VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR;
        encoderConfig->encodeContentHints = VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR;
        // encoderConfig->gopStructure.SetTemporalLayerCount(3);
    }
    else if (encoderConfig->encodingProfile == EncoderConfig::ARCHIVING)
    {
        encoderConfig->rateControlMode = VkVideoEncodeRateControlModeFlagBitsKHR::VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR;
        // maxBitrate is only consumed in Variable Bit Rate (VBR) mode and is
        // ignored for Constant Bit Rate (CBR) mode.
        encoderConfig->maxBitrate = static_cast<uint32_t>(1.2f * encoderConfig->averageBitrate);
        encoderConfig->gopStructure.SetConsecutiveBFrameCount(3);
        encoderConfig->gopStructure.SetIdrPeriod(MAX_GOP_SIZE);
        encoderConfig->gopStructure.SetGopFrameCount(MAX_GOP_SIZE);
        encoderConfig->gopStructure.SetTemporalLayerCount(1);
        encoderConfig->encodeUsageHints = VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR;
        encoderConfig->encodeContentHints = VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR;
    } // (removed stray ';' that formed an empty statement after this block)
    return 0;
}
#endif // _TRANSCODING

template<class FrameDataType>
bool VulkanFrame<FrameDataType>::OnFrame( int32_t renderIndex,
uint32_t waitSemaphoreCount,
Expand Down Expand Up @@ -394,13 +428,151 @@ bool VulkanFrame<FrameDataType>::OnFrame( int32_t renderIndex,
pSignalSemaphores,
pLastDecodedFrame);

// VkImageLastDecodedFrame()

if (VK_SUCCESS != result) {
return false;
}

return continueLoop;
}

#if (_TRANSCODING)
// Generic fallback for OnFrameTranscoding(): transcoding is only implemented
// for the VulkanDecodedFrame specialization that follows in this file. This
// generic instantiation must never be called; it asserts in debug builds and
// reports failure (returns false) otherwise.
template<class FrameDataType>
bool VulkanFrame<FrameDataType>::OnFrameTranscoding( int32_t renderIndex,
                                    ProgramConfig* programConfig,
                                    VkSharedBaseObj<EncoderConfig>& encoderConfig,
                                    uint32_t waitSemaphoreCount,
                                    const VkSemaphore* pWaitSemaphores,
                                    uint32_t signalSemaphoreCount,
                                    const VkSemaphore* pSignalSemaphores,
                                    VulkanDecodedFrame* pLastDecodedFrameRet)
{
    // Silence unused-parameter warnings: this overload intentionally does nothing.
    (void)renderIndex;
    (void)programConfig;
    (void)encoderConfig;
    (void)waitSemaphoreCount;
    (void)pWaitSemaphores;
    (void)signalSemaphoreCount;
    (void)pSignalSemaphores;
    (void)pLastDecodedFrameRet;
    assert(!"OnFrameTranscoding() must not be used by the encoder frame processor");
    return false;
}

// Transcoding specialization: releases the previously consumed decoded frame,
// pulls the next decoded frame from the video queue (passing the encoder
// configuration through so the decoded frame can be re-encoded downstream),
// and optionally draws it when a graphics renderer is attached.
// Returns true while more frames remain; false at end of stream or on a
// failed draw.
// NOTE(review): pLastDecodedFrameRet is currently unused in this
// specialization — confirm whether callers expect it to be filled in.
template<>
bool VulkanFrame<VulkanDecodedFrame>::OnFrameTranscoding( int32_t renderIndex,
                              ProgramConfig* programConfig,
                              VkSharedBaseObj<EncoderConfig>& encoderConfig,
                              uint32_t waitSemaphoreCount,
                              const VkSemaphore* pWaitSemaphores,
                              uint32_t signalSemaphoreCount,
                              const VkSemaphore* pSignalSemaphores,
                              VulkanDecodedFrame* pLastDecodedFrameRet)
{
    // Apply the encoding-profile preset. NOTE(review): this runs on every
    // frame, not just the first — confirm that is intended.
    InitPreset(encoderConfig);
    bool continueLoop = true;
    const bool dumpDebug = false;
    // A negative render index marks a "training" frame that must not consume
    // anything from the decode queue.
    const bool trainFrame = (renderIndex < 0);
    const bool gfxRendererIsEnabled = (m_videoRenderer != nullptr);
    m_frameCount++;

    // Periodic FPS reporting (normal path) vs. per-frame timing dump (debug).
    if (dumpDebug == false) {
        bool displayTimeNow = false;
        float fps = GetFrameRateFps(displayTimeNow);
        if (displayTimeNow) {
            std::cout << "\t\tFrame " << m_frameCount << ", FPS: " << fps << std::endl;
        }
    } else {
        uint64_t timeDiffNanoSec = GetTimeDiffNanoseconds();
        std::cout << "\t\t Time nanoseconds: " << timeDiffNanoSec <<
                     " milliseconds: " << timeDiffNanoSec / 1000 <<
                     " rate: " << 1000000000.0 / timeDiffNanoSec << std::endl;
    }

    VulkanDecodedFrame* pLastDecodedFrame = nullptr;
    // FrameDataType* pLastDecodedFrame = nullptr;

    if (m_videoQueue->IsValid() && !trainFrame) {

        pLastDecodedFrame = &m_frameData[m_frameDataIndex];

        // Graphics and present stages are not enabled.
        // Make sure the frame complete query or fence are signaled (video frame is processed) before returning the frame.
        // NOTE(review): the leading `false &&` deliberately disables this whole
        // wait path — confirm whether it should be re-enabled or removed.
        if (false && (gfxRendererIsEnabled == false) && (pLastDecodedFrame != nullptr)) {

            if (pLastDecodedFrame->queryPool != VK_NULL_HANDLE) {
                auto startTime = std::chrono::steady_clock::now();
                VkQueryResultStatusKHR decodeStatus;
                // Blocking wait (VK_QUERY_RESULT_WAIT_BIT) on the decode
                // completion status of this frame's query slot.
                VkResult result = m_vkDevCtx->GetQueryPoolResults(*m_vkDevCtx,
                                                 pLastDecodedFrame->queryPool,
                                                 pLastDecodedFrame->startQueryId,
                                                 1,
                                                 sizeof(decodeStatus),
                                                 &decodeStatus,
                                                 sizeof(decodeStatus),
                                                 VK_QUERY_RESULT_WITH_STATUS_BIT_KHR | VK_QUERY_RESULT_WAIT_BIT);

                assert(result == VK_SUCCESS);
                assert(decodeStatus == VK_QUERY_RESULT_STATUS_COMPLETE_KHR);
                auto deltaTime = std::chrono::steady_clock::now() - startTime;
                auto diffMilliseconds = std::chrono::duration_cast<std::chrono::milliseconds>(deltaTime);
                auto diffMicroseconds = std::chrono::duration_cast<std::chrono::microseconds>(deltaTime);
                if (dumpDebug) {
                    std::cout << pLastDecodedFrame->pictureIndex << ": frameWaitTime: " <<
                                 diffMilliseconds.count() << "." << diffMicroseconds.count() << " mSec" << std::endl;
                }
            } else if (pLastDecodedFrame->frameCompleteFence != VkFence()) {
                // No query pool: fall back to waiting on the frame-complete
                // fence with a 100 ms timeout.
                VkResult result = m_vkDevCtx->WaitForFences(*m_vkDevCtx, 1, &pLastDecodedFrame->frameCompleteFence, true, 100 * 1000 * 1000 /* 100 mSec */);
                assert(result == VK_SUCCESS);
                if (result != VK_SUCCESS) {
                    fprintf(stderr, "\nERROR: WaitForFences() result: 0x%x\n", result);
                }
                result = m_vkDevCtx->GetFenceStatus(*m_vkDevCtx, pLastDecodedFrame->frameCompleteFence);
                assert(result == VK_SUCCESS);
                if (result != VK_SUCCESS) {
                    fprintf(stderr, "\nERROR: GetFenceStatus() result: 0x%x\n", result);
                }
            }
        }

        // Return the previous frame to the queue, then reuse its slot for the
        // next decoded frame.
        m_videoQueue->ReleaseFrame(pLastDecodedFrame);

        pLastDecodedFrame->Reset();

        bool endOfStream = false;
        int32_t numVideoFrames = 0;

        if (pLastDecodedFrame) {
            // Transcoding variant of GetNextFrame(): forwards the program and
            // encoder configuration alongside the decode request.
            numVideoFrames = m_videoQueue->GetNextFrame(pLastDecodedFrame, &endOfStream, programConfig, &encoderConfig);
        }
        if (endOfStream && (numVideoFrames < 0)) {
            // End of stream: stop the loop and print the final FPS summary.
            continueLoop = false;
            bool displayTimeNow = true;
            float fps = GetFrameRateFps(displayTimeNow);
            if (displayTimeNow) {
                std::cout << "\t\tFrame " << m_frameCount << ", FPS: " << fps << std::endl;
            }
        }
    }

    // Headless (no graphics renderer): just advance the ring-buffer index.
    if (gfxRendererIsEnabled == false) {
        m_frameDataIndex = (m_frameDataIndex + 1) % m_frameData.size();
        return continueLoop;
    }

    VkResult result = DrawFrame(renderIndex,
                                waitSemaphoreCount,
                                pWaitSemaphores,
                                signalSemaphoreCount,
                                pSignalSemaphores,
                                pLastDecodedFrame);

    // VkImageLastDecodedFrame()


    if (VK_SUCCESS != result) {
        return false;
    }

    return continueLoop;
}
#endif // _TRANSCODING

template<class FrameDataType>
VkResult VulkanFrame<FrameDataType>::DrawFrame( int32_t renderIndex,
Expand Down
10 changes: 10 additions & 0 deletions common/libs/VkCodecUtils/VulkanFrame.h
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,16 @@ class VulkanFrame : public FrameProcessor {
uint32_t signalSemaphoreCount = 0,
const VkSemaphore* pSignalSemaphores = nullptr);

#if (_TRANSCODING)
virtual bool OnFrameTranscoding(int32_t renderIndex,
ProgramConfig* programConfig,
VkSharedBaseObj<EncoderConfig>& encoderConfig,
uint32_t waitSemaphoreCount = 0,
const VkSemaphore* pWaitSemaphores = nullptr,
uint32_t signalSemaphoreCount = 0,
const VkSemaphore* pSignalSemaphores = nullptr,
VulkanDecodedFrame* pLastFrameDecoded = nullptr);
#endif // _TRANSCODING

VkResult DrawFrame( int32_t renderIndex,
uint32_t waitSemaphoreCount,
Expand Down
Loading