From aad9113bc6e5f9967fdfe8eef9797e50cacd6fa5 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Wed, 7 Feb 2024 20:43:46 +0100 Subject: [PATCH 01/53] Add a new UI feature called "Video AI-Enhancement" (VAE) Changes made: 1. Creation of a new class VideoEnhancement which check the liability to the feature. 2. Add the checkbox "Video AI-Enhancement" in the "Basic Settings" groupbox. 3. Disable VAE when fullscreen is selected 4. Add a registery record 5. On the Overlay and the mention "AI-Enhanced" when activated 6. Add a command line for the class VideoEnhancement --- .gitignore | 1 + app/app.pro | 2 + app/cli/commandlineparser.cpp | 3 + app/gui/SettingsView.qml | 106 ++++++++ app/main.cpp | 10 + app/settings/streamingpreferences.cpp | 3 + app/settings/streamingpreferences.h | 6 + app/streaming/video/ffmpeg.cpp | 14 +- app/streaming/video/ffmpeg.h | 2 + app/streaming/video/videoenhancement.cpp | 313 +++++++++++++++++++++++ app/streaming/video/videoenhancement.h | 71 +++++ 11 files changed, 528 insertions(+), 3 deletions(-) create mode 100644 app/streaming/video/videoenhancement.cpp create mode 100644 app/streaming/video/videoenhancement.h diff --git a/.gitignore b/.gitignore index 112e91394..869b5a3a2 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ **/.vs/ +**/.vscode/ build/ config.tests/*/.qmake.stash config.tests/*/Makefile diff --git a/app/app.pro b/app/app.pro index ff0269e65..e46f8e683 100644 --- a/app/app.pro +++ b/app/app.pro @@ -199,6 +199,7 @@ SOURCES += \ gui/sdlgamepadkeynavigation.cpp \ streaming/video/overlaymanager.cpp \ backend/systemproperties.cpp \ + streaming/video/videoenhancement.cpp \ wm.cpp HEADERS += \ @@ -207,6 +208,7 @@ HEADERS += \ cli/pair.h \ settings/compatfetcher.h \ settings/mappingfetcher.h \ + streaming/video/videoenhancement.h \ utils.h \ backend/computerseeker.h \ backend/identitymanager.h \ diff --git a/app/cli/commandlineparser.cpp b/app/cli/commandlineparser.cpp index 23fc65cd3..0d2b7a99e 100644 --- 
a/app/cli/commandlineparser.cpp +++ b/app/cli/commandlineparser.cpp @@ -470,6 +470,9 @@ void StreamCommandLineParser::parse(const QStringList &args, StreamingPreference // Resolve --frame-pacing and --no-frame-pacing options preferences->framePacing = parser.getToggleOptionValue("frame-pacing", preferences->framePacing); + // Resolve --video-enhancement and --no-video-enhancement options + preferences->videoEnhancement = parser.getToggleOptionValue("video-enhancement", preferences->videoEnhancement); + // Resolve --mute-on-focus-loss and --no-mute-on-focus-loss options preferences->muteOnFocusLoss = parser.getToggleOptionValue("mute-on-focus-loss", preferences->muteOnFocusLoss); diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index da6e310da..449964543 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -7,12 +7,16 @@ import StreamingPreferences 1.0 import ComputerManager 1.0 import SdlGamepadKeyNavigation 1.0 import SystemProperties 1.0 +import VideoEnhancement 1.0 Flickable { id: settingsPage objectName: qsTr("Settings") signal languageChanged() + signal displayModeChanged() + signal windowModeChanged() + signal videoEnhancementChanged() boundsBehavior: Flickable.OvershootBounds @@ -231,6 +235,9 @@ Flickable { recalculateWidth() lastIndexValue = currentIndex + + // Signal other controls + displayModeChanged() } id: resolutionComboBox @@ -292,6 +299,9 @@ Flickable { else { updateBitrateForSelection() } + + // Signal other controls + displayModeChanged() } NavigableDialog { @@ -310,6 +320,9 @@ Flickable { onClosed: { widthField.clear() heightField.clear() + + // Signal other controls + displayModeChanged() } onRejected: { @@ -759,9 +772,25 @@ Flickable { activated(currentIndex) } + // Video Super-Resolution does not work in exclusive full screen, so auto switch do borderless window + // [TODO] This may change according to what AMD and Intel will implement, if they can allow video enhancement in fullscreen + function checkVSR(){ 
+ if(videoEnhancementCheck.checked && model.get(currentIndex).val === StreamingPreferences.WM_FULLSCREEN){ + for (var i = 0; i < model.count; i++) { + var thisWm = model.get(i).val; + if (model.get(i).val === StreamingPreferences.WM_FULLSCREEN_DESKTOP) { + currentIndex = i + break + } + } + activated(currentIndex) + } + } + Component.onCompleted: { reinitialize() languageChanged.connect(reinitialize) + videoEnhancementChanged.connect(checkVSR) } id: windowModeComboBox @@ -771,6 +800,8 @@ Flickable { textRole: "text" onActivated: { StreamingPreferences.windowMode = model.get(currentIndex).val + // Signal others + windowModeChanged() } ToolTip.delay: 1000 @@ -812,6 +843,81 @@ Flickable { ToolTip.visible: hovered ToolTip.text: qsTr("Frame pacing reduces micro-stutter by delaying frames that come in too early") } + + CheckBox { + id: videoEnhancementCheck + width: parent.width + hoverEnabled: true + text: qsTr("Video AI-Enhancement") + font.pointSize: 12 + visible: VideoEnhancement.isUIvisible() + enabled: true + checked: StreamingPreferences.videoEnhancement + property bool checkedSaved + + onCheckedChanged: { + StreamingPreferences.videoEnhancement = checked + // The value of checkedSaved is set while changing the WindowMode, need to find a way not to. 
+ if(StreamingPreferences.windowMode !== StreamingPreferences.WM_FULLSCREEN){ + checkedSaved = checked + } + // Signal others + videoEnhancementChanged() + } + ToolTip.delay: 1000 + ToolTip.timeout: 5000 + ToolTip.visible: hovered + ToolTip.text: + qsTr("Enhance video quality by utilizing the GPU's AI-Enhancement capabilities.") + + qsTr("\nThis feature effectively upscales, reduces compression artifacts and enhances the clarity of streamed content.") + + qsTr("\nNote:") + + qsTr("\n - For optimal performance, use the software in borderless window mode; this feature is not applicable in fullscreen mode.") + + qsTr("\n - If available, ensure that appropriate settings, such as VSR (Virtual Super Resolution), are enabled in your GPU driver configurations.") + + qsTr("\n - Be advised that using this feature on laptops running on battery power may lead to significant battery drain.") + + function reinitialize() { + if(typeof(checkedSaved) === "undefined"){ + checkedSaved = checked + } + if(!VideoEnhancement.isUIvisible()){ + checked = false + checkedSaved = checked + visible = false + } + // If Exclusive fullscreen is selected, disabled the VSR as it does not work in this window mode + else if(StreamingPreferences.windowMode === StreamingPreferences.WM_FULLSCREEN){ + checked = false + } + else { + // Get back the saved status + checked = checkedSaved + } + // Indicate if the feature is available but not officially deployed by the Vendor + if(VideoEnhancement.isExperimental()){ + text = qsTr("Video AI-Enhancement (Experimental)") + } + } + + Timer { + id: vsrTimer + interval: 300 // 0 to make it async to get the final status of StreamingPreferences.windowMode (which is set too late in the process) + running: false // Don't start the timer immediately + repeat: false // Run only once + + onTriggered: { + parent.reinitialize() + } + } + + Component.onCompleted: { + checkedSaved = checked + reinitialize() + windowModeChanged.connect(() => { + checked = checkedSaved + 
vsrTimer.start() + }) + } + } } } diff --git a/app/main.cpp b/app/main.cpp index 2a3768613..1f0f26df1 100644 --- a/app/main.cpp +++ b/app/main.cpp @@ -43,6 +43,7 @@ #include "streaming/session.h" #include "settings/streamingpreferences.h" #include "gui/sdlgamepadkeynavigation.h" +#include "streaming/video/videoenhancement.h" #if !defined(QT_DEBUG) && defined(Q_OS_WIN32) // Log to file for release Windows builds @@ -636,6 +637,15 @@ int main(int argc, char *argv[]) [](QQmlEngine* qmlEngine, QJSEngine*) -> QObject* { return new StreamingPreferences(qmlEngine); }); + qmlRegisterSingletonType("VideoEnhancement", 1, 0, "VideoEnhancement", + [](QQmlEngine *engine, QJSEngine *scriptEngine) -> QObject* { + Q_UNUSED(engine) + Q_UNUSED(scriptEngine) + VideoEnhancement* videoEnhancement = &VideoEnhancement::getInstance(); + // Set the ownership to CppOwnership to avoid an error when QLM engine tries to delete the object + QQmlEngine::setObjectOwnership(videoEnhancement, QQmlEngine::CppOwnership); + return videoEnhancement; + }); // Create the identity manager on the main thread IdentityManager::get(); diff --git a/app/settings/streamingpreferences.cpp b/app/settings/streamingpreferences.cpp index 6a883bc08..05fe6c4fa 100644 --- a/app/settings/streamingpreferences.cpp +++ b/app/settings/streamingpreferences.cpp @@ -30,6 +30,7 @@ #define SER_ABSTOUCHMODE "abstouchmode" #define SER_STARTWINDOWED "startwindowed" #define SER_FRAMEPACING "framepacing" +#define SER_VIDEOENHANCEMENT "videoenhancement" #define SER_CONNWARNINGS "connwarnings" #define SER_UIDISPLAYMODE "uidisplaymode" #define SER_RICHPRESENCE "richpresence" @@ -93,6 +94,7 @@ void StreamingPreferences::reload() absoluteMouseMode = settings.value(SER_ABSMOUSEMODE, false).toBool(); absoluteTouchMode = settings.value(SER_ABSTOUCHMODE, true).toBool(); framePacing = settings.value(SER_FRAMEPACING, false).toBool(); + videoEnhancement = settings.value(SER_VIDEOENHANCEMENT, false).toBool(); connectionWarnings = 
settings.value(SER_CONNWARNINGS, true).toBool(); richPresence = settings.value(SER_RICHPRESENCE, true).toBool(); gamepadMouse = settings.value(SER_GAMEPADMOUSE, true).toBool(); @@ -275,6 +277,7 @@ void StreamingPreferences::save() settings.setValue(SER_ABSMOUSEMODE, absoluteMouseMode); settings.setValue(SER_ABSTOUCHMODE, absoluteTouchMode); settings.setValue(SER_FRAMEPACING, framePacing); + settings.setValue(SER_VIDEOENHANCEMENT, videoEnhancement); settings.setValue(SER_CONNWARNINGS, connectionWarnings); settings.setValue(SER_RICHPRESENCE, richPresence); settings.setValue(SER_GAMEPADMOUSE, gamepadMouse); diff --git a/app/settings/streamingpreferences.h b/app/settings/streamingpreferences.h index 0e2cf365e..49558ae3b 100644 --- a/app/settings/streamingpreferences.h +++ b/app/settings/streamingpreferences.h @@ -116,6 +116,8 @@ class StreamingPreferences : public QObject Q_PROPERTY(bool absoluteMouseMode MEMBER absoluteMouseMode NOTIFY absoluteMouseModeChanged) Q_PROPERTY(bool absoluteTouchMode MEMBER absoluteTouchMode NOTIFY absoluteTouchModeChanged) Q_PROPERTY(bool framePacing MEMBER framePacing NOTIFY framePacingChanged) + Q_PROPERTY(bool videoEnhancement MEMBER videoEnhancement NOTIFY videoEnhancementChanged) + Q_PROPERTY(bool videoEnhancementVisible MEMBER videoEnhancementVisible NOTIFY videoEnhancementVisibleChanged) Q_PROPERTY(bool connectionWarnings MEMBER connectionWarnings NOTIFY connectionWarningsChanged) Q_PROPERTY(bool richPresence MEMBER richPresence NOTIFY richPresenceChanged) Q_PROPERTY(bool gamepadMouse MEMBER gamepadMouse NOTIFY gamepadMouseChanged) @@ -152,6 +154,8 @@ class StreamingPreferences : public QObject bool absoluteMouseMode; bool absoluteTouchMode; bool framePacing; + bool videoEnhancement; + bool videoEnhancementVisible; bool connectionWarnings; bool richPresence; bool gamepadMouse; @@ -192,6 +196,8 @@ class StreamingPreferences : public QObject void uiDisplayModeChanged(); void windowModeChanged(); void framePacingChanged(); + void 
videoEnhancementChanged(); + void videoEnhancementVisibleChanged(); void connectionWarningsChanged(); void richPresenceChanged(); void gamepadMouseChanged(); diff --git a/app/streaming/video/ffmpeg.cpp b/app/streaming/video/ffmpeg.cpp index 6ecc6c88f..4453620f2 100644 --- a/app/streaming/video/ffmpeg.cpp +++ b/app/streaming/video/ffmpeg.cpp @@ -221,7 +221,8 @@ FFmpegVideoDecoder::FFmpegVideoDecoder(bool testOnly) m_VideoFormat(0), m_NeedsSpsFixup(false), m_TestOnly(testOnly), - m_DecoderThread(nullptr) + m_DecoderThread(nullptr), + m_VideoEnhancement(&VideoEnhancement::getInstance()) { SDL_zero(m_ActiveWndVideoStats); SDL_zero(m_LastWndVideoStats); @@ -709,15 +710,22 @@ void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output, i break; } + // Display if AI-Enhancement is enabled + const char* aiEnhanced = ""; + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + aiEnhanced = "AI-Enhanced"; + } + if (stats.receivedFps > 0) { if (m_VideoDecoderCtx != nullptr) { ret = snprintf(&output[offset], length - offset, - "Video stream: %dx%d %.2f FPS (Codec: %s)\n", + "Video stream: %dx%d %.2f FPS (Codec: %s) %s\n", m_VideoDecoderCtx->width, m_VideoDecoderCtx->height, stats.totalFps, - codecString); + codecString, + aiEnhanced); if (ret < 0 || ret >= length - offset) { SDL_assert(false); return; diff --git a/app/streaming/video/ffmpeg.h b/app/streaming/video/ffmpeg.h index eb3c1812b..536446f1e 100644 --- a/app/streaming/video/ffmpeg.h +++ b/app/streaming/video/ffmpeg.h @@ -6,6 +6,7 @@ #include "decoder.h" #include "ffmpeg-renderers/renderer.h" #include "ffmpeg-renderers/pacer/pacer.h" +#include "streaming/video/videoenhancement.h" extern "C" { #include @@ -94,6 +95,7 @@ class FFmpegVideoDecoder : public IVideoDecoder { bool m_TestOnly; SDL_Thread* m_DecoderThread; SDL_atomic_t m_DecoderThreadShouldQuit; + VideoEnhancement* m_VideoEnhancement; // Data buffers in the queued DU are not valid QQueue m_FrameInfoQueue; diff --git 
a/app/streaming/video/videoenhancement.cpp b/app/streaming/video/videoenhancement.cpp new file mode 100644 index 000000000..5db52aefd --- /dev/null +++ b/app/streaming/video/videoenhancement.cpp @@ -0,0 +1,313 @@ +#include "videoenhancement.h" +#include +#include +#include +#include +#include + +#include + +#pragma comment(lib, "Advapi32.lib") + +/** + * \brief Constructor (Singleton) + * + * Check the capacity to handle the AI-Enhancement features such as Video Super-Resolution or SDR to HDR, according to multiple parameters such as OS or Video driver. + * + * \return void + */ +VideoEnhancement::VideoEnhancement() +{ + if(!m_Initialized){ + setGPUinformation(); + // Avoid to set variables every call of the instance + m_Initialized = true; + } +} + +/** + * \brief Get the singleton instance + * + * Render the instance of the singleton + * + * \return VideoEnhancement instance + */ +VideoEnhancement &VideoEnhancement::getInstance() +{ + static VideoEnhancement instance; + return instance; +} + +/** + * \brief Retreive GPU information + * + * Retreive all GPU information: Vendor ID, Driver version, GPU name + * + * \return bool Returns true if it successfully retreived the GPU information + */ +bool VideoEnhancement::setGPUinformation() +{ + bool success = false; + +#ifdef Q_OS_WIN + + // Create a Direct3D 11 device + ID3D11Device* pD3DDevice = nullptr; + ID3D11DeviceContext* pContext = nullptr; + + HRESULT hr = D3D11CreateDevice( + nullptr, + D3D_DRIVER_TYPE_HARDWARE, + nullptr, + D3D11_CREATE_DEVICE_DEBUG, + nullptr, + 0, + D3D11_SDK_VERSION, + &pD3DDevice, + nullptr, + &pContext + ); + + IDXGIAdapter* pAdapter = nullptr; + IDXGIDevice* pDXGIDevice = nullptr; + // Get the DXGI device from the D3D11 device. + // It identifies which GPU is being used by the application in case of multiple one (like a iGPU with a dedicated GPU). 
+ if (SUCCEEDED(hr) && SUCCEEDED(pD3DDevice->QueryInterface(__uuidof(IDXGIDevice), (void**)&pDXGIDevice))) { + // Get the DXGI adapter from the DXGI device + if (SUCCEEDED(pDXGIDevice->GetAdapter(&pAdapter))) { + DXGI_ADAPTER_DESC adapterIdentifier; + if (SUCCEEDED(pAdapter->GetDesc(&adapterIdentifier))) { + // Convert wchar[128] to string + std::wstring description(adapterIdentifier.Description); + + // Set GPU information + m_VendorId = adapterIdentifier.VendorId; + m_GPUname = description; + m_DriverVersion = GetVideoDriverInfo(); + + qInfo() << "Active GPU: " << m_GPUname; + qInfo() << "Video Driver: " << m_DriverVersion; + + } + } + + } + + // Release resources + if (pD3DDevice) pD3DDevice->Release(); + if (pDXGIDevice) pDXGIDevice->Release(); + if (pAdapter) pAdapter->Release(); + + #endif + + return success; +} + +/** + * \brief Get the Video driver version + * + * \return int Returns the Video driver version as an integer + */ +int VideoEnhancement::GetVideoDriverInfo() +{ + + HKEY hKey = nullptr; + const wchar_t* SUBKEY = L"SYSTEM\\CurrentControlSet\\Control\\Video"; + + if (ERROR_SUCCESS != RegOpenKeyExW(HKEY_LOCAL_MACHINE, SUBKEY, 0, KEY_ENUMERATE_SUB_KEYS, &hKey)) + return m_DriverVersion; + + LSTATUS sta = ERROR_SUCCESS; + wchar_t keyName[128] = {}; + DWORD index = 0; + DWORD len; + + do + { + len = sizeof(keyName) / sizeof(wchar_t); + sta = RegEnumKeyExW(hKey, index, keyName, &len, nullptr, nullptr, nullptr, nullptr); + index++; + + if (sta != ERROR_SUCCESS) + continue; + + std::wstring subkey(SUBKEY); + subkey.append(L"\\"); + subkey.append(keyName); + subkey.append(L"\\"); + subkey.append(L"0000"); + DWORD lg; + + wchar_t desc[128] = {}; + lg = sizeof(desc) / sizeof(wchar_t); + if (ERROR_SUCCESS != RegGetValueW(HKEY_LOCAL_MACHINE, subkey.c_str(), L"DriverDesc", + RRF_RT_REG_SZ, nullptr, desc, &lg)) + continue; + + std::wstring s_desc(desc); + if (s_desc != m_GPUname) + continue; + + // Driver of interest found, we read version + wchar_t 
charVersion[64] = {}; + lg = sizeof(charVersion) / sizeof(wchar_t); + if (ERROR_SUCCESS != RegGetValueW(HKEY_LOCAL_MACHINE, subkey.c_str(), L"DriverVersion", + RRF_RT_REG_SZ, nullptr, charVersion, &lg)) + continue; + + std::wstring strVersion(charVersion); + + // Convert driver store version to Nvidia version + if (isVendorNVIDIA()) // NVIDIA + { + strVersion = std::regex_replace(strVersion, std::wregex(L"\\."), L""); + m_DriverVersion = std::stoi(strVersion.substr(strVersion.length() - 5, 5)); + } + else // AMD/Intel/WDDM + { + m_DriverVersion = std::stoi(strVersion.substr(0, strVersion.find('.'))); + } + } while (sta == ERROR_SUCCESS); + RegCloseKey(hKey); + + return m_DriverVersion; +} + +/** + * \brief Check if the vendor is AMD + * + * \return bool Returns true is the vendor is AMD + */ +bool VideoEnhancement::isVendorAMD(){ + return m_VendorId == VENDOR_ID_AMD; +} + +/** + * \brief Check if the vendor is Intel + * + * \return bool Returns true is the vendor is Intel + */ +bool VideoEnhancement::isVendorIntel(){ + return m_VendorId == VENDOR_ID_INTEL; +} + +/** + * \brief Check if the vendor is NVIDIA + * + * \return bool Returns true is the vendor is NVIDIA + */ +bool VideoEnhancement::isVendorNVIDIA(){ + return m_VendorId == VENDOR_ID_NVIDIA; +} + +/** + * \brief Check the Video Super-Resolution capability + * + * Check if the GPU used is capable of providing VSR feature according to its serie or driver version + * + * \return bool Returns true if the VSR feature is available + */ +bool VideoEnhancement::isVSRcapable(){ + if(isVendorAMD()){ + // [TODO] To be done once AMD provides the VSR solution + // Driver > 24 && RX 7000+ + } else if(isVendorIntel()){ + // All CPU with iGPU (Gen 10th), or dedicated GPU, are capable + if(m_DriverVersion >= MIN_VSR_DRIVER_VERSION_INTEL){ + return true; + } + } else if(isVendorNVIDIA()){ + // RTX VSR v1.5 supports NVIDIA RTX Series 20 starting from the Windows drive 545.84 (Oct 17, 2023). 
+ if( + m_GPUname.find(L"RTX") != std::wstring::npos + && m_DriverVersion >= MIN_VSR_DRIVER_VERSION_NVIDIA + ){ + return true; + } + } + return false; +} + +/** + * \brief Check the HDR capability + * + * Check if the GPU used is capable of providing SDR to HDR feature according to its serie or driver version + * + * \return bool Returns true if the HDR feature is available + */ +bool VideoEnhancement::isHDRcapable(){ + if(isVendorAMD()){ + // Not yet announced by AMD + } else if(isVendorIntel()){ + // Not yet announced by Intel + } else if(isVendorNVIDIA()){ + // RTX VSR v1.5 supports NVIDIA RTX Series 20 starting from the Windows drive 545.84 (Oct 17, 2023). + if( + m_GPUname.find(L"RTX") != std::wstring::npos + && m_DriverVersion >= MIN_HDR_DRIVER_VERSION_NVIDIA + ){ + return true; + } + } + return false; +} + +/** + * \brief Check the AI-Enhancement capability + * + * Check if the GPU used is capable of enhancing the video + * + * \return bool Returns true if the such capability is available + */ +bool VideoEnhancement::isEnhancementCapable(){ + return isVSRcapable() || isHDRcapable(); +} + +/** + * \brief Check if Video Enhancement feature is enabled + * + * \return bool Returns true if the Video Enhancement feature is enabled + */ +bool VideoEnhancement::isVideoEnhancementEnabled(){ + return m_Enabled; +} + +/** + * \brief Enable Video Enhancement feature + * + * \param bool activate Default is true, at true it enables the use of Video Enhancement feature + * \return bool Returns true if the Video Enhancement feature is available + */ +bool VideoEnhancement::enableVideoEnhancement(bool activate){ + m_Enabled = isEnhancementCapable() && activate; + return m_Enabled; +} + +/** + * \brief Enable Video Enhancement accessibility from the settings interface + * + * \param bool visible Default is true, at true it displays Video Enhancement feature + * \return bool Returns true if the Video Enhancement feature is available + */ +void 
VideoEnhancement::enableUIvisible(bool visible){ + m_UIvisible = visible; +} + +/** + * \brief Check if Video Enhancement feature is accessible from the settings interface + * + * \return bool Returns true if the Video Enhancement feature is accessible + */ +bool VideoEnhancement::isUIvisible(){ + return m_UIvisible; +} + +/** + * \brief Check if Video Enhancement feature is experimental from the vendor + * + * \return bool Returns true if the Video Enhancement feature is experimental + */ +bool VideoEnhancement::isExperimental(){ + // [Jan 31st 2024] AMD's is not yet available, Intel's is experimental, NVIDIA's is official + return isVendorIntel(); +} diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h new file mode 100644 index 000000000..24d8e7502 --- /dev/null +++ b/app/streaming/video/videoenhancement.h @@ -0,0 +1,71 @@ +#ifndef VIDEOENHANCEMENT_H +#define VIDEOENHANCEMENT_H + +#pragma once + +#include +#include +#include +#include +#include + +class VideoEnhancement : public QObject +{ + Q_OBJECT + +private: + + static VideoEnhancement* instance; + + bool m_Initialized = false; + bool m_Enabled = false; + bool m_UIvisible = true; // [Bruno] It should be false, and turn true by dxva2.cpp + + // Vendors' name (PCI Special Interest Group) + const int VENDOR_ID_AMD = 4098; + const int VENDOR_ID_INTEL = 32902; + const int VENDOR_ID_NVIDIA = 4318; + + // Minimum driver version accepted for VSR feature + const int MIN_VSR_DRIVER_VERSION_AMD = 24; // It is implemented from the driver 24.1.1 + const int MIN_VSR_DRIVER_VERSION_INTEL = 28; // It will ensure to cover 27.20 version + const int MIN_VSR_DRIVER_VERSION_NVIDIA = 54584; // NVIDIA driver name are always in the format XXX.XX (https://www.nvidia.com/en-gb/drivers/drivers-faq/) + + // Minimum driver version accepted for HDR feature + const int MIN_HDR_DRIVER_VERSION_AMD = 0; // To be determined, this feature has not yet been announced by AMD + const int 
MIN_HDR_DRIVER_VERSION_INTEL = 0; // To be determined, this feature has not yet been announced by Intel + const int MIN_HDR_DRIVER_VERSION_NVIDIA = 55123; // https://www.nvidia.com/download/driverResults.aspx/218114/en-us/ + + // GPU information + int m_VendorId = 0; + std::wstring m_GPUname = L"Unknown"; + int m_DriverVersion = 0; + + // Disable the constructor from outside to avoid a new instance + VideoEnhancement(); + + // Private copy constructor and assignment operator to prevent duplication + VideoEnhancement(const VideoEnhancement&); + VideoEnhancement& operator=(const VideoEnhancement&); + + bool setGPUinformation(); + int GetVideoDriverInfo(); + +public: + static VideoEnhancement& getInstance(); + bool isVendorAMD(); + bool isVendorIntel(); + bool isVendorNVIDIA(); + bool isEnhancementCapable(); + bool isVSRcapable(); + bool isHDRcapable(); + bool isVideoEnhancementEnabled(); + bool enableVideoEnhancement(bool activate = true); + void enableUIvisible(bool visible = true); + + Q_INVOKABLE bool isUIvisible(); + Q_INVOKABLE bool isExperimental(); + +}; + +#endif // VIDEOENHANCEMENT_H From d40a8f1e30aa69f54ef816932af080f5e78fd008 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Thu, 8 Feb 2024 02:45:22 +0100 Subject: [PATCH 02/53] Add AI Super Resolution for NVIDIA and Intel GPUs Adding VideoProcessor to D3D11VA to offline video processing from CPU to GPU, and leveraging additional GPU capabilities such as AI enhancement for the upscaling and some filtering. Changes made: 1. VideoProcessor is used to render the frame only when "Video AI-Enhancement" is enabled; when disabled, the whole process is unchanged. 2. Add methods to enable the Video Super Resolution for NVIDIA, and Intel. AMD method is currently empty, need to POC the solution with the AMF documentation. 3. Add methods to enable SDR to HDR. Currently only NVIDIA has such feature, but the code place is prepared if Intel and AMD will too. 4. 
Some existing variables local to a method (like BackBufferResource) changed to global scope to be consumed be also VideoProcessor methods. 5. In ::initialize(), the application checks if the system is capable of leveraging GPU AI enhancement, if yes, it inform the UI to display the feature. 6. ColorSpace setups (Source/Stream) for HDR are not optimal, further improvment might be possible. Issues observed are commented in the code at relevant places. --- .../video/ffmpeg-renderers/d3d11va.cpp | 747 +++++++++++++++++- .../video/ffmpeg-renderers/d3d11va.h | 80 +- app/streaming/video/videoenhancement.cpp | 4 +- app/streaming/video/videoenhancement.h | 4 +- 4 files changed, 814 insertions(+), 21 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 1ac76d019..f636ac337 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -7,13 +7,28 @@ #include "streaming/streamutils.h" #include "streaming/session.h" +#include "settings/streamingpreferences.h" +#include "streaming/video/videoenhancement.h" + +#include +#include +#include +#include +#include + +extern "C" { +#include +} #include #include #include -#define SAFE_COM_RELEASE(x) if (x) { (x)->Release(); } +#pragma comment(lib, "d3d11.lib") +#pragma comment(lib, "dxgi.lib") + +#define SAFE_COM_RELEASE(x) if (x) { (x)->Release(); (x) = nullptr; } typedef struct _VERTEX { @@ -80,6 +95,10 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) m_SwapChain(nullptr), m_DeviceContext(nullptr), m_RenderTargetView(nullptr), + m_VideoDevice(nullptr), + m_VideoContext(nullptr), + m_VideoProcessor(nullptr), + m_VideoProcessorEnumerator(nullptr), m_LastColorSpace(-1), m_LastFullRange(false), m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), @@ -101,6 +120,8 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) m_ContextLock = SDL_CreateMutex(); DwmEnableMMCSS(TRUE); + + m_VideoEnhancement = 
&VideoEnhancement::getInstance(); } D3D11VARenderer::~D3D11VARenderer() @@ -134,6 +155,8 @@ D3D11VARenderer::~D3D11VARenderer() SAFE_COM_RELEASE(m_OverlayPixelShader); + SAFE_COM_RELEASE(m_BackBufferResource); + SAFE_COM_RELEASE(m_RenderTargetView); SAFE_COM_RELEASE(m_SwapChain); @@ -150,11 +173,130 @@ D3D11VARenderer::~D3D11VARenderer() else { SAFE_COM_RELEASE(m_Device); SAFE_COM_RELEASE(m_DeviceContext); + SAFE_COM_RELEASE(m_VideoDevice); + SAFE_COM_RELEASE(m_VideoContext); } SAFE_COM_RELEASE(m_Factory); } +/** + * \brief Set Monitor HDR MetaData information + * + * Get the Monitor HDT MetaData via LimeLight library + * + * \param PSS_HDR_METADATA* HDRMetaData The varaible to set the metadata information + * \return bool Return True is succeed + */ +void D3D11VARenderer::setHDRStream(){ + DXGI_HDR_METADATA_HDR10 streamHDRMetaData; + // Prepare HDR Meta Data for Stream content + SS_HDR_METADATA hdrMetadata; + // if (m_VideoProcessor.p && m_IsHDRenabled && LiGetHdrMetadata(&hdrMetadata)) { + if (m_VideoProcessor.p && LiGetHdrMetadata(&hdrMetadata)) { + + // Magic constants to convert to fixed point. 
+ // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 + static constexpr int kMinLuminanceFixedPoint = 10000; + + streamHDRMetaData.RedPrimary[0] = hdrMetadata.displayPrimaries[0].x; + streamHDRMetaData.RedPrimary[1] = hdrMetadata.displayPrimaries[0].y; + streamHDRMetaData.GreenPrimary[0] = hdrMetadata.displayPrimaries[1].x; + streamHDRMetaData.GreenPrimary[1] = hdrMetadata.displayPrimaries[1].y; + streamHDRMetaData.BluePrimary[0] = hdrMetadata.displayPrimaries[2].x; + streamHDRMetaData.BluePrimary[1] = hdrMetadata.displayPrimaries[2].y; + streamHDRMetaData.WhitePoint[0] = hdrMetadata.whitePoint.x; + streamHDRMetaData.WhitePoint[1] = hdrMetadata.whitePoint.y; + streamHDRMetaData.MaxMasteringLuminance = hdrMetadata.maxDisplayLuminance * kMinLuminanceFixedPoint; + streamHDRMetaData.MinMasteringLuminance = hdrMetadata.minDisplayLuminance; + + streamHDRMetaData.MaxContentLightLevel = hdrMetadata.maxContentLightLevel; + streamHDRMetaData.MaxFrameAverageLightLevel = hdrMetadata.maxFrameAverageLightLevel; + if(streamHDRMetaData.MaxContentLightLevel == 0){ + streamHDRMetaData.MaxContentLightLevel = streamHDRMetaData.MaxFrameAverageLightLevel; + } + + // [TODO] (Source: https://github.com/xbmc) For AMD/HDR, + // we apparently need to do a custom tone (MaxMasteringLuminance=10000, MinMasteringLuminance=0). 
+ // Yet to be verified + // if(m_VideoEnhancement->isVendorAMD()){ + // m_StreamHDRMetaData.MaxMasteringLuminance = 10000; + // m_StreamHDRMetaData.MinMasteringLuminance = 0; + // } + + // Set HDR Stream (input) Meta data + m_VideoContext->VideoProcessorSetStreamHDRMetaData( + m_VideoProcessor, + 0, + DXGI_HDR_METADATA_TYPE_HDR10, + sizeof(DXGI_HDR_METADATA_HDR10), + &streamHDRMetaData + ); + } +} + +/** + * \brief Set Monitor HDR MetaData information + * + * Get the Monitor HDT MetaData via LimeLight library + * + * \param PSS_HDR_METADATA* HDRMetaData The varaible to set the metadata information + * \return bool Return True is succeed + */ +void D3D11VARenderer::setHDROutPut(){ + + DXGI_HDR_METADATA_HDR10 outputHDRMetaData; + + // Find the monitor attached to the application + Microsoft::WRL::ComPtr pOutput; + if (SUCCEEDED(m_SwapChain->GetContainingOutput(&pOutput))) { + Microsoft::WRL::ComPtr pOutput6; + if (SUCCEEDED(pOutput.As(&pOutput6))){ + DXGI_OUTPUT_DESC1 desc1 {}; + if (SUCCEEDED(pOutput6->GetDesc1(&desc1))){ + // Get Monitor ColorSpace for SDR and HDR (if the monitor is capable of HDR) + m_OutputColorSpace = desc1.ColorSpace; + + // Magic constants to convert to fixed point. 
+ // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 + static constexpr int kPrimariesFixedPoint = 50000; + static constexpr int kMinLuminanceFixedPoint = 10000; + + // Format Monitor HDR MetaData + outputHDRMetaData.RedPrimary[0] = desc1.RedPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.RedPrimary[1] = desc1.RedPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[0] = desc1.GreenPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[1] = desc1.GreenPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[0] = desc1.BluePrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[1] = desc1.BluePrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[0] = desc1.WhitePoint[0] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[1] = desc1.WhitePoint[1] * kPrimariesFixedPoint; + outputHDRMetaData.MaxMasteringLuminance = desc1.MaxLuminance; + outputHDRMetaData.MinMasteringLuminance = desc1.MinLuminance * kMinLuminanceFixedPoint; + // MaxContentLightLevel is not available in DXGI_OUTPUT_DESC1 structure + // https://learn.microsoft.com/fr-fr/windows/win32/api/dxgi1_6/ns-dxgi1_6-dxgi_output_desc1 + // But MaxContentLightLevel is not needed and greater or equal to MaxFullFrameLuminance, so it is safe to set a minimum for it + // https://professionalsupport.dolby.com/s/article/Calculation-of-MaxFALL-and-MaxCLL-metadata + // Also note that these are not fixed-point. 
+ outputHDRMetaData.MaxContentLightLevel = desc1.MaxFullFrameLuminance; + outputHDRMetaData.MaxFrameAverageLightLevel = desc1.MaxFullFrameLuminance; + } + } + } + + if (m_VideoProcessor.p) { + // Prepare HDR for the OutPut Monitor + m_VideoContext->VideoProcessorSetOutputHDRMetaData( + m_VideoProcessor, + DXGI_HDR_METADATA_TYPE_HDR10, + sizeof(DXGI_HDR_METADATA_HDR10), + &outputHDRMetaData + ); + } + + m_SwapChain->SetHDRMetaData(DXGI_HDR_METADATA_TYPE_HDR10, sizeof(outputHDRMetaData), &outputHDRMetaData); +} + bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound) { bool success = false; @@ -218,11 +360,33 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter goto Exit; } + // Get video device + if (!m_VideoDevice) { + hr = m_Device->QueryInterface(__uuidof(ID3D11VideoDevice), + (void**)&m_VideoDevice); + if (FAILED(hr)) { + return false; + } + } + + // Get video context + if (!m_VideoContext) { + hr = m_DeviceContext->QueryInterface(__uuidof(ID3D11VideoContext2), + (void**)&m_VideoContext); + if (FAILED(hr)) { + return false; + } + } + if (!checkDecoderSupport(adapter)) { m_DeviceContext->Release(); m_DeviceContext = nullptr; m_Device->Release(); m_Device = nullptr; + m_VideoContext->Release(); + m_VideoContext = nullptr; + m_VideoDevice->Release(); + m_VideoDevice = nullptr; goto Exit; } @@ -237,9 +401,242 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter return success; } +/** + * \brief Enable Video Super-Resolution for AMD GPU + * + * This feature is available starting from AMD series 7000 and driver AMD Software 24.1.1 (Jan 23, 2024) + * https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 + * + * \return void + */ +void D3D11VARenderer::enableAMDVideoSuperResolution(bool activate){ + // The feature is available since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 check how to implement it + // 
https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 + if(m_VideoEnhancement->isVendorAMD() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isVSRcapable()){ + // [TODO] Implement AMD Video Scaler + // Documentation and DX11 code sample + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_VQ_Enhancer_API.md + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/public/samples/CPPSamples/SimpleEncoder/SimpleEncoder.cpp + } +} + +/** + * \brief Enable Video Super-Resolution for Intel GPU + * + * This experimental feature from Intel is available starting from Intel iGPU from CPU Gen 10th (Skylake) and Intel graphics driver 27.20.100.8681 (Sept 15, 2020) + * Only Arc GPUs seem to provide visual improvement + * https://www.techpowerup.com/305558/intel-outs-video-super-resolution-for-chromium-browsers-works-with-igpus-11th-gen-onward + * + * \return void + */ +void D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ + HRESULT hr; + + if(m_VideoEnhancement->isVendorIntel() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isVSRcapable()){ + + constexpr GUID GUID_INTEL_VPE_INTERFACE = {0xedd1d4b9, 0x8659, 0x4cbc, {0xa4, 0xd6, 0x98, 0x31, 0xa2, 0x16, 0x3a, 0xc3}}; + constexpr UINT kIntelVpeFnVersion = 0x01; + constexpr UINT kIntelVpeFnMode = 0x20; + constexpr UINT kIntelVpeFnScaling = 0x37; + constexpr UINT kIntelVpeVersion3 = 0x0003; + constexpr UINT kIntelVpeModeNone = 0x0; + constexpr UINT kIntelVpeModePreproc = 0x01; + constexpr UINT kIntelVpeScalingDefault = 0x0; + constexpr UINT kIntelVpeScalingSuperResolution = 0x2; + + UINT param = 0; + + struct IntelVpeExt + { + UINT function; + void* param; + }; + + IntelVpeExt ext{0, ¶m}; + + ext.function = kIntelVpeFnVersion; + param = kIntelVpeVersion3; + + hr = 
m_VideoContext->VideoProcessorSetOutputExtension( + m_VideoProcessor, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel VPE version failed: %x", + hr); + return; + } + + ext.function = kIntelVpeFnMode; + if(activate){ + param = kIntelVpeModePreproc; + } else { + param = kIntelVpeModeNone; + } + + hr = m_VideoContext->VideoProcessorSetOutputExtension( + m_VideoProcessor, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel VPE mode failed: %x", + hr); + return; + } + + ext.function = kIntelVpeFnScaling; + if(activate){ + param = kIntelVpeScalingSuperResolution; + } else { + param = kIntelVpeScalingDefault; + } + + hr = m_VideoContext->VideoProcessorSetStreamExtension( + m_VideoProcessor, 0, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel Video Super Resolution failed: %x", + hr); + return; + } + + if(activate){ + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution enabled"); + } else { + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution disabled"); + } + } +} + +/** + * \brief Enable Video Super-Resolution for NVIDIA + * + * This feature is available starting from series NVIDIA RTX 2000 and GeForce driver 545.84 (Oct 17, 2023) + * + * IMPORTANT (Feb 5th, 2024): RTX VSR seems to be limited to SDR content only, + * it does add a grey filter if it is activated while HDR is on on stream (Host setting does not impact it). 
+ * It might be fixed later by NVIDIA, but the temporary solution is to disable the feature when Stream content is HDR-on + * + * \return void + */ +void D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate){ + HRESULT hr; + + + if(m_VideoEnhancement->isVendorNVIDIA() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isVSRcapable()){ + + // Toggle VSR + constexpr GUID GUID_NVIDIA_PPE_INTERFACE = {0xd43ce1b3, 0x1f4b, 0x48ac, {0xba, 0xee, 0xc3, 0xc2, 0x53, 0x75, 0xe6, 0xf7}}; + constexpr UINT kStreamExtensionVersionV1 = 0x1; + constexpr UINT kStreamExtensionMethodSuperResolution = 0x2; + + struct NvidiaStreamExt + { + UINT version; + UINT method; + UINT enable; + }; + + // Convert bool to UINT + UINT enable = activate; + + NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV1, kStreamExtensionMethodSuperResolution, enable}; + hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor, 0, &GUID_NVIDIA_PPE_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); + if (FAILED(hr)) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "NVIDIA RTX Video Super Resolution failed: %x", + hr); + return; + } + + if(activate){ + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution enabled"); + } else { + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution disabled"); + } + } +} + +/** + * \brief Enable HDR for AMD GPU + * + * This feature is not availble for AMD, and has not yet been announced (by Jan 24th, 2024) + * + * \return void + */ +void D3D11VARenderer::enableAMDHDR(bool activate){ + if(m_VideoEnhancement->isVendorAMD() && m_VideoEnhancement->isHDRcapable()){ + // [TODO] Feature not yet announced + } +} + +/** + * \brief Enable HDR for Intel GPU + * + * This feature is not availble for Intel, and has not yet been announced (by Jan 24th, 2024) + * + * \return void + */ +void D3D11VARenderer::enableIntelHDR(bool activate){ + if(m_VideoEnhancement->isVendorIntel() 
&& m_VideoEnhancement->isHDRcapable()){ + // [TODO] Feature not yet announced + } +} + +/** + * \brief Enable HDR for NVIDIA + * + * This feature is available starting from series NVIDIA RTX 2000 and GeForce driver 545.84 (Oct 17, 2023) + * + * Note: Even if the feature is enabled, I could not find any settings of ColorSpace and DXG8Format which + * can work without having the screen darker. Here are what I found: + * 1) Moonlight HDR: Checked / SwapChain: DXGI_FORMAT_R10G10B10A2_UNORM / VideoTexture: DXGI_FORMAT_P010 => SDR convert to HDR, but with darker rendering + * 2) Moonlight HDR: Unchecked / SwapChain: DXGI_FORMAT_R10G10B10A2_UNORM / VideoTexture: DXGI_FORMAT_NV12 => SDR convert to HDR, but with darker rendering + * + * \return void + */ +void D3D11VARenderer::enableNvidiaHDR(bool activate){ + HRESULT hr; + + if(m_VideoEnhancement->isVendorNVIDIA() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isHDRcapable()){ + + // Toggle HDR + constexpr GUID GUID_NVIDIA_TRUE_HDR_INTERFACE = {0xfdd62bb4, 0x620b, 0x4fd7, {0x9a, 0xb3, 0x1e, 0x59, 0xd0, 0xd5, 0x44, 0xb3}}; + constexpr UINT kStreamExtensionVersionV4 = 0x4; + constexpr UINT kStreamExtensionMethodTrueHDR = 0x3; + + struct NvidiaStreamExt + { + UINT version; + UINT method; + UINT enable : 1; + UINT reserved : 31; + }; + + // Convert bool to UINT + UINT enable = activate; + + NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV4, kStreamExtensionMethodTrueHDR, enable, 0u}; + hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor, 0, &GUID_NVIDIA_TRUE_HDR_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); + if (FAILED(hr)) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "NVIDIA RTX HDR failed: %x", + hr); + return; + } + + if(activate){ + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR enabled"); + } else { + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR disabled"); + } + } +} + bool 
D3D11VARenderer::initialize(PDECODER_PARAMETERS params) { - int adapterIndex, outputIndex; HRESULT hr; m_DecoderParams = *params; @@ -258,7 +655,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } if (!SDL_DXGIGetOutputInfo(SDL_GetWindowDisplayIndex(params->window), - &adapterIndex, &outputIndex)) { + &m_AdapterIndex, &m_OutputIndex)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "SDL_DXGIGetOutputInfo() failed: %s", SDL_GetError()); @@ -275,12 +672,12 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // First try the adapter corresponding to the display where our window resides. // This will let us avoid a copy if the display GPU has the required decoder. - if (!createDeviceByAdapterIndex(adapterIndex)) { + if (!createDeviceByAdapterIndex(m_AdapterIndex)) { // If that didn't work, we'll try all GPUs in order until we find one // or run out of GPUs (DXGI_ERROR_NOT_FOUND from EnumAdapters()) bool adapterNotFound = false; for (int i = 0; !adapterNotFound; i++) { - if (i == adapterIndex) { + if (i == m_AdapterIndex) { // Don't try the same GPU again continue; } @@ -339,6 +736,14 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; } + // [TODO] With NVIDIA RTX, while renderering using VideoProcessor with HDR activated in Moonlight, + // DXGI_FORMAT_R10G10B10A2_UNORM gives worse result than DXGI_FORMAT_R8G8B8A8_UNORM. + // Without this fix, HDR off on server renders gray screen and VSR is inactive (DXGI_COLOR_SPACE_TYPE type 8). + // For user perspective, it is better to not see such a bug, so for the moment I choose to force DXGI_FORMAT_R8G8B8A8_UNORM + if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_VideoEnhancement->isVendorNVIDIA()){ + swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; + } + // Use DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING with flip mode for non-vsync case, if possible. // NOTE: This is only possible in windowed or borderless windowed mode. 
if (!params->enableVsync) { @@ -435,6 +840,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // AVHWDeviceContext takes ownership of these objects d3d11vaDeviceContext->device = m_Device; d3d11vaDeviceContext->device_context = m_DeviceContext; + d3d11vaDeviceContext->video_device = m_VideoDevice; + d3d11vaDeviceContext->video_context = m_VideoContext; // Set lock functions that we will use to synchronize with FFmpeg's usage of our device context d3d11vaDeviceContext->lock = lockContext; @@ -455,6 +862,47 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } + // Check if the GPU is capable of AI-Enhancement + // This capability setup is place in this method because it is only available on FFmpeg with DirectX for hardware acceleration + m_VideoEnhancement->enableVideoEnhancement(false); + if(m_VideoEnhancement->isEnhancementCapable()){ + + // Enable the visibility of Video enhancement feature + m_VideoEnhancement->enableUIvisible(); + + StreamingPreferences streamingPreferences; + if(streamingPreferences.videoEnhancement){ + + if(createVideoProcessor()){ + m_VideoEnhancement->enableVideoEnhancement(true); + } + + // Enable VSR feature if available + if(m_VideoEnhancement->isVSRcapable()){ + if(m_VideoEnhancement->isVendorAMD()){ + enableAMDVideoSuperResolution(); + } else if(m_VideoEnhancement->isVendorIntel()){ + enableIntelVideoSuperResolution(); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + enableNvidiaVideoSuperResolution(); + } + } + + // Enable SDR->HDR feature if available + if(m_VideoEnhancement->isHDRcapable()){ + if(m_VideoEnhancement->isVendorAMD()){ + enableAMDHDR(); + } else if(m_VideoEnhancement->isVendorIntel()){ + enableIntelHDR(); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + enableNvidiaHDR(); + } + } + } + } + + SAFE_COM_RELEASE(m_BackBufferResource); + return true; } @@ -681,6 +1129,95 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) m_LastFullRange = fullRange; } +/** + * \brief 
Set the output colorspace + * + * According to the colorspace from the source, set the corresponding output colorspace + * + * \param AVFrame* frame The frame to be displayed on screen + * \return void + */ +void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) +{ + //Do Nothing when Moolight's HDR is disabled + if(!m_IsHDRenabled){ + return; + } + + bool frameFullRange = isFrameFullRange(frame); + int frameColorSpace = getFrameColorspace(frame); + + // [TODO] Fix the bug with VideoProcessorSetStreamColorSpace1 not working from the first frame + // BUG: If I try to set m_StreamColorSpace correctly since the beginning (=14), the renderer doesn't apply the color space, + // The frame appear gray. The temporary fix is to start from a wrong Color space (13), and switch to 14 after few milliseconds. + // At set the time to 100ms to not have any visual impact at loading, but even 1ms fix the issue, the bug might be linked to the 1st frame. + // This is a non-blocking issue, but I need to investigate further the reason of such a behavior. 
+ auto now = std::chrono::system_clock::now(); + long nowTime = std::chrono::duration_cast(now.time_since_epoch()).count(); + if(setStreamColorSpace && nowTime >= nextTime){ + nextTime = nowTime + increment; + if(streamIndex >= 1){ + setStreamColorSpace = false; + } + switch (frameColorSpace) { + case COLORSPACE_REC_2020: + m_StreamColorSpace = StreamColorSpacesFixHDR[streamIndex]; + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); + break; + default: + m_StreamColorSpace = StreamColorSpacesFixSDR[streamIndex]; + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); + } + if(setStreamColorSpace){ + streamIndex++; + } + } + + // If nothing has changed since last frame, we're done + if (frameColorSpace == m_LastColorSpace && frameFullRange == m_LastFullRange) { + return; + } + + m_LastColorSpace = frameColorSpace; + m_LastFullRange = frameFullRange; + + switch (frameColorSpace) { + case COLORSPACE_REC_2020: + // For an unclear reason in HDR mode (D3D11 bug?), when the 4 following filters, Brightness (0), Contrast (100), hue (0) and saturation (100), + // are all together at their default value, the tone tends to slight red. It is easy to see when streaming its own screen + // using an inception effect. + // The solution is the set Hue at -1, it doesn't impact the visual (compare to others), and it fixes the color issue. 
+ m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, -1); + // This Stream Color Space accepts HDR mode from Server, but NVIDIA AI-HDR will be disbaled (which is fine as we already have native HDR) + m_StreamColorSpace = ColorSpaces[14]; + if(m_VideoEnhancement->isVendorNVIDIA()){ + // [TODO] Remove this line if NVIDIA fix the issue of having VSR not working (add a gray filter) + // while HDR is activated for Stream content (swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM;) + enableNvidiaVideoSuperResolution(); // Turn it "false" if we prefer to not see the white border around elements when VSR is active. + } + // Reset the fix HDR Stream + setStreamColorSpace = true; + streamIndex = 0; + break; + default: + // For SDR we can use default values. + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, 0); + // This Stream Color Space is SDR, which enable the use of NVIDIA AI-HDR (Moonlight's HDR needs to be enabled) + // I don't know why, it is gray when HDR is on on Moonlight while using DXGI_FORMAT_R10G10B10A2_UNORM for the SwapChain, + // the fix is to force using DXGI_FORMAT_R8G8B8A8_UNORM which seems somehow not impacting the color rendering + m_StreamColorSpace = ColorSpaces[8]; + if(m_VideoEnhancement->isVendorNVIDIA()){ + // Always enable NVIDIA VSR for SDR Stream content + enableNvidiaVideoSuperResolution(); + } + // Reset the fix SDR Stream as it does work when back and forth with HDR on Server + setStreamColorSpace = true; + streamIndex = 0; + } + + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); +} + void D3D11VARenderer::renderVideo(AVFrame* frame) { // Bind video rendering vertex buffer @@ -698,14 +1235,189 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) srcBox.back = 1; m_DeviceContext->CopySubresourceRegion(m_VideoTexture, 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], 
(int)(intptr_t)frame->data[1], &srcBox); - // Bind our CSC shader (and constant buffer, if required) - bindColorConversion(frame); + // Draw the video + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + // Prepare the Stream + prepareVideoProcessorStream(frame); + // Render to the front the frames processed by the Video Processor + m_VideoContext->VideoProcessorBlt(m_VideoProcessor, m_OutputView.Get(), 0, 1, &m_StreamData); + } else { + // Bind our CSC shader (and constant buffer, if required) + bindColorConversion(frame); - // Bind SRVs for this frame - m_DeviceContext->PSSetShaderResources(0, 2, m_VideoTextureResourceViews); + // Bind SRVs for this frame + m_DeviceContext->PSSetShaderResources(0, 2, m_VideoTextureResourceViews); - // Draw the video - m_DeviceContext->DrawIndexed(6, 0, 0); + // Draw the video + m_DeviceContext->DrawIndexed(6, 0, 0); + } +} + +/** + * \brief Add the Video Processor to the pipeline + * + * Creating a Video Processor add additional GPU video processing method like AI Upscaling + * + * \param bool reset default is false, at true it forces the recreate the Video Processor + * \return bool Returns true if the Video processor is successfully created + */ +bool D3D11VARenderer::createVideoProcessor(bool reset) +{ + HRESULT hr; + + // [TODO] This timer is only used to fix a problem with VideoProcessorSetStreamColorSpace1 not properly applied at the beginning + // These 3 lines can be removed once the bug (non-blocking) is fixed. 
+ auto now = std::chrono::system_clock::now(); + startTime = std::chrono::duration_cast(now.time_since_epoch()).count(); + nextTime = startTime + increment; + + D3D11_VIDEO_PROCESSOR_CONTENT_DESC content_desc; + ZeroMemory(&content_desc, sizeof(content_desc)); + + if (m_VideoProcessor.p && m_VideoProcessorEnumerator.p) { + hr = m_VideoProcessorEnumerator->GetVideoProcessorContentDesc(&content_desc); + if (FAILED(hr)) + return false; + + if (content_desc.InputWidth != m_DecoderParams.width || + content_desc.InputHeight != m_DecoderParams.height || + content_desc.OutputWidth != m_DisplayWidth || + content_desc.OutputHeight != m_DisplayHeight || reset) { + m_VideoProcessorEnumerator.Release(); + m_VideoProcessor.Release(); + } + else { + return true; + } + } + + ZeroMemory(&content_desc, sizeof(content_desc)); + content_desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE; + content_desc.InputFrameRate.Numerator = m_DecoderParams.frameRate; + content_desc.InputFrameRate.Denominator = 1; + content_desc.InputWidth = m_DecoderParams.width; + content_desc.InputHeight = m_DecoderParams.height; + content_desc.OutputWidth = m_DisplayWidth; + content_desc.OutputHeight = m_DisplayHeight; + content_desc.OutputFrameRate.Numerator = m_DecoderParams.frameRate; + content_desc.OutputFrameRate.Denominator = 1; + content_desc.Usage = D3D11_VIDEO_USAGE_OPTIMAL_SPEED; + + hr = m_VideoDevice->CreateVideoProcessorEnumerator(&content_desc, &m_VideoProcessorEnumerator); + if (FAILED(hr)) + return false; + + hr = m_VideoDevice->CreateVideoProcessor(m_VideoProcessorEnumerator, 0, + &m_VideoProcessor); + if (FAILED(hr)) + return false; + + m_VideoContext->VideoProcessorSetStreamAutoProcessingMode(m_VideoProcessor, 0, false); + m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, 0); + + // The output surface will be read by Direct3D shaders (It seems useless in our context) + 
m_VideoContext->VideoProcessorSetOutputShaderUsage(m_VideoProcessor, true); + + // Set Background color + D3D11_VIDEO_COLOR bgColor; + bgColor.YCbCr = { 0.0625f, 0.5f, 0.5f, 1.0f }; // black color + m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor, true, &bgColor); + + ZeroMemory(&m_OutputViewDesc, sizeof(m_OutputViewDesc)); + m_OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D; + m_OutputViewDesc.Texture2D.MipSlice = 0; + + hr = m_VideoDevice->CreateVideoProcessorOutputView( + m_BackBufferResource, + m_VideoProcessorEnumerator, + &m_OutputViewDesc, + (ID3D11VideoProcessorOutputView**)&m_OutputView); + if (FAILED(hr)) { + return false; + } + + ZeroMemory(&m_InputViewDesc, sizeof(m_InputViewDesc)); + m_InputViewDesc.FourCC = 0; + m_InputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; + m_InputViewDesc.Texture2D.MipSlice = 0; + m_InputViewDesc.Texture2D.ArraySlice = 0; + + hr = m_VideoDevice->CreateVideoProcessorInputView( + m_VideoTexture, m_VideoProcessorEnumerator, &m_InputViewDesc, (ID3D11VideoProcessorInputView**)&m_InputView); + if (FAILED(hr)) + return false; + + // Apply processed filters to the surface + RECT srcRect = { 0 }; + srcRect.right = m_DecoderParams.width; + srcRect.bottom = m_DecoderParams.height; + + RECT dstRect = { 0 }; + dstRect.right = m_DisplayWidth; + dstRect.bottom = m_DisplayHeight; + + // Sscale the source to the destination surface while keeping the same ratio + float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); + float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); + + // [TODO] There is a behavior I don't understand (bug?) when the destination desRect is larger by one of its side than the source srcRect. + // If it is bigger, the window becomes black, but if it is smaller it is fine. + // Only one case is working when it is bigger is when the dstRest perfectly equal to the Display size. 
+ // Investigation: If there anything to do with pixel alignment (c.f. dxva2.cpp FFALIGN), or screenSpaceToNormalizedDeviceCoords ? + // Fix: When bigger we strech the picture to the window, it will be deformed, but at least will not crash. + if(m_DisplayWidth < m_DecoderParams.width && m_DisplayHeight < m_DecoderParams.height){ + if(ratioHeight < ratioWidth){ + // Adjust the Width + long width = static_cast(std::floor(m_DecoderParams.width * ratioHeight)); + dstRect.left = static_cast(std::floor( abs(m_DisplayWidth - width) / 2 )); + dstRect.right = dstRect.left + width; + } else if(ratioWidth < ratioHeight) { + // Adjust the Height + long height = static_cast(std::floor(m_DecoderParams.height * ratioWidth)); + dstRect.top = static_cast(std::floor( abs(m_DisplayHeight - height) / 2 )); + dstRect.bottom = dstRect.top + height; + } + } + + m_VideoContext->VideoProcessorSetStreamSourceRect(m_VideoProcessor, 0, true, &srcRect); + m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor, 0, true, &dstRect); + m_VideoContext->VideoProcessorSetStreamFrameFormat(m_VideoProcessor, 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE); + + ZeroMemory(&m_StreamData, sizeof(m_StreamData)); + m_StreamData.Enable = true; + m_StreamData.OutputIndex = m_OutputIndex; + m_StreamData.InputFrameOrField = 0; + m_StreamData.PastFrames = 0; + m_StreamData.FutureFrames = 0; + m_StreamData.ppPastSurfaces = nullptr; + m_StreamData.ppFutureSurfaces = nullptr; + m_StreamData.pInputSurface = m_InputView.Get(); + m_StreamData.ppPastSurfacesRight = nullptr; + m_StreamData.ppFutureSurfacesRight = nullptr; + m_StreamData.pInputSurfaceRight = nullptr; + + // Prepare HDR Meta Data for Stream content + setHDRStream(); + + // Prepare HDR Meta Data for the OutPut Monitor, will be ignored while using SDR + setHDROutPut(); + + // Set OutPut ColorSpace, m_OutputColorSpace is found from the active monitor earlier in D3D11VARenderer::initialize() + 
m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, m_OutputColorSpace); + + // The section is a customization to enhance (non-AI) shlithly the frame + // Reduce artefacts (like pixelisation around text), does work in additionto AI-enhancement for better result + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) + // Sharpen sligthly the picture to enhance details, does work in addition to AI-enhancement for better result + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 50); // (0 / 0 / 100) + // As no effect as the picture is not distorted + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_ANAMORPHIC_SCALING, true, 100); // (0 / 0 / 100) + + + setStreamColorSpace = true; + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); + + return true; } // This function must NOT use any DXGI or ID3D11DeviceContext methods @@ -860,6 +1572,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) // Check if the format is supported by this decoder BOOL supported; + m_IsHDRenabled = false; switch (m_DecoderParams.videoFormat) { case VIDEO_FORMAT_H264: @@ -905,6 +1618,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) videoDevice->Release(); return false; } + m_IsHDRenabled = true; break; case VIDEO_FORMAT_AV1_MAIN8: @@ -935,6 +1649,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) videoDevice->Release(); return false; } + m_IsHDRenabled = true; break; default: @@ -1130,8 +1845,7 @@ bool D3D11VARenderer::setupRenderingResources() // Create our render target view { - ID3D11Resource* backBufferResource; - hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource), (void**)&backBufferResource); + hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource), 
(void**)&m_BackBufferResource); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "IDXGISwapChain::GetBuffer() failed: %x", @@ -1139,8 +1853,8 @@ bool D3D11VARenderer::setupRenderingResources() return false; } - hr = m_Device->CreateRenderTargetView(backBufferResource, nullptr, &m_RenderTargetView); - backBufferResource->Release(); + hr = m_Device->CreateRenderTargetView(m_BackBufferResource, nullptr, &m_RenderTargetView); + // m_BackBufferResource is still needed in createVideoProcessor(), therefore will be released later if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateRenderTargetView() failed: %x", @@ -1280,7 +1994,8 @@ bool D3D11VARenderer::setupVideoTexture() texDesc.SampleDesc.Quality = 0; texDesc.SampleDesc.Count = 1; texDesc.Usage = D3D11_USAGE_DEFAULT; - texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; + // The flag D3D11_BIND_RENDER_TARGET is needed to enable the use of GPU enhancement + texDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE; texDesc.CPUAccessFlags = 0; texDesc.MiscFlags = 0; diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index 8421fdb46..bc10e4d89 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -2,8 +2,12 @@ #include "renderer.h" -#include +#include #include +#include +#include +#include +#include "streaming/video/videoenhancement.h" extern "C" { #include @@ -30,12 +34,24 @@ class D3D11VARenderer : public IFFmpegRenderer bool setupVideoTexture(); void renderOverlay(Overlay::OverlayType type); void bindColorConversion(AVFrame* frame); + void prepareVideoProcessorStream(AVFrame* frame); void renderVideo(AVFrame* frame); + bool createVideoProcessor(bool reset = false); + void enableAMDVideoSuperResolution(bool activate = true); + void enableIntelVideoSuperResolution(bool activate = true); + void enableNvidiaVideoSuperResolution(bool activate = true); 
+ void enableAMDHDR(bool activate = true); + void enableIntelHDR(bool activate = true); + void enableNvidiaHDR(bool activate = true); bool checkDecoderSupport(IDXGIAdapter* adapter); bool createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound = nullptr); + void setHDRStream(); + void setHDROutPut(); int m_DecoderSelectionPass; + int m_AdapterIndex = 0; + int m_OutputIndex = 0; IDXGIFactory5* m_Factory; ID3D11Device* m_Device; IDXGISwapChain4* m_SwapChain; @@ -43,6 +59,68 @@ class D3D11VARenderer : public IFFmpegRenderer ID3D11RenderTargetView* m_RenderTargetView; SDL_mutex* m_ContextLock; + ID3D11VideoDevice* m_VideoDevice; + ID3D11VideoContext2* m_VideoContext; + CComPtr m_VideoProcessor; + CComPtr m_VideoProcessorEnumerator; + D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC m_OutputViewDesc; + D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC m_InputViewDesc; + D3D11_VIDEO_PROCESSOR_STREAM m_StreamData; + Microsoft::WRL::ComPtr m_OutputView; + Microsoft::WRL::ComPtr m_InputView; + ID3D11Resource* m_BackBufferResource; + VideoEnhancement* m_VideoEnhancement; + bool m_IsHDRenabled = false; + + // Variable unused, but keep it as reference for debugging purpose + DXGI_COLOR_SPACE_TYPE ColorSpaces[26] = { + DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709, // 0 - A + DXGI_COLOR_SPACE_RGB_FULL_G10_NONE_P709, // 1 - A + DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709, // 2 - I * A + DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P2020, // 3 - I* + DXGI_COLOR_SPACE_RESERVED, // 4 + DXGI_COLOR_SPACE_YCBCR_FULL_G22_NONE_P709_X601, // 5 - O A + DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P601, // 6 - I A + DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P601, // 7 - O A + DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709, // 8 - I A + DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709, // 9 - A + DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020, // 10 - I + DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P2020, // 11 - O + DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020, // 12 - O O + DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020, // 13 - I + 
DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020, // 14 - I I* + DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_TOPLEFT_P2020, // 15 - I + DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_TOPLEFT_P2020, // 16 - I + DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P2020, // 17 - I I* + DXGI_COLOR_SPACE_YCBCR_STUDIO_GHLG_TOPLEFT_P2020, // 18 - I + DXGI_COLOR_SPACE_YCBCR_FULL_GHLG_TOPLEFT_P2020, // 19 - I + DXGI_COLOR_SPACE_RGB_STUDIO_G24_NONE_P709, // 20 - I I* + DXGI_COLOR_SPACE_RGB_STUDIO_G24_NONE_P2020, // 21 - I* + DXGI_COLOR_SPACE_YCBCR_STUDIO_G24_LEFT_P709, // 22 - I + DXGI_COLOR_SPACE_YCBCR_STUDIO_G24_LEFT_P2020, // 23 - I I + DXGI_COLOR_SPACE_YCBCR_STUDIO_G24_TOPLEFT_P2020, // 24 - I + DXGI_COLOR_SPACE_CUSTOM, // 25 + }; + + DXGI_COLOR_SPACE_TYPE m_StreamColorSpace = ColorSpaces[8]; // SDR-Only (HDR is 14) + DXGI_COLOR_SPACE_TYPE m_OutputColorSpace = ColorSpaces[12]; // SDR & HDR + + // [TODO] Remove the timer feature once the bug with VideoProcessorSetStreamColorSpace1 is fixed + bool setStreamColorSpace = true; + long startTime; + long nextTime; + int streamIndex = 0; + int increment = 100; + DXGI_COLOR_SPACE_TYPE StreamColorSpacesFixHDR[2] = { + DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020, // 13 + DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020, // 14 + }; + DXGI_COLOR_SPACE_TYPE StreamColorSpacesFixSDR[2] = { + DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709, // 9 + DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709, // 8 + }; + ID3D11ShaderResourceView* m_VideoTextureResourceView; + DECODER_PARAMETERS m_DecoderParams; int m_DisplayWidth; int m_DisplayHeight; diff --git a/app/streaming/video/videoenhancement.cpp b/app/streaming/video/videoenhancement.cpp index 5db52aefd..31a816be4 100644 --- a/app/streaming/video/videoenhancement.cpp +++ b/app/streaming/video/videoenhancement.cpp @@ -83,7 +83,7 @@ bool VideoEnhancement::setGPUinformation() // Set GPU information m_VendorId = adapterIdentifier.VendorId; m_GPUname = description; - m_DriverVersion = GetVideoDriverInfo(); + m_DriverVersion = getVideoDriverInfo(); 
qInfo() << "Active GPU: " << m_GPUname; qInfo() << "Video Driver: " << m_DriverVersion; @@ -108,7 +108,7 @@ bool VideoEnhancement::setGPUinformation() * * \return int Returns the Video driver version as an integer */ -int VideoEnhancement::GetVideoDriverInfo() +int VideoEnhancement::getVideoDriverInfo() { HKEY hKey = nullptr; diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h index 24d8e7502..9b42472e1 100644 --- a/app/streaming/video/videoenhancement.h +++ b/app/streaming/video/videoenhancement.h @@ -19,7 +19,7 @@ class VideoEnhancement : public QObject bool m_Initialized = false; bool m_Enabled = false; - bool m_UIvisible = true; // [Bruno] It should be false, and turn true by dxva2.cpp + bool m_UIvisible = false; // Vendors' name (PCI Special Interest Group) const int VENDOR_ID_AMD = 4098; @@ -49,7 +49,7 @@ class VideoEnhancement : public QObject VideoEnhancement& operator=(const VideoEnhancement&); bool setGPUinformation(); - int GetVideoDriverInfo(); + int getVideoDriverInfo(); public: static VideoEnhancement& getInstance(); From 2a1e2267eb9b08e7fe0676b4a977aec20d76e382 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sat, 10 Feb 2024 08:33:52 +0800 Subject: [PATCH 03/53] Fix Mac build issue --- app/streaming/video/videoenhancement.cpp | 13 +++++++++---- app/streaming/video/videoenhancement.h | 3 +++ 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/app/streaming/video/videoenhancement.cpp b/app/streaming/video/videoenhancement.cpp index 31a816be4..6d6a8279b 100644 --- a/app/streaming/video/videoenhancement.cpp +++ b/app/streaming/video/videoenhancement.cpp @@ -1,13 +1,14 @@ #include "videoenhancement.h" #include +#include + +#ifdef Q_OS_WIN #include #include #include -#include - #include - #pragma comment(lib, "Advapi32.lib") +#endif /** * \brief Constructor (Singleton) @@ -98,7 +99,7 @@ bool VideoEnhancement::setGPUinformation() if (pDXGIDevice) pDXGIDevice->Release(); if (pAdapter) pAdapter->Release(); 
- #endif +#endif return success; } @@ -111,6 +112,8 @@ bool VideoEnhancement::setGPUinformation() int VideoEnhancement::getVideoDriverInfo() { +#ifdef Q_OS_WIN + HKEY hKey = nullptr; const wchar_t* SUBKEY = L"SYSTEM\\CurrentControlSet\\Control\\Video"; @@ -170,6 +173,8 @@ int VideoEnhancement::getVideoDriverInfo() } while (sta == ERROR_SUCCESS); RegCloseKey(hKey); +#endif + return m_DriverVersion; } diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h index 9b42472e1..864bed08f 100644 --- a/app/streaming/video/videoenhancement.h +++ b/app/streaming/video/videoenhancement.h @@ -6,8 +6,11 @@ #include #include #include + +#ifdef Q_OS_WIN #include #include +#endif class VideoEnhancement : public QObject { From b5061f38af4e60c5520bf6d90ed7487c8dbf5898 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sat, 10 Feb 2024 09:43:10 +0800 Subject: [PATCH 04/53] Improve the rendering of HDR mode by removing visual glitches --- .../video/ffmpeg-renderers/d3d11va.cpp | 24 +++++++++++-------- .../video/ffmpeg-renderers/d3d11va.h | 1 + 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index f636ac337..3ce9682a4 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -101,6 +101,7 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) m_VideoProcessorEnumerator(nullptr), m_LastColorSpace(-1), m_LastFullRange(false), + m_LastServerHDR(LiGetCurrentHostDisplayHdrMode()), m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), m_AllowTearing(false), m_VideoGenericPixelShader(nullptr), @@ -736,14 +737,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; } - // [TODO] With NVIDIA RTX, while renderering using VideoProcessor with HDR activated in Moonlight, - // DXGI_FORMAT_R10G10B10A2_UNORM gives worse result than 
DXGI_FORMAT_R8G8B8A8_UNORM. - // Without this fix, HDR off on server renders gray screen and VSR is inactive (DXGI_COLOR_SPACE_TYPE type 8). - // For user perspective, it is better to not see such a bug, so for the moment I choose to force DXGI_FORMAT_R8G8B8A8_UNORM - if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_VideoEnhancement->isVendorNVIDIA()){ - swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; - } - // Use DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING with flip mode for non-vsync case, if possible. // NOTE: This is only possible in windowed or borderless windowed mode. if (!params->enableVsync) { @@ -1139,11 +1132,22 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) */ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) { - //Do Nothing when Moolight's HDR is disabled + //Do Nothing when Moolight's HDR is disabled as there is no space color issue for SDR, and in all cases HDR on server cannot work. if(!m_IsHDRenabled){ return; } + // We reload the renderer if the server HDR mode is changed + // This HDR auto set feature helps to have a SDR with VSR enabled, and a HDR without color space issue (strong banding issue) + // and with VSR disabled automatically to not have the white border color issue. 
+ if(m_LastServerHDR != LiGetCurrentHostDisplayHdrMode()){ + m_LastServerHDR = LiGetCurrentHostDisplayHdrMode(); + SDL_Event event; + event.type = SDL_RENDER_TARGETS_RESET; + SDL_PushEvent(&event); + return; + } + bool frameFullRange = isFrameFullRange(frame); int frameColorSpace = getFrameColorspace(frame); @@ -1193,7 +1197,7 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) if(m_VideoEnhancement->isVendorNVIDIA()){ // [TODO] Remove this line if NVIDIA fix the issue of having VSR not working (add a gray filter) // while HDR is activated for Stream content (swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM;) - enableNvidiaVideoSuperResolution(); // Turn it "false" if we prefer to not see the white border around elements when VSR is active. + enableNvidiaVideoSuperResolution(false); // Turn it "false" if we prefer to not see the white border around elements when VSR is active. } // Reset the fix HDR Stream setStreamColorSpace = true; diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index bc10e4d89..38e5d4b44 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -126,6 +126,7 @@ class D3D11VARenderer : public IFFmpegRenderer int m_DisplayHeight; int m_LastColorSpace; bool m_LastFullRange; + bool m_LastServerHDR; AVColorTransferCharacteristic m_LastColorTrc; bool m_AllowTearing; From 7391f940c201cf2ffbb9e15f784310181f8febeb Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Wed, 7 Feb 2024 20:43:46 +0100 Subject: [PATCH 05/53] Add a new UI feature called "Video AI-Enhancement" (VAE) Changes made: 1. Creation of a new class VideoEnhancement which check the liability to the feature. 2. Add the checkbox "Video AI-Enhancement" in the "Basic Settings" groupbox. 3. Disable VAE when fullscreen is selected 4. Add a registery record 5. On the Overlay and the mention "AI-Enhanced" when activated 6. 
Add a command line for the class VideoEnhancement --- .gitignore | 1 + app/app.pro | 2 + app/cli/commandlineparser.cpp | 3 + app/gui/SettingsView.qml | 106 ++++++++ app/main.cpp | 10 + app/settings/streamingpreferences.cpp | 3 + app/settings/streamingpreferences.h | 6 + app/streaming/video/ffmpeg.cpp | 14 +- app/streaming/video/ffmpeg.h | 2 + app/streaming/video/videoenhancement.cpp | 313 +++++++++++++++++++++++ app/streaming/video/videoenhancement.h | 71 +++++ 11 files changed, 528 insertions(+), 3 deletions(-) create mode 100644 app/streaming/video/videoenhancement.cpp create mode 100644 app/streaming/video/videoenhancement.h diff --git a/.gitignore b/.gitignore index 112e91394..869b5a3a2 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ **/.vs/ +**/.vscode/ build/ config.tests/*/.qmake.stash config.tests/*/Makefile diff --git a/app/app.pro b/app/app.pro index ff0269e65..e46f8e683 100644 --- a/app/app.pro +++ b/app/app.pro @@ -199,6 +199,7 @@ SOURCES += \ gui/sdlgamepadkeynavigation.cpp \ streaming/video/overlaymanager.cpp \ backend/systemproperties.cpp \ + streaming/video/videoenhancement.cpp \ wm.cpp HEADERS += \ @@ -207,6 +208,7 @@ HEADERS += \ cli/pair.h \ settings/compatfetcher.h \ settings/mappingfetcher.h \ + streaming/video/videoenhancement.h \ utils.h \ backend/computerseeker.h \ backend/identitymanager.h \ diff --git a/app/cli/commandlineparser.cpp b/app/cli/commandlineparser.cpp index 23fc65cd3..0d2b7a99e 100644 --- a/app/cli/commandlineparser.cpp +++ b/app/cli/commandlineparser.cpp @@ -470,6 +470,9 @@ void StreamCommandLineParser::parse(const QStringList &args, StreamingPreference // Resolve --frame-pacing and --no-frame-pacing options preferences->framePacing = parser.getToggleOptionValue("frame-pacing", preferences->framePacing); + // Resolve --video-enhancement and --no-video-enhancement options + preferences->videoEnhancement = parser.getToggleOptionValue("video-enhancement", preferences->videoEnhancement); + // Resolve --mute-on-focus-loss 
and --no-mute-on-focus-loss options preferences->muteOnFocusLoss = parser.getToggleOptionValue("mute-on-focus-loss", preferences->muteOnFocusLoss); diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index da6e310da..449964543 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -7,12 +7,16 @@ import StreamingPreferences 1.0 import ComputerManager 1.0 import SdlGamepadKeyNavigation 1.0 import SystemProperties 1.0 +import VideoEnhancement 1.0 Flickable { id: settingsPage objectName: qsTr("Settings") signal languageChanged() + signal displayModeChanged() + signal windowModeChanged() + signal videoEnhancementChanged() boundsBehavior: Flickable.OvershootBounds @@ -231,6 +235,9 @@ Flickable { recalculateWidth() lastIndexValue = currentIndex + + // Signal other controls + displayModeChanged() } id: resolutionComboBox @@ -292,6 +299,9 @@ Flickable { else { updateBitrateForSelection() } + + // Signal other controls + displayModeChanged() } NavigableDialog { @@ -310,6 +320,9 @@ Flickable { onClosed: { widthField.clear() heightField.clear() + + // Signal other controls + displayModeChanged() } onRejected: { @@ -759,9 +772,25 @@ Flickable { activated(currentIndex) } + // Video Super-Resolution does not work in exclusive full screen, so auto switch do borderless window + // [TODO] This may change according to what AMD and Intel will implement, if they can allow video enhancement in fullscreen + function checkVSR(){ + if(videoEnhancementCheck.checked && model.get(currentIndex).val === StreamingPreferences.WM_FULLSCREEN){ + for (var i = 0; i < model.count; i++) { + var thisWm = model.get(i).val; + if (model.get(i).val === StreamingPreferences.WM_FULLSCREEN_DESKTOP) { + currentIndex = i + break + } + } + activated(currentIndex) + } + } + Component.onCompleted: { reinitialize() languageChanged.connect(reinitialize) + videoEnhancementChanged.connect(checkVSR) } id: windowModeComboBox @@ -771,6 +800,8 @@ Flickable { textRole: "text" onActivated: { 
StreamingPreferences.windowMode = model.get(currentIndex).val + // Signal others + windowModeChanged() } ToolTip.delay: 1000 @@ -812,6 +843,81 @@ Flickable { ToolTip.visible: hovered ToolTip.text: qsTr("Frame pacing reduces micro-stutter by delaying frames that come in too early") } + + CheckBox { + id: videoEnhancementCheck + width: parent.width + hoverEnabled: true + text: qsTr("Video AI-Enhancement") + font.pointSize: 12 + visible: VideoEnhancement.isUIvisible() + enabled: true + checked: StreamingPreferences.videoEnhancement + property bool checkedSaved + + onCheckedChanged: { + StreamingPreferences.videoEnhancement = checked + // The value of checkedSaved is set while changing the WindowMode, need to find a way not to. + if(StreamingPreferences.windowMode !== StreamingPreferences.WM_FULLSCREEN){ + checkedSaved = checked + } + // Signal others + videoEnhancementChanged() + } + ToolTip.delay: 1000 + ToolTip.timeout: 5000 + ToolTip.visible: hovered + ToolTip.text: + qsTr("Enhance video quality by utilizing the GPU's AI-Enhancement capabilities.") + + qsTr("\nThis feature effectively upscales, reduces compression artifacts and enhances the clarity of streamed content.") + + qsTr("\nNote:") + + qsTr("\n - For optimal performance, use the software in borderless window mode; this feature is not applicable in fullscreen mode.") + + qsTr("\n - If available, ensure that appropriate settings, such as VSR (Virtual Super Resolution), are enabled in your GPU driver configurations.") + + qsTr("\n - Be advised that using this feature on laptops running on battery power may lead to significant battery drain.") + + function reinitialize() { + if(typeof(checkedSaved) === "undefined"){ + checkedSaved = checked + } + if(!VideoEnhancement.isUIvisible()){ + checked = false + checkedSaved = checked + visible = false + } + // If Exclusive fullscreen is selected, disabled the VSR as it does not work in this window mode + else if(StreamingPreferences.windowMode === 
StreamingPreferences.WM_FULLSCREEN){ + checked = false + } + else { + // Get back the saved status + checked = checkedSaved + } + // Indicate if the feature is available but not officially deployed by the Vendor + if(VideoEnhancement.isExperimental()){ + text = qsTr("Video AI-Enhancement (Experimental)") + } + } + + Timer { + id: vsrTimer + interval: 300 // 0 to make it async to get the final status of StreamingPreferences.windowMode (which is set too late in the process) + running: false // Don't start the timer immediately + repeat: false // Run only once + + onTriggered: { + parent.reinitialize() + } + } + + Component.onCompleted: { + checkedSaved = checked + reinitialize() + windowModeChanged.connect(() => { + checked = checkedSaved + vsrTimer.start() + }) + } + } } } diff --git a/app/main.cpp b/app/main.cpp index 67eac1d77..f01d2ffb8 100644 --- a/app/main.cpp +++ b/app/main.cpp @@ -43,6 +43,7 @@ #include "streaming/session.h" #include "settings/streamingpreferences.h" #include "gui/sdlgamepadkeynavigation.h" +#include "streaming/video/videoenhancement.h" #if !defined(QT_DEBUG) && defined(Q_OS_WIN32) // Log to file for release Windows builds @@ -642,6 +643,15 @@ int main(int argc, char *argv[]) [](QQmlEngine* qmlEngine, QJSEngine*) -> QObject* { return new StreamingPreferences(qmlEngine); }); + qmlRegisterSingletonType("VideoEnhancement", 1, 0, "VideoEnhancement", + [](QQmlEngine *engine, QJSEngine *scriptEngine) -> QObject* { + Q_UNUSED(engine) + Q_UNUSED(scriptEngine) + VideoEnhancement* videoEnhancement = &VideoEnhancement::getInstance(); + // Set the ownership to CppOwnership to avoid an error when QLM engine tries to delete the object + QQmlEngine::setObjectOwnership(videoEnhancement, QQmlEngine::CppOwnership); + return videoEnhancement; + }); // Create the identity manager on the main thread IdentityManager::get(); diff --git a/app/settings/streamingpreferences.cpp b/app/settings/streamingpreferences.cpp index 6a883bc08..05fe6c4fa 100644 --- 
a/app/settings/streamingpreferences.cpp +++ b/app/settings/streamingpreferences.cpp @@ -30,6 +30,7 @@ #define SER_ABSTOUCHMODE "abstouchmode" #define SER_STARTWINDOWED "startwindowed" #define SER_FRAMEPACING "framepacing" +#define SER_VIDEOENHANCEMENT "videoenhancement" #define SER_CONNWARNINGS "connwarnings" #define SER_UIDISPLAYMODE "uidisplaymode" #define SER_RICHPRESENCE "richpresence" @@ -93,6 +94,7 @@ void StreamingPreferences::reload() absoluteMouseMode = settings.value(SER_ABSMOUSEMODE, false).toBool(); absoluteTouchMode = settings.value(SER_ABSTOUCHMODE, true).toBool(); framePacing = settings.value(SER_FRAMEPACING, false).toBool(); + videoEnhancement = settings.value(SER_VIDEOENHANCEMENT, false).toBool(); connectionWarnings = settings.value(SER_CONNWARNINGS, true).toBool(); richPresence = settings.value(SER_RICHPRESENCE, true).toBool(); gamepadMouse = settings.value(SER_GAMEPADMOUSE, true).toBool(); @@ -275,6 +277,7 @@ void StreamingPreferences::save() settings.setValue(SER_ABSMOUSEMODE, absoluteMouseMode); settings.setValue(SER_ABSTOUCHMODE, absoluteTouchMode); settings.setValue(SER_FRAMEPACING, framePacing); + settings.setValue(SER_VIDEOENHANCEMENT, videoEnhancement); settings.setValue(SER_CONNWARNINGS, connectionWarnings); settings.setValue(SER_RICHPRESENCE, richPresence); settings.setValue(SER_GAMEPADMOUSE, gamepadMouse); diff --git a/app/settings/streamingpreferences.h b/app/settings/streamingpreferences.h index 0e2cf365e..49558ae3b 100644 --- a/app/settings/streamingpreferences.h +++ b/app/settings/streamingpreferences.h @@ -116,6 +116,8 @@ class StreamingPreferences : public QObject Q_PROPERTY(bool absoluteMouseMode MEMBER absoluteMouseMode NOTIFY absoluteMouseModeChanged) Q_PROPERTY(bool absoluteTouchMode MEMBER absoluteTouchMode NOTIFY absoluteTouchModeChanged) Q_PROPERTY(bool framePacing MEMBER framePacing NOTIFY framePacingChanged) + Q_PROPERTY(bool videoEnhancement MEMBER videoEnhancement NOTIFY videoEnhancementChanged) + Q_PROPERTY(bool 
videoEnhancementVisible MEMBER videoEnhancementVisible NOTIFY videoEnhancementVisibleChanged) Q_PROPERTY(bool connectionWarnings MEMBER connectionWarnings NOTIFY connectionWarningsChanged) Q_PROPERTY(bool richPresence MEMBER richPresence NOTIFY richPresenceChanged) Q_PROPERTY(bool gamepadMouse MEMBER gamepadMouse NOTIFY gamepadMouseChanged) @@ -152,6 +154,8 @@ class StreamingPreferences : public QObject bool absoluteMouseMode; bool absoluteTouchMode; bool framePacing; + bool videoEnhancement; + bool videoEnhancementVisible; bool connectionWarnings; bool richPresence; bool gamepadMouse; @@ -192,6 +196,8 @@ class StreamingPreferences : public QObject void uiDisplayModeChanged(); void windowModeChanged(); void framePacingChanged(); + void videoEnhancementChanged(); + void videoEnhancementVisibleChanged(); void connectionWarningsChanged(); void richPresenceChanged(); void gamepadMouseChanged(); diff --git a/app/streaming/video/ffmpeg.cpp b/app/streaming/video/ffmpeg.cpp index 6ecc6c88f..4453620f2 100644 --- a/app/streaming/video/ffmpeg.cpp +++ b/app/streaming/video/ffmpeg.cpp @@ -221,7 +221,8 @@ FFmpegVideoDecoder::FFmpegVideoDecoder(bool testOnly) m_VideoFormat(0), m_NeedsSpsFixup(false), m_TestOnly(testOnly), - m_DecoderThread(nullptr) + m_DecoderThread(nullptr), + m_VideoEnhancement(&VideoEnhancement::getInstance()) { SDL_zero(m_ActiveWndVideoStats); SDL_zero(m_LastWndVideoStats); @@ -709,15 +710,22 @@ void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output, i break; } + // Display if AI-Enhancement is enabled + const char* aiEnhanced = ""; + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + aiEnhanced = "AI-Enhanced"; + } + if (stats.receivedFps > 0) { if (m_VideoDecoderCtx != nullptr) { ret = snprintf(&output[offset], length - offset, - "Video stream: %dx%d %.2f FPS (Codec: %s)\n", + "Video stream: %dx%d %.2f FPS (Codec: %s) %s\n", m_VideoDecoderCtx->width, m_VideoDecoderCtx->height, stats.totalFps, - codecString); + codecString, + 
aiEnhanced); if (ret < 0 || ret >= length - offset) { SDL_assert(false); return; diff --git a/app/streaming/video/ffmpeg.h b/app/streaming/video/ffmpeg.h index eb3c1812b..536446f1e 100644 --- a/app/streaming/video/ffmpeg.h +++ b/app/streaming/video/ffmpeg.h @@ -6,6 +6,7 @@ #include "decoder.h" #include "ffmpeg-renderers/renderer.h" #include "ffmpeg-renderers/pacer/pacer.h" +#include "streaming/video/videoenhancement.h" extern "C" { #include @@ -94,6 +95,7 @@ class FFmpegVideoDecoder : public IVideoDecoder { bool m_TestOnly; SDL_Thread* m_DecoderThread; SDL_atomic_t m_DecoderThreadShouldQuit; + VideoEnhancement* m_VideoEnhancement; // Data buffers in the queued DU are not valid QQueue m_FrameInfoQueue; diff --git a/app/streaming/video/videoenhancement.cpp b/app/streaming/video/videoenhancement.cpp new file mode 100644 index 000000000..5db52aefd --- /dev/null +++ b/app/streaming/video/videoenhancement.cpp @@ -0,0 +1,313 @@ +#include "videoenhancement.h" +#include +#include +#include +#include +#include + +#include + +#pragma comment(lib, "Advapi32.lib") + +/** + * \brief Constructor (Singleton) + * + * Check the capacity to handle the AI-Enhancement features such as Video Super-Resolution or SDR to HDR, according to multiple parameters such as OS or Video driver. 
+ * + * \return void + */ +VideoEnhancement::VideoEnhancement() +{ + if(!m_Initialized){ + setGPUinformation(); + // Avoid to set variables every call of the instance + m_Initialized = true; + } +} + +/** + * \brief Get the singleton instance + * + * Render the instance of the singleton + * + * \return VideoEnhancement instance + */ +VideoEnhancement &VideoEnhancement::getInstance() +{ + static VideoEnhancement instance; + return instance; +} + +/** + * \brief Retreive GPU information + * + * Retreive all GPU information: Vendor ID, Driver version, GPU name + * + * \return bool Returns true if it successfully retreived the GPU information + */ +bool VideoEnhancement::setGPUinformation() +{ + bool success = false; + +#ifdef Q_OS_WIN + + // Create a Direct3D 11 device + ID3D11Device* pD3DDevice = nullptr; + ID3D11DeviceContext* pContext = nullptr; + + HRESULT hr = D3D11CreateDevice( + nullptr, + D3D_DRIVER_TYPE_HARDWARE, + nullptr, + D3D11_CREATE_DEVICE_DEBUG, + nullptr, + 0, + D3D11_SDK_VERSION, + &pD3DDevice, + nullptr, + &pContext + ); + + IDXGIAdapter* pAdapter = nullptr; + IDXGIDevice* pDXGIDevice = nullptr; + // Get the DXGI device from the D3D11 device. + // It identifies which GPU is being used by the application in case of multiple one (like a iGPU with a dedicated GPU). 
+ if (SUCCEEDED(hr) && SUCCEEDED(pD3DDevice->QueryInterface(__uuidof(IDXGIDevice), (void**)&pDXGIDevice))) { + // Get the DXGI adapter from the DXGI device + if (SUCCEEDED(pDXGIDevice->GetAdapter(&pAdapter))) { + DXGI_ADAPTER_DESC adapterIdentifier; + if (SUCCEEDED(pAdapter->GetDesc(&adapterIdentifier))) { + // Convert wchar[128] to string + std::wstring description(adapterIdentifier.Description); + + // Set GPU information + m_VendorId = adapterIdentifier.VendorId; + m_GPUname = description; + m_DriverVersion = GetVideoDriverInfo(); + + qInfo() << "Active GPU: " << m_GPUname; + qInfo() << "Video Driver: " << m_DriverVersion; + + } + } + + } + + // Release resources + if (pD3DDevice) pD3DDevice->Release(); + if (pDXGIDevice) pDXGIDevice->Release(); + if (pAdapter) pAdapter->Release(); + + #endif + + return success; +} + +/** + * \brief Get the Video driver version + * + * \return int Returns the Video driver version as an integer + */ +int VideoEnhancement::GetVideoDriverInfo() +{ + + HKEY hKey = nullptr; + const wchar_t* SUBKEY = L"SYSTEM\\CurrentControlSet\\Control\\Video"; + + if (ERROR_SUCCESS != RegOpenKeyExW(HKEY_LOCAL_MACHINE, SUBKEY, 0, KEY_ENUMERATE_SUB_KEYS, &hKey)) + return m_DriverVersion; + + LSTATUS sta = ERROR_SUCCESS; + wchar_t keyName[128] = {}; + DWORD index = 0; + DWORD len; + + do + { + len = sizeof(keyName) / sizeof(wchar_t); + sta = RegEnumKeyExW(hKey, index, keyName, &len, nullptr, nullptr, nullptr, nullptr); + index++; + + if (sta != ERROR_SUCCESS) + continue; + + std::wstring subkey(SUBKEY); + subkey.append(L"\\"); + subkey.append(keyName); + subkey.append(L"\\"); + subkey.append(L"0000"); + DWORD lg; + + wchar_t desc[128] = {}; + lg = sizeof(desc) / sizeof(wchar_t); + if (ERROR_SUCCESS != RegGetValueW(HKEY_LOCAL_MACHINE, subkey.c_str(), L"DriverDesc", + RRF_RT_REG_SZ, nullptr, desc, &lg)) + continue; + + std::wstring s_desc(desc); + if (s_desc != m_GPUname) + continue; + + // Driver of interest found, we read version + wchar_t 
charVersion[64] = {}; + lg = sizeof(charVersion) / sizeof(wchar_t); + if (ERROR_SUCCESS != RegGetValueW(HKEY_LOCAL_MACHINE, subkey.c_str(), L"DriverVersion", + RRF_RT_REG_SZ, nullptr, charVersion, &lg)) + continue; + + std::wstring strVersion(charVersion); + + // Convert driver store version to Nvidia version + if (isVendorNVIDIA()) // NVIDIA + { + strVersion = std::regex_replace(strVersion, std::wregex(L"\\."), L""); + m_DriverVersion = std::stoi(strVersion.substr(strVersion.length() - 5, 5)); + } + else // AMD/Intel/WDDM + { + m_DriverVersion = std::stoi(strVersion.substr(0, strVersion.find('.'))); + } + } while (sta == ERROR_SUCCESS); + RegCloseKey(hKey); + + return m_DriverVersion; +} + +/** + * \brief Check if the vendor is AMD + * + * \return bool Returns true is the vendor is AMD + */ +bool VideoEnhancement::isVendorAMD(){ + return m_VendorId == VENDOR_ID_AMD; +} + +/** + * \brief Check if the vendor is Intel + * + * \return bool Returns true is the vendor is Intel + */ +bool VideoEnhancement::isVendorIntel(){ + return m_VendorId == VENDOR_ID_INTEL; +} + +/** + * \brief Check if the vendor is NVIDIA + * + * \return bool Returns true is the vendor is NVIDIA + */ +bool VideoEnhancement::isVendorNVIDIA(){ + return m_VendorId == VENDOR_ID_NVIDIA; +} + +/** + * \brief Check the Video Super-Resolution capability + * + * Check if the GPU used is capable of providing VSR feature according to its serie or driver version + * + * \return bool Returns true if the VSR feature is available + */ +bool VideoEnhancement::isVSRcapable(){ + if(isVendorAMD()){ + // [TODO] To be done once AMD provides the VSR solution + // Driver > 24 && RX 7000+ + } else if(isVendorIntel()){ + // All CPU with iGPU (Gen 10th), or dedicated GPU, are capable + if(m_DriverVersion >= MIN_VSR_DRIVER_VERSION_INTEL){ + return true; + } + } else if(isVendorNVIDIA()){ + // RTX VSR v1.5 supports NVIDIA RTX Series 20 starting from the Windows drive 545.84 (Oct 17, 2023). 
+ if( + m_GPUname.find(L"RTX") != std::wstring::npos + && m_DriverVersion >= MIN_VSR_DRIVER_VERSION_NVIDIA + ){ + return true; + } + } + return false; +} + +/** + * \brief Check the HDR capability + * + * Check if the GPU used is capable of providing SDR to HDR feature according to its serie or driver version + * + * \return bool Returns true if the HDR feature is available + */ +bool VideoEnhancement::isHDRcapable(){ + if(isVendorAMD()){ + // Not yet announced by AMD + } else if(isVendorIntel()){ + // Not yet announced by Intel + } else if(isVendorNVIDIA()){ + // RTX VSR v1.5 supports NVIDIA RTX Series 20 starting from the Windows drive 545.84 (Oct 17, 2023). + if( + m_GPUname.find(L"RTX") != std::wstring::npos + && m_DriverVersion >= MIN_HDR_DRIVER_VERSION_NVIDIA + ){ + return true; + } + } + return false; +} + +/** + * \brief Check the AI-Enhancement capability + * + * Check if the GPU used is capable of enhancing the video + * + * \return bool Returns true if the such capability is available + */ +bool VideoEnhancement::isEnhancementCapable(){ + return isVSRcapable() || isHDRcapable(); +} + +/** + * \brief Check if Video Enhancement feature is enabled + * + * \return bool Returns true if the Video Enhancement feature is enabled + */ +bool VideoEnhancement::isVideoEnhancementEnabled(){ + return m_Enabled; +} + +/** + * \brief Enable Video Enhancement feature + * + * \param bool activate Default is true, at true it enables the use of Video Enhancement feature + * \return bool Returns true if the Video Enhancement feature is available + */ +bool VideoEnhancement::enableVideoEnhancement(bool activate){ + m_Enabled = isEnhancementCapable() && activate; + return m_Enabled; +} + +/** + * \brief Enable Video Enhancement accessibility from the settings interface + * + * \param bool visible Default is true, at true it displays Video Enhancement feature + * \return bool Returns true if the Video Enhancement feature is available + */ +void 
VideoEnhancement::enableUIvisible(bool visible){ + m_UIvisible = visible; +} + +/** + * \brief Check if Video Enhancement feature is accessible from the settings interface + * + * \return bool Returns true if the Video Enhancement feature is accessible + */ +bool VideoEnhancement::isUIvisible(){ + return m_UIvisible; +} + +/** + * \brief Check if Video Enhancement feature is experimental from the vendor + * + * \return bool Returns true if the Video Enhancement feature is experimental + */ +bool VideoEnhancement::isExperimental(){ + // [Jan 31st 2024] AMD's is not yet available, Intel's is experimental, NVIDIA's is official + return isVendorIntel(); +} diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h new file mode 100644 index 000000000..24d8e7502 --- /dev/null +++ b/app/streaming/video/videoenhancement.h @@ -0,0 +1,71 @@ +#ifndef VIDEOENHANCEMENT_H +#define VIDEOENHANCEMENT_H + +#pragma once + +#include +#include +#include +#include +#include + +class VideoEnhancement : public QObject +{ + Q_OBJECT + +private: + + static VideoEnhancement* instance; + + bool m_Initialized = false; + bool m_Enabled = false; + bool m_UIvisible = true; // [Bruno] It should be false, and turn true by dxva2.cpp + + // Vendors' name (PCI Special Interest Group) + const int VENDOR_ID_AMD = 4098; + const int VENDOR_ID_INTEL = 32902; + const int VENDOR_ID_NVIDIA = 4318; + + // Minimum driver version accepted for VSR feature + const int MIN_VSR_DRIVER_VERSION_AMD = 24; // It is implemented from the driver 24.1.1 + const int MIN_VSR_DRIVER_VERSION_INTEL = 28; // It will ensure to cover 27.20 version + const int MIN_VSR_DRIVER_VERSION_NVIDIA = 54584; // NVIDIA driver name are always in the format XXX.XX (https://www.nvidia.com/en-gb/drivers/drivers-faq/) + + // Minimum driver version accepted for HDR feature + const int MIN_HDR_DRIVER_VERSION_AMD = 0; // To be determined, this feature has not yet been announced by AMD + const int 
MIN_HDR_DRIVER_VERSION_INTEL = 0; // To be determined, this feature has not yet been announced by Intel + const int MIN_HDR_DRIVER_VERSION_NVIDIA = 55123; // https://www.nvidia.com/download/driverResults.aspx/218114/en-us/ + + // GPU information + int m_VendorId = 0; + std::wstring m_GPUname = L"Unknown"; + int m_DriverVersion = 0; + + // Disable the constructor from outside to avoid a new instance + VideoEnhancement(); + + // Private copy constructor and assignment operator to prevent duplication + VideoEnhancement(const VideoEnhancement&); + VideoEnhancement& operator=(const VideoEnhancement&); + + bool setGPUinformation(); + int GetVideoDriverInfo(); + +public: + static VideoEnhancement& getInstance(); + bool isVendorAMD(); + bool isVendorIntel(); + bool isVendorNVIDIA(); + bool isEnhancementCapable(); + bool isVSRcapable(); + bool isHDRcapable(); + bool isVideoEnhancementEnabled(); + bool enableVideoEnhancement(bool activate = true); + void enableUIvisible(bool visible = true); + + Q_INVOKABLE bool isUIvisible(); + Q_INVOKABLE bool isExperimental(); + +}; + +#endif // VIDEOENHANCEMENT_H From 89b64855d64850edcb6dc07902bf0c29339c635b Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Thu, 8 Feb 2024 02:45:22 +0100 Subject: [PATCH 06/53] Add AI Super Resolution for NVIDIA and Intel GPUs Adding VideoProcessor to D3D11VA to offline video processing from CPU to GPU, and leveraging additional GPU capabilities such as AI enhancement for the upscaling and some filtering. Changes made: 1. VideoProcessor is used to render the frame only when "Video AI-Enhancement" is enabled; when disabled, the whole process is unchanged. 2. Add methods to enable the Video Super Resolution for NVIDIA, and Intel. AMD method is currently empty, need to POC the solution with the AMF documentation. 3. Add methods to enable SDR to HDR. Currently only NVIDIA has such feature, but the code place is prepared if Intel and AMD will too. 4. 
Some existing variables local to a method (like BackBufferResource) changed to global scope to be consumed be also VideoProcessor methods. 5. In ::initialize(), the application checks if the system is capable of leveraging GPU AI enhancement, if yes, it inform the UI to display the feature. 6. ColorSpace setups (Source/Stream) for HDR are not optimal, further improvment might be possible. Issues observed are commented in the code at relevant places. --- .../video/ffmpeg-renderers/d3d11va.cpp | 747 +++++++++++++++++- .../video/ffmpeg-renderers/d3d11va.h | 80 +- app/streaming/video/videoenhancement.cpp | 4 +- app/streaming/video/videoenhancement.h | 4 +- 4 files changed, 814 insertions(+), 21 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 1ac76d019..f636ac337 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -7,13 +7,28 @@ #include "streaming/streamutils.h" #include "streaming/session.h" +#include "settings/streamingpreferences.h" +#include "streaming/video/videoenhancement.h" + +#include +#include +#include +#include +#include + +extern "C" { +#include +} #include #include #include -#define SAFE_COM_RELEASE(x) if (x) { (x)->Release(); } +#pragma comment(lib, "d3d11.lib") +#pragma comment(lib, "dxgi.lib") + +#define SAFE_COM_RELEASE(x) if (x) { (x)->Release(); (x) = nullptr; } typedef struct _VERTEX { @@ -80,6 +95,10 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) m_SwapChain(nullptr), m_DeviceContext(nullptr), m_RenderTargetView(nullptr), + m_VideoDevice(nullptr), + m_VideoContext(nullptr), + m_VideoProcessor(nullptr), + m_VideoProcessorEnumerator(nullptr), m_LastColorSpace(-1), m_LastFullRange(false), m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), @@ -101,6 +120,8 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) m_ContextLock = SDL_CreateMutex(); DwmEnableMMCSS(TRUE); + + m_VideoEnhancement = 
&VideoEnhancement::getInstance();
 }
 
 D3D11VARenderer::~D3D11VARenderer()
 {
@@ -134,6 +155,8 @@ D3D11VARenderer::~D3D11VARenderer()
 
     SAFE_COM_RELEASE(m_OverlayPixelShader);
 
+    SAFE_COM_RELEASE(m_BackBufferResource);
+
     SAFE_COM_RELEASE(m_RenderTargetView);
 
     SAFE_COM_RELEASE(m_SwapChain);
@@ -150,11 +173,130 @@ D3D11VARenderer::~D3D11VARenderer()
     else {
         SAFE_COM_RELEASE(m_Device);
         SAFE_COM_RELEASE(m_DeviceContext);
+        SAFE_COM_RELEASE(m_VideoDevice);
+        SAFE_COM_RELEASE(m_VideoContext);
     }
 
     SAFE_COM_RELEASE(m_Factory);
 }
 
+/**
+ * \brief Set the HDR stream (input) metadata
+ *
+ * Get the stream HDR metadata via the Limelight library
+ *
+ * \param none The metadata is queried internally via LiGetHdrMetadata()
+ * \return void
+ */
+void D3D11VARenderer::setHDRStream(){
+    DXGI_HDR_METADATA_HDR10 streamHDRMetaData;
+    // Prepare HDR Meta Data for Stream content
+    SS_HDR_METADATA hdrMetadata;
+    // if (m_VideoProcessor.p && m_IsHDRenabled && LiGetHdrMetadata(&hdrMetadata)) {
+    if (m_VideoProcessor.p && LiGetHdrMetadata(&hdrMetadata)) {
+
+        // Magic constants to convert to fixed point.
+ // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 + static constexpr int kMinLuminanceFixedPoint = 10000; + + streamHDRMetaData.RedPrimary[0] = hdrMetadata.displayPrimaries[0].x; + streamHDRMetaData.RedPrimary[1] = hdrMetadata.displayPrimaries[0].y; + streamHDRMetaData.GreenPrimary[0] = hdrMetadata.displayPrimaries[1].x; + streamHDRMetaData.GreenPrimary[1] = hdrMetadata.displayPrimaries[1].y; + streamHDRMetaData.BluePrimary[0] = hdrMetadata.displayPrimaries[2].x; + streamHDRMetaData.BluePrimary[1] = hdrMetadata.displayPrimaries[2].y; + streamHDRMetaData.WhitePoint[0] = hdrMetadata.whitePoint.x; + streamHDRMetaData.WhitePoint[1] = hdrMetadata.whitePoint.y; + streamHDRMetaData.MaxMasteringLuminance = hdrMetadata.maxDisplayLuminance * kMinLuminanceFixedPoint; + streamHDRMetaData.MinMasteringLuminance = hdrMetadata.minDisplayLuminance; + + streamHDRMetaData.MaxContentLightLevel = hdrMetadata.maxContentLightLevel; + streamHDRMetaData.MaxFrameAverageLightLevel = hdrMetadata.maxFrameAverageLightLevel; + if(streamHDRMetaData.MaxContentLightLevel == 0){ + streamHDRMetaData.MaxContentLightLevel = streamHDRMetaData.MaxFrameAverageLightLevel; + } + + // [TODO] (Source: https://github.com/xbmc) For AMD/HDR, + // we apparently need to do a custom tone (MaxMasteringLuminance=10000, MinMasteringLuminance=0). 
+        // Yet to be verified
+        // if(m_VideoEnhancement->isVendorAMD()){
+        //     m_StreamHDRMetaData.MaxMasteringLuminance = 10000;
+        //     m_StreamHDRMetaData.MinMasteringLuminance = 0;
+        // }
+
+        // Set HDR Stream (input) Meta data
+        m_VideoContext->VideoProcessorSetStreamHDRMetaData(
+            m_VideoProcessor,
+            0,
+            DXGI_HDR_METADATA_TYPE_HDR10,
+            sizeof(DXGI_HDR_METADATA_HDR10),
+            &streamHDRMetaData
+            );
+    }
+}
+
+/**
+ * \brief Set the monitor (output) HDR metadata
+ *
+ * Get the monitor HDR metadata via DXGI (IDXGIOutput6::GetDesc1)
+ *
+ * \param none The metadata is queried internally from the swap chain's containing output
+ * \return void
+ */
+void D3D11VARenderer::setHDROutPut(){
+
+    DXGI_HDR_METADATA_HDR10 outputHDRMetaData;
+
+    // Find the monitor attached to the application
+    Microsoft::WRL::ComPtr<IDXGIOutput> pOutput;
+    if (SUCCEEDED(m_SwapChain->GetContainingOutput(&pOutput))) {
+        Microsoft::WRL::ComPtr<IDXGIOutput6> pOutput6;
+        if (SUCCEEDED(pOutput.As(&pOutput6))){
+            DXGI_OUTPUT_DESC1 desc1 {};
+            if (SUCCEEDED(pOutput6->GetDesc1(&desc1))){
+                // Get Monitor ColorSpace for SDR and HDR (if the monitor is capable of HDR)
+                m_OutputColorSpace = desc1.ColorSpace;
+
+                // Magic constants to convert to fixed point.
+ // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 + static constexpr int kPrimariesFixedPoint = 50000; + static constexpr int kMinLuminanceFixedPoint = 10000; + + // Format Monitor HDR MetaData + outputHDRMetaData.RedPrimary[0] = desc1.RedPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.RedPrimary[1] = desc1.RedPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[0] = desc1.GreenPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[1] = desc1.GreenPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[0] = desc1.BluePrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[1] = desc1.BluePrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[0] = desc1.WhitePoint[0] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[1] = desc1.WhitePoint[1] * kPrimariesFixedPoint; + outputHDRMetaData.MaxMasteringLuminance = desc1.MaxLuminance; + outputHDRMetaData.MinMasteringLuminance = desc1.MinLuminance * kMinLuminanceFixedPoint; + // MaxContentLightLevel is not available in DXGI_OUTPUT_DESC1 structure + // https://learn.microsoft.com/fr-fr/windows/win32/api/dxgi1_6/ns-dxgi1_6-dxgi_output_desc1 + // But MaxContentLightLevel is not needed and greater or equal to MaxFullFrameLuminance, so it is safe to set a minimum for it + // https://professionalsupport.dolby.com/s/article/Calculation-of-MaxFALL-and-MaxCLL-metadata + // Also note that these are not fixed-point. 
+ outputHDRMetaData.MaxContentLightLevel = desc1.MaxFullFrameLuminance; + outputHDRMetaData.MaxFrameAverageLightLevel = desc1.MaxFullFrameLuminance; + } + } + } + + if (m_VideoProcessor.p) { + // Prepare HDR for the OutPut Monitor + m_VideoContext->VideoProcessorSetOutputHDRMetaData( + m_VideoProcessor, + DXGI_HDR_METADATA_TYPE_HDR10, + sizeof(DXGI_HDR_METADATA_HDR10), + &outputHDRMetaData + ); + } + + m_SwapChain->SetHDRMetaData(DXGI_HDR_METADATA_TYPE_HDR10, sizeof(outputHDRMetaData), &outputHDRMetaData); +} + bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound) { bool success = false; @@ -218,11 +360,33 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter goto Exit; } + // Get video device + if (!m_VideoDevice) { + hr = m_Device->QueryInterface(__uuidof(ID3D11VideoDevice), + (void**)&m_VideoDevice); + if (FAILED(hr)) { + return false; + } + } + + // Get video context + if (!m_VideoContext) { + hr = m_DeviceContext->QueryInterface(__uuidof(ID3D11VideoContext2), + (void**)&m_VideoContext); + if (FAILED(hr)) { + return false; + } + } + if (!checkDecoderSupport(adapter)) { m_DeviceContext->Release(); m_DeviceContext = nullptr; m_Device->Release(); m_Device = nullptr; + m_VideoContext->Release(); + m_VideoContext = nullptr; + m_VideoDevice->Release(); + m_VideoDevice = nullptr; goto Exit; } @@ -237,9 +401,242 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter return success; } +/** + * \brief Enable Video Super-Resolution for AMD GPU + * + * This feature is available starting from AMD series 7000 and driver AMD Software 24.1.1 (Jan 23, 2024) + * https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 + * + * \return void + */ +void D3D11VARenderer::enableAMDVideoSuperResolution(bool activate){ + // The feature is available since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 check how to implement it + // 
https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 + if(m_VideoEnhancement->isVendorAMD() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isVSRcapable()){ + // [TODO] Implement AMD Video Scaler + // Documentation and DX11 code sample + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_VQ_Enhancer_API.md + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/public/samples/CPPSamples/SimpleEncoder/SimpleEncoder.cpp + } +} + +/** + * \brief Enable Video Super-Resolution for Intel GPU + * + * This experimental feature from Intel is available starting from Intel iGPU from CPU Gen 10th (Skylake) and Intel graphics driver 27.20.100.8681 (Sept 15, 2020) + * Only Arc GPUs seem to provide visual improvement + * https://www.techpowerup.com/305558/intel-outs-video-super-resolution-for-chromium-browsers-works-with-igpus-11th-gen-onward + * + * \return void + */ +void D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ + HRESULT hr; + + if(m_VideoEnhancement->isVendorIntel() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isVSRcapable()){ + + constexpr GUID GUID_INTEL_VPE_INTERFACE = {0xedd1d4b9, 0x8659, 0x4cbc, {0xa4, 0xd6, 0x98, 0x31, 0xa2, 0x16, 0x3a, 0xc3}}; + constexpr UINT kIntelVpeFnVersion = 0x01; + constexpr UINT kIntelVpeFnMode = 0x20; + constexpr UINT kIntelVpeFnScaling = 0x37; + constexpr UINT kIntelVpeVersion3 = 0x0003; + constexpr UINT kIntelVpeModeNone = 0x0; + constexpr UINT kIntelVpeModePreproc = 0x01; + constexpr UINT kIntelVpeScalingDefault = 0x0; + constexpr UINT kIntelVpeScalingSuperResolution = 0x2; + + UINT param = 0; + + struct IntelVpeExt + { + UINT function; + void* param; + }; + + IntelVpeExt ext{0, ¶m}; + + ext.function = kIntelVpeFnVersion; + param = kIntelVpeVersion3; + + hr = 
m_VideoContext->VideoProcessorSetOutputExtension( + m_VideoProcessor, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel VPE version failed: %x", + hr); + return; + } + + ext.function = kIntelVpeFnMode; + if(activate){ + param = kIntelVpeModePreproc; + } else { + param = kIntelVpeModeNone; + } + + hr = m_VideoContext->VideoProcessorSetOutputExtension( + m_VideoProcessor, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel VPE mode failed: %x", + hr); + return; + } + + ext.function = kIntelVpeFnScaling; + if(activate){ + param = kIntelVpeScalingSuperResolution; + } else { + param = kIntelVpeScalingDefault; + } + + hr = m_VideoContext->VideoProcessorSetStreamExtension( + m_VideoProcessor, 0, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel Video Super Resolution failed: %x", + hr); + return; + } + + if(activate){ + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution enabled"); + } else { + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution disabled"); + } + } +} + +/** + * \brief Enable Video Super-Resolution for NVIDIA + * + * This feature is available starting from series NVIDIA RTX 2000 and GeForce driver 545.84 (Oct 17, 2023) + * + * IMPORTANT (Feb 5th, 2024): RTX VSR seems to be limited to SDR content only, + * it does add a grey filter if it is activated while HDR is on on stream (Host setting does not impact it). 
+ * It might be fixed later by NVIDIA, but the temporary solution is to disable the feature when Stream content is HDR-on + * + * \return void + */ +void D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate){ + HRESULT hr; + + + if(m_VideoEnhancement->isVendorNVIDIA() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isVSRcapable()){ + + // Toggle VSR + constexpr GUID GUID_NVIDIA_PPE_INTERFACE = {0xd43ce1b3, 0x1f4b, 0x48ac, {0xba, 0xee, 0xc3, 0xc2, 0x53, 0x75, 0xe6, 0xf7}}; + constexpr UINT kStreamExtensionVersionV1 = 0x1; + constexpr UINT kStreamExtensionMethodSuperResolution = 0x2; + + struct NvidiaStreamExt + { + UINT version; + UINT method; + UINT enable; + }; + + // Convert bool to UINT + UINT enable = activate; + + NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV1, kStreamExtensionMethodSuperResolution, enable}; + hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor, 0, &GUID_NVIDIA_PPE_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); + if (FAILED(hr)) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "NVIDIA RTX Video Super Resolution failed: %x", + hr); + return; + } + + if(activate){ + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution enabled"); + } else { + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution disabled"); + } + } +} + +/** + * \brief Enable HDR for AMD GPU + * + * This feature is not availble for AMD, and has not yet been announced (by Jan 24th, 2024) + * + * \return void + */ +void D3D11VARenderer::enableAMDHDR(bool activate){ + if(m_VideoEnhancement->isVendorAMD() && m_VideoEnhancement->isHDRcapable()){ + // [TODO] Feature not yet announced + } +} + +/** + * \brief Enable HDR for Intel GPU + * + * This feature is not availble for Intel, and has not yet been announced (by Jan 24th, 2024) + * + * \return void + */ +void D3D11VARenderer::enableIntelHDR(bool activate){ + if(m_VideoEnhancement->isVendorIntel() 
&& m_VideoEnhancement->isHDRcapable()){ + // [TODO] Feature not yet announced + } +} + +/** + * \brief Enable HDR for NVIDIA + * + * This feature is available starting from series NVIDIA RTX 2000 and GeForce driver 545.84 (Oct 17, 2023) + * + * Note: Even if the feature is enabled, I could not find any settings of ColorSpace and DXG8Format which + * can work without having the screen darker. Here are what I found: + * 1) Moonlight HDR: Checked / SwapChain: DXGI_FORMAT_R10G10B10A2_UNORM / VideoTexture: DXGI_FORMAT_P010 => SDR convert to HDR, but with darker rendering + * 2) Moonlight HDR: Unchecked / SwapChain: DXGI_FORMAT_R10G10B10A2_UNORM / VideoTexture: DXGI_FORMAT_NV12 => SDR convert to HDR, but with darker rendering + * + * \return void + */ +void D3D11VARenderer::enableNvidiaHDR(bool activate){ + HRESULT hr; + + if(m_VideoEnhancement->isVendorNVIDIA() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isHDRcapable()){ + + // Toggle HDR + constexpr GUID GUID_NVIDIA_TRUE_HDR_INTERFACE = {0xfdd62bb4, 0x620b, 0x4fd7, {0x9a, 0xb3, 0x1e, 0x59, 0xd0, 0xd5, 0x44, 0xb3}}; + constexpr UINT kStreamExtensionVersionV4 = 0x4; + constexpr UINT kStreamExtensionMethodTrueHDR = 0x3; + + struct NvidiaStreamExt + { + UINT version; + UINT method; + UINT enable : 1; + UINT reserved : 31; + }; + + // Convert bool to UINT + UINT enable = activate; + + NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV4, kStreamExtensionMethodTrueHDR, enable, 0u}; + hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor, 0, &GUID_NVIDIA_TRUE_HDR_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); + if (FAILED(hr)) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "NVIDIA RTX HDR failed: %x", + hr); + return; + } + + if(activate){ + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR enabled"); + } else { + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR disabled"); + } + } +} + bool 
D3D11VARenderer::initialize(PDECODER_PARAMETERS params) { - int adapterIndex, outputIndex; HRESULT hr; m_DecoderParams = *params; @@ -258,7 +655,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } if (!SDL_DXGIGetOutputInfo(SDL_GetWindowDisplayIndex(params->window), - &adapterIndex, &outputIndex)) { + &m_AdapterIndex, &m_OutputIndex)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "SDL_DXGIGetOutputInfo() failed: %s", SDL_GetError()); @@ -275,12 +672,12 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // First try the adapter corresponding to the display where our window resides. // This will let us avoid a copy if the display GPU has the required decoder. - if (!createDeviceByAdapterIndex(adapterIndex)) { + if (!createDeviceByAdapterIndex(m_AdapterIndex)) { // If that didn't work, we'll try all GPUs in order until we find one // or run out of GPUs (DXGI_ERROR_NOT_FOUND from EnumAdapters()) bool adapterNotFound = false; for (int i = 0; !adapterNotFound; i++) { - if (i == adapterIndex) { + if (i == m_AdapterIndex) { // Don't try the same GPU again continue; } @@ -339,6 +736,14 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; } + // [TODO] With NVIDIA RTX, while renderering using VideoProcessor with HDR activated in Moonlight, + // DXGI_FORMAT_R10G10B10A2_UNORM gives worse result than DXGI_FORMAT_R8G8B8A8_UNORM. + // Without this fix, HDR off on server renders gray screen and VSR is inactive (DXGI_COLOR_SPACE_TYPE type 8). + // For user perspective, it is better to not see such a bug, so for the moment I choose to force DXGI_FORMAT_R8G8B8A8_UNORM + if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_VideoEnhancement->isVendorNVIDIA()){ + swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; + } + // Use DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING with flip mode for non-vsync case, if possible. // NOTE: This is only possible in windowed or borderless windowed mode. 
if (!params->enableVsync) { @@ -435,6 +840,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // AVHWDeviceContext takes ownership of these objects d3d11vaDeviceContext->device = m_Device; d3d11vaDeviceContext->device_context = m_DeviceContext; + d3d11vaDeviceContext->video_device = m_VideoDevice; + d3d11vaDeviceContext->video_context = m_VideoContext; // Set lock functions that we will use to synchronize with FFmpeg's usage of our device context d3d11vaDeviceContext->lock = lockContext; @@ -455,6 +862,47 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } + // Check if the GPU is capable of AI-Enhancement + // This capability setup is place in this method because it is only available on FFmpeg with DirectX for hardware acceleration + m_VideoEnhancement->enableVideoEnhancement(false); + if(m_VideoEnhancement->isEnhancementCapable()){ + + // Enable the visibility of Video enhancement feature + m_VideoEnhancement->enableUIvisible(); + + StreamingPreferences streamingPreferences; + if(streamingPreferences.videoEnhancement){ + + if(createVideoProcessor()){ + m_VideoEnhancement->enableVideoEnhancement(true); + } + + // Enable VSR feature if available + if(m_VideoEnhancement->isVSRcapable()){ + if(m_VideoEnhancement->isVendorAMD()){ + enableAMDVideoSuperResolution(); + } else if(m_VideoEnhancement->isVendorIntel()){ + enableIntelVideoSuperResolution(); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + enableNvidiaVideoSuperResolution(); + } + } + + // Enable SDR->HDR feature if available + if(m_VideoEnhancement->isHDRcapable()){ + if(m_VideoEnhancement->isVendorAMD()){ + enableAMDHDR(); + } else if(m_VideoEnhancement->isVendorIntel()){ + enableIntelHDR(); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + enableNvidiaHDR(); + } + } + } + } + + SAFE_COM_RELEASE(m_BackBufferResource); + return true; } @@ -681,6 +1129,95 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) m_LastFullRange = fullRange; } +/** + * \brief 
Set the output colorspace + * + * According to the colorspace from the source, set the corresponding output colorspace + * + * \param AVFrame* frame The frame to be displayed on screen + * \return void + */ +void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) +{ + //Do Nothing when Moolight's HDR is disabled + if(!m_IsHDRenabled){ + return; + } + + bool frameFullRange = isFrameFullRange(frame); + int frameColorSpace = getFrameColorspace(frame); + + // [TODO] Fix the bug with VideoProcessorSetStreamColorSpace1 not working from the first frame + // BUG: If I try to set m_StreamColorSpace correctly since the beginning (=14), the renderer doesn't apply the color space, + // The frame appear gray. The temporary fix is to start from a wrong Color space (13), and switch to 14 after few milliseconds. + // At set the time to 100ms to not have any visual impact at loading, but even 1ms fix the issue, the bug might be linked to the 1st frame. + // This is a non-blocking issue, but I need to investigate further the reason of such a behavior. 
+ auto now = std::chrono::system_clock::now(); + long nowTime = std::chrono::duration_cast(now.time_since_epoch()).count(); + if(setStreamColorSpace && nowTime >= nextTime){ + nextTime = nowTime + increment; + if(streamIndex >= 1){ + setStreamColorSpace = false; + } + switch (frameColorSpace) { + case COLORSPACE_REC_2020: + m_StreamColorSpace = StreamColorSpacesFixHDR[streamIndex]; + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); + break; + default: + m_StreamColorSpace = StreamColorSpacesFixSDR[streamIndex]; + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); + } + if(setStreamColorSpace){ + streamIndex++; + } + } + + // If nothing has changed since last frame, we're done + if (frameColorSpace == m_LastColorSpace && frameFullRange == m_LastFullRange) { + return; + } + + m_LastColorSpace = frameColorSpace; + m_LastFullRange = frameFullRange; + + switch (frameColorSpace) { + case COLORSPACE_REC_2020: + // For an unclear reason in HDR mode (D3D11 bug?), when the 4 following filters, Brightness (0), Contrast (100), hue (0) and saturation (100), + // are all together at their default value, the tone tends to slight red. It is easy to see when streaming its own screen + // using an inception effect. + // The solution is the set Hue at -1, it doesn't impact the visual (compare to others), and it fixes the color issue. 
+ m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, -1); + // This Stream Color Space accepts HDR mode from Server, but NVIDIA AI-HDR will be disbaled (which is fine as we already have native HDR) + m_StreamColorSpace = ColorSpaces[14]; + if(m_VideoEnhancement->isVendorNVIDIA()){ + // [TODO] Remove this line if NVIDIA fix the issue of having VSR not working (add a gray filter) + // while HDR is activated for Stream content (swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM;) + enableNvidiaVideoSuperResolution(); // Turn it "false" if we prefer to not see the white border around elements when VSR is active. + } + // Reset the fix HDR Stream + setStreamColorSpace = true; + streamIndex = 0; + break; + default: + // For SDR we can use default values. + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, 0); + // This Stream Color Space is SDR, which enable the use of NVIDIA AI-HDR (Moonlight's HDR needs to be enabled) + // I don't know why, it is gray when HDR is on on Moonlight while using DXGI_FORMAT_R10G10B10A2_UNORM for the SwapChain, + // the fix is to force using DXGI_FORMAT_R8G8B8A8_UNORM which seems somehow not impacting the color rendering + m_StreamColorSpace = ColorSpaces[8]; + if(m_VideoEnhancement->isVendorNVIDIA()){ + // Always enable NVIDIA VSR for SDR Stream content + enableNvidiaVideoSuperResolution(); + } + // Reset the fix SDR Stream as it does work when back and forth with HDR on Server + setStreamColorSpace = true; + streamIndex = 0; + } + + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); +} + void D3D11VARenderer::renderVideo(AVFrame* frame) { // Bind video rendering vertex buffer @@ -698,14 +1235,189 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) srcBox.back = 1; m_DeviceContext->CopySubresourceRegion(m_VideoTexture, 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], 
(int)(intptr_t)frame->data[1], &srcBox); - // Bind our CSC shader (and constant buffer, if required) - bindColorConversion(frame); + // Draw the video + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + // Prepare the Stream + prepareVideoProcessorStream(frame); + // Render to the front the frames processed by the Video Processor + m_VideoContext->VideoProcessorBlt(m_VideoProcessor, m_OutputView.Get(), 0, 1, &m_StreamData); + } else { + // Bind our CSC shader (and constant buffer, if required) + bindColorConversion(frame); - // Bind SRVs for this frame - m_DeviceContext->PSSetShaderResources(0, 2, m_VideoTextureResourceViews); + // Bind SRVs for this frame + m_DeviceContext->PSSetShaderResources(0, 2, m_VideoTextureResourceViews); - // Draw the video - m_DeviceContext->DrawIndexed(6, 0, 0); + // Draw the video + m_DeviceContext->DrawIndexed(6, 0, 0); + } +} + +/** + * \brief Add the Video Processor to the pipeline + * + * Creating a Video Processor add additional GPU video processing method like AI Upscaling + * + * \param bool reset default is false, at true it forces the recreate the Video Processor + * \return bool Returns true if the Video processor is successfully created + */ +bool D3D11VARenderer::createVideoProcessor(bool reset) +{ + HRESULT hr; + + // [TODO] This timer is only used to fix a problem with VideoProcessorSetStreamColorSpace1 not properly applied at the beginning + // These 3 lines can be removed once the bug (non-blocking) is fixed. 
+ auto now = std::chrono::system_clock::now(); + startTime = std::chrono::duration_cast(now.time_since_epoch()).count(); + nextTime = startTime + increment; + + D3D11_VIDEO_PROCESSOR_CONTENT_DESC content_desc; + ZeroMemory(&content_desc, sizeof(content_desc)); + + if (m_VideoProcessor.p && m_VideoProcessorEnumerator.p) { + hr = m_VideoProcessorEnumerator->GetVideoProcessorContentDesc(&content_desc); + if (FAILED(hr)) + return false; + + if (content_desc.InputWidth != m_DecoderParams.width || + content_desc.InputHeight != m_DecoderParams.height || + content_desc.OutputWidth != m_DisplayWidth || + content_desc.OutputHeight != m_DisplayHeight || reset) { + m_VideoProcessorEnumerator.Release(); + m_VideoProcessor.Release(); + } + else { + return true; + } + } + + ZeroMemory(&content_desc, sizeof(content_desc)); + content_desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE; + content_desc.InputFrameRate.Numerator = m_DecoderParams.frameRate; + content_desc.InputFrameRate.Denominator = 1; + content_desc.InputWidth = m_DecoderParams.width; + content_desc.InputHeight = m_DecoderParams.height; + content_desc.OutputWidth = m_DisplayWidth; + content_desc.OutputHeight = m_DisplayHeight; + content_desc.OutputFrameRate.Numerator = m_DecoderParams.frameRate; + content_desc.OutputFrameRate.Denominator = 1; + content_desc.Usage = D3D11_VIDEO_USAGE_OPTIMAL_SPEED; + + hr = m_VideoDevice->CreateVideoProcessorEnumerator(&content_desc, &m_VideoProcessorEnumerator); + if (FAILED(hr)) + return false; + + hr = m_VideoDevice->CreateVideoProcessor(m_VideoProcessorEnumerator, 0, + &m_VideoProcessor); + if (FAILED(hr)) + return false; + + m_VideoContext->VideoProcessorSetStreamAutoProcessingMode(m_VideoProcessor, 0, false); + m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, 0); + + // The output surface will be read by Direct3D shaders (It seems useless in our context) + 
m_VideoContext->VideoProcessorSetOutputShaderUsage(m_VideoProcessor, true); + + // Set Background color + D3D11_VIDEO_COLOR bgColor; + bgColor.YCbCr = { 0.0625f, 0.5f, 0.5f, 1.0f }; // black color + m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor, true, &bgColor); + + ZeroMemory(&m_OutputViewDesc, sizeof(m_OutputViewDesc)); + m_OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D; + m_OutputViewDesc.Texture2D.MipSlice = 0; + + hr = m_VideoDevice->CreateVideoProcessorOutputView( + m_BackBufferResource, + m_VideoProcessorEnumerator, + &m_OutputViewDesc, + (ID3D11VideoProcessorOutputView**)&m_OutputView); + if (FAILED(hr)) { + return false; + } + + ZeroMemory(&m_InputViewDesc, sizeof(m_InputViewDesc)); + m_InputViewDesc.FourCC = 0; + m_InputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; + m_InputViewDesc.Texture2D.MipSlice = 0; + m_InputViewDesc.Texture2D.ArraySlice = 0; + + hr = m_VideoDevice->CreateVideoProcessorInputView( + m_VideoTexture, m_VideoProcessorEnumerator, &m_InputViewDesc, (ID3D11VideoProcessorInputView**)&m_InputView); + if (FAILED(hr)) + return false; + + // Apply processed filters to the surface + RECT srcRect = { 0 }; + srcRect.right = m_DecoderParams.width; + srcRect.bottom = m_DecoderParams.height; + + RECT dstRect = { 0 }; + dstRect.right = m_DisplayWidth; + dstRect.bottom = m_DisplayHeight; + + // Sscale the source to the destination surface while keeping the same ratio + float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); + float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); + + // [TODO] There is a behavior I don't understand (bug?) when the destination desRect is larger by one of its side than the source srcRect. + // If it is bigger, the window becomes black, but if it is smaller it is fine. + // Only one case is working when it is bigger is when the dstRest perfectly equal to the Display size. 
+ // Investigation: If there anything to do with pixel alignment (c.f. dxva2.cpp FFALIGN), or screenSpaceToNormalizedDeviceCoords ? + // Fix: When bigger we strech the picture to the window, it will be deformed, but at least will not crash. + if(m_DisplayWidth < m_DecoderParams.width && m_DisplayHeight < m_DecoderParams.height){ + if(ratioHeight < ratioWidth){ + // Adjust the Width + long width = static_cast(std::floor(m_DecoderParams.width * ratioHeight)); + dstRect.left = static_cast(std::floor( abs(m_DisplayWidth - width) / 2 )); + dstRect.right = dstRect.left + width; + } else if(ratioWidth < ratioHeight) { + // Adjust the Height + long height = static_cast(std::floor(m_DecoderParams.height * ratioWidth)); + dstRect.top = static_cast(std::floor( abs(m_DisplayHeight - height) / 2 )); + dstRect.bottom = dstRect.top + height; + } + } + + m_VideoContext->VideoProcessorSetStreamSourceRect(m_VideoProcessor, 0, true, &srcRect); + m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor, 0, true, &dstRect); + m_VideoContext->VideoProcessorSetStreamFrameFormat(m_VideoProcessor, 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE); + + ZeroMemory(&m_StreamData, sizeof(m_StreamData)); + m_StreamData.Enable = true; + m_StreamData.OutputIndex = m_OutputIndex; + m_StreamData.InputFrameOrField = 0; + m_StreamData.PastFrames = 0; + m_StreamData.FutureFrames = 0; + m_StreamData.ppPastSurfaces = nullptr; + m_StreamData.ppFutureSurfaces = nullptr; + m_StreamData.pInputSurface = m_InputView.Get(); + m_StreamData.ppPastSurfacesRight = nullptr; + m_StreamData.ppFutureSurfacesRight = nullptr; + m_StreamData.pInputSurfaceRight = nullptr; + + // Prepare HDR Meta Data for Stream content + setHDRStream(); + + // Prepare HDR Meta Data for the OutPut Monitor, will be ignored while using SDR + setHDROutPut(); + + // Set OutPut ColorSpace, m_OutputColorSpace is found from the active monitor earlier in D3D11VARenderer::initialize() + 
m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, m_OutputColorSpace);
+
+    // This section is a customization to slightly enhance the frame (non-AI filtering)
+    // Reduces artifacts (like pixelation around text); works in addition to AI-enhancement for a better result
+    m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100)
+    // Slightly sharpens the picture to enhance details; works in addition to AI-enhancement for a better result
+    m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 50); // (0 / 0 / 100)
+    // Has no effect as the picture is not distorted
+    m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_ANAMORPHIC_SCALING, true, 100); // (0 / 0 / 100)
+
+
+    setStreamColorSpace = true;
+    m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace);
+
+    return true;
+}
 
 // This function must NOT use any DXGI or ID3D11DeviceContext methods
@@ -860,6 +1572,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
 
     // Check if the format is supported by this decoder
     BOOL supported;
+    m_IsHDRenabled = false;
 
     switch (m_DecoderParams.videoFormat) {
     case VIDEO_FORMAT_H264:
@@ -905,6 +1618,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
             videoDevice->Release();
             return false;
         }
+        m_IsHDRenabled = true;
         break;
 
     case VIDEO_FORMAT_AV1_MAIN8:
@@ -935,6 +1649,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter)
             videoDevice->Release();
             return false;
         }
+        m_IsHDRenabled = true;
         break;
 
     default:
@@ -1130,8 +1845,7 @@ bool D3D11VARenderer::setupRenderingResources()
 
     // Create our render target view
     {
-        ID3D11Resource* backBufferResource;
-        hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource), (void**)&backBufferResource);
+        hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource),
(void**)&m_BackBufferResource); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "IDXGISwapChain::GetBuffer() failed: %x", @@ -1139,8 +1853,8 @@ bool D3D11VARenderer::setupRenderingResources() return false; } - hr = m_Device->CreateRenderTargetView(backBufferResource, nullptr, &m_RenderTargetView); - backBufferResource->Release(); + hr = m_Device->CreateRenderTargetView(m_BackBufferResource, nullptr, &m_RenderTargetView); + // m_BackBufferResource is still needed in createVideoProcessor(), therefore will be released later if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateRenderTargetView() failed: %x", @@ -1280,7 +1994,8 @@ bool D3D11VARenderer::setupVideoTexture() texDesc.SampleDesc.Quality = 0; texDesc.SampleDesc.Count = 1; texDesc.Usage = D3D11_USAGE_DEFAULT; - texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; + // The flag D3D11_BIND_RENDER_TARGET is needed to enable the use of GPU enhancement + texDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE; texDesc.CPUAccessFlags = 0; texDesc.MiscFlags = 0; diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index 8421fdb46..bc10e4d89 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -2,8 +2,12 @@ #include "renderer.h" -#include +#include #include +#include +#include +#include +#include "streaming/video/videoenhancement.h" extern "C" { #include @@ -30,12 +34,24 @@ class D3D11VARenderer : public IFFmpegRenderer bool setupVideoTexture(); void renderOverlay(Overlay::OverlayType type); void bindColorConversion(AVFrame* frame); + void prepareVideoProcessorStream(AVFrame* frame); void renderVideo(AVFrame* frame); + bool createVideoProcessor(bool reset = false); + void enableAMDVideoSuperResolution(bool activate = true); + void enableIntelVideoSuperResolution(bool activate = true); + void enableNvidiaVideoSuperResolution(bool activate = true); 
+ void enableAMDHDR(bool activate = true); + void enableIntelHDR(bool activate = true); + void enableNvidiaHDR(bool activate = true); bool checkDecoderSupport(IDXGIAdapter* adapter); bool createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound = nullptr); + void setHDRStream(); + void setHDROutPut(); int m_DecoderSelectionPass; + int m_AdapterIndex = 0; + int m_OutputIndex = 0; IDXGIFactory5* m_Factory; ID3D11Device* m_Device; IDXGISwapChain4* m_SwapChain; @@ -43,6 +59,68 @@ class D3D11VARenderer : public IFFmpegRenderer ID3D11RenderTargetView* m_RenderTargetView; SDL_mutex* m_ContextLock; + ID3D11VideoDevice* m_VideoDevice; + ID3D11VideoContext2* m_VideoContext; + CComPtr m_VideoProcessor; + CComPtr m_VideoProcessorEnumerator; + D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC m_OutputViewDesc; + D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC m_InputViewDesc; + D3D11_VIDEO_PROCESSOR_STREAM m_StreamData; + Microsoft::WRL::ComPtr m_OutputView; + Microsoft::WRL::ComPtr m_InputView; + ID3D11Resource* m_BackBufferResource; + VideoEnhancement* m_VideoEnhancement; + bool m_IsHDRenabled = false; + + // Variable unused, but keep it as reference for debugging purpose + DXGI_COLOR_SPACE_TYPE ColorSpaces[26] = { + DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709, // 0 - A + DXGI_COLOR_SPACE_RGB_FULL_G10_NONE_P709, // 1 - A + DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709, // 2 - I * A + DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P2020, // 3 - I* + DXGI_COLOR_SPACE_RESERVED, // 4 + DXGI_COLOR_SPACE_YCBCR_FULL_G22_NONE_P709_X601, // 5 - O A + DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P601, // 6 - I A + DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P601, // 7 - O A + DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709, // 8 - I A + DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709, // 9 - A + DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020, // 10 - I + DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P2020, // 11 - O + DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020, // 12 - O O + DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020, // 13 - I + 
DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020, // 14 - I I* + DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_TOPLEFT_P2020, // 15 - I + DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_TOPLEFT_P2020, // 16 - I + DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P2020, // 17 - I I* + DXGI_COLOR_SPACE_YCBCR_STUDIO_GHLG_TOPLEFT_P2020, // 18 - I + DXGI_COLOR_SPACE_YCBCR_FULL_GHLG_TOPLEFT_P2020, // 19 - I + DXGI_COLOR_SPACE_RGB_STUDIO_G24_NONE_P709, // 20 - I I* + DXGI_COLOR_SPACE_RGB_STUDIO_G24_NONE_P2020, // 21 - I* + DXGI_COLOR_SPACE_YCBCR_STUDIO_G24_LEFT_P709, // 22 - I + DXGI_COLOR_SPACE_YCBCR_STUDIO_G24_LEFT_P2020, // 23 - I I + DXGI_COLOR_SPACE_YCBCR_STUDIO_G24_TOPLEFT_P2020, // 24 - I + DXGI_COLOR_SPACE_CUSTOM, // 25 + }; + + DXGI_COLOR_SPACE_TYPE m_StreamColorSpace = ColorSpaces[8]; // SDR-Only (HDR is 14) + DXGI_COLOR_SPACE_TYPE m_OutputColorSpace = ColorSpaces[12]; // SDR & HDR + + // [TODO] Remove the timer feature once the bug with VideoProcessorSetStreamColorSpace1 is fixed + bool setStreamColorSpace = true; + long startTime; + long nextTime; + int streamIndex = 0; + int increment = 100; + DXGI_COLOR_SPACE_TYPE StreamColorSpacesFixHDR[2] = { + DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020, // 13 + DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020, // 14 + }; + DXGI_COLOR_SPACE_TYPE StreamColorSpacesFixSDR[2] = { + DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709, // 9 + DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709, // 8 + }; + ID3D11ShaderResourceView* m_VideoTextureResourceView; + DECODER_PARAMETERS m_DecoderParams; int m_DisplayWidth; int m_DisplayHeight; diff --git a/app/streaming/video/videoenhancement.cpp b/app/streaming/video/videoenhancement.cpp index 5db52aefd..31a816be4 100644 --- a/app/streaming/video/videoenhancement.cpp +++ b/app/streaming/video/videoenhancement.cpp @@ -83,7 +83,7 @@ bool VideoEnhancement::setGPUinformation() // Set GPU information m_VendorId = adapterIdentifier.VendorId; m_GPUname = description; - m_DriverVersion = GetVideoDriverInfo(); + m_DriverVersion = getVideoDriverInfo(); 
qInfo() << "Active GPU: " << m_GPUname; qInfo() << "Video Driver: " << m_DriverVersion; @@ -108,7 +108,7 @@ bool VideoEnhancement::setGPUinformation() * * \return int Returns the Video driver version as an integer */ -int VideoEnhancement::GetVideoDriverInfo() +int VideoEnhancement::getVideoDriverInfo() { HKEY hKey = nullptr; diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h index 24d8e7502..9b42472e1 100644 --- a/app/streaming/video/videoenhancement.h +++ b/app/streaming/video/videoenhancement.h @@ -19,7 +19,7 @@ class VideoEnhancement : public QObject bool m_Initialized = false; bool m_Enabled = false; - bool m_UIvisible = true; // [Bruno] It should be false, and turn true by dxva2.cpp + bool m_UIvisible = false; // Vendors' name (PCI Special Interest Group) const int VENDOR_ID_AMD = 4098; @@ -49,7 +49,7 @@ class VideoEnhancement : public QObject VideoEnhancement& operator=(const VideoEnhancement&); bool setGPUinformation(); - int GetVideoDriverInfo(); + int getVideoDriverInfo(); public: static VideoEnhancement& getInstance(); From 6c0181d2ae8929ffaec35cf4e57aca2bdf0e97f6 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sat, 24 Feb 2024 17:44:19 +0100 Subject: [PATCH 07/53] Correct spelling in comments --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index f636ac337..dccc5b003 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -183,9 +183,9 @@ D3D11VARenderer::~D3D11VARenderer() /** * \brief Set Monitor HDR MetaData information * - * Get the Monitor HDT MetaData via LimeLight library + * Get the Monitor HDR MetaData via LimeLight library * - * \param PSS_HDR_METADATA* HDRMetaData The varaible to set the metadata information + * \param PSS_HDR_METADATA* HDRMetaData The variable 
to set the metadata information * \return bool Return True is succeed */ void D3D11VARenderer::setHDRStream(){ @@ -238,9 +238,9 @@ void D3D11VARenderer::setHDRStream(){ /** * \brief Set Monitor HDR MetaData information * - * Get the Monitor HDT MetaData via LimeLight library + * Get the Monitor HDR MetaData via LimeLight library * - * \param PSS_HDR_METADATA* HDRMetaData The varaible to set the metadata information + * \param PSS_HDR_METADATA* HDRMetaData The variable to set the metadata information * \return bool Return True is succeed */ void D3D11VARenderer::setHDROutPut(){ From 05000db7390ff373e0f73b3c4b25f14d88016387 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sat, 24 Feb 2024 19:43:23 +0100 Subject: [PATCH 08/53] Add flag D3D11_BIND_RENDER_TARGET only if Video enhancement is enabled --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index dccc5b003..14901f91a 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -1994,8 +1994,12 @@ bool D3D11VARenderer::setupVideoTexture() texDesc.SampleDesc.Quality = 0; texDesc.SampleDesc.Count = 1; texDesc.Usage = D3D11_USAGE_DEFAULT; + texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; // The flag D3D11_BIND_RENDER_TARGET is needed to enable the use of GPU enhancement - texDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE; + StreamingPreferences streamingPreferences; + if(streamingPreferences.videoEnhancement && m_VideoEnhancement->isEnhancementCapable()){ + texDesc.BindFlags |= D3D11_BIND_RENDER_TARGET; + } texDesc.CPUAccessFlags = 0; texDesc.MiscFlags = 0; From 937be567340ce6513797ccbb6c7673421cf29a01 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Mon, 26 Feb 2024 10:16:05 +0100 Subject: [PATCH 09/53] Replace the variable m_IsHDRenabled m_IsHDRenabled was a 
duplication of the existing condition "m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT". Replace the variable m_IsHDRenabled (2) m_IsHDRenabled was a duplication of the existing condition "m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT". --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 8 ++------ app/streaming/video/ffmpeg-renderers/d3d11va.h | 1 - 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 14901f91a..61946d08b 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -192,7 +192,6 @@ void D3D11VARenderer::setHDRStream(){ DXGI_HDR_METADATA_HDR10 streamHDRMetaData; // Prepare HDR Meta Data for Stream content SS_HDR_METADATA hdrMetadata; - // if (m_VideoProcessor.p && m_IsHDRenabled && LiGetHdrMetadata(&hdrMetadata)) { if (m_VideoProcessor.p && LiGetHdrMetadata(&hdrMetadata)) { // Magic constants to convert to fixed point. 
@@ -1139,8 +1138,8 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) */ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) { - //Do Nothing when Moolight's HDR is disabled - if(!m_IsHDRenabled){ + //Do Nothing when Moonlight's HDR is disabled + if(!(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT)){ return; } @@ -1572,7 +1571,6 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) // Check if the format is supported by this decoder BOOL supported; - m_IsHDRenabled = false; switch (m_DecoderParams.videoFormat) { case VIDEO_FORMAT_H264: @@ -1618,7 +1616,6 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) videoDevice->Release(); return false; } - m_IsHDRenabled = true; break; case VIDEO_FORMAT_AV1_MAIN8: @@ -1649,7 +1646,6 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) videoDevice->Release(); return false; } - m_IsHDRenabled = true; break; default: diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index bc10e4d89..c986fba0f 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -70,7 +70,6 @@ class D3D11VARenderer : public IFFmpegRenderer Microsoft::WRL::ComPtr m_InputView; ID3D11Resource* m_BackBufferResource; VideoEnhancement* m_VideoEnhancement; - bool m_IsHDRenabled = false; // Variable unused, but keep it as reference for debugging purpose DXGI_COLOR_SPACE_TYPE ColorSpaces[26] = { From 362f5caafaad0976fec322b148d25892acdf5cbe Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Mon, 26 Feb 2024 10:31:00 +0100 Subject: [PATCH 10/53] Change the method to get Video Driver version to a more conventional way Remove VideoEnhancement::getVideoDriverInfo() method (which was based of Window Registry) and use the existing method CheckInterfaceSupport() from the Adapter. 
--- app/streaming/video/videoenhancement.cpp | 87 +++--------------------- app/streaming/video/videoenhancement.h | 14 ++-- 2 files changed, 17 insertions(+), 84 deletions(-) diff --git a/app/streaming/video/videoenhancement.cpp b/app/streaming/video/videoenhancement.cpp index 31a816be4..fbf0454ed 100644 --- a/app/streaming/video/videoenhancement.cpp +++ b/app/streaming/video/videoenhancement.cpp @@ -3,7 +3,6 @@ #include #include #include -#include #include @@ -49,8 +48,6 @@ bool VideoEnhancement::setGPUinformation() { bool success = false; -#ifdef Q_OS_WIN - // Create a Direct3D 11 device ID3D11Device* pD3DDevice = nullptr; ID3D11DeviceContext* pContext = nullptr; @@ -83,14 +80,22 @@ bool VideoEnhancement::setGPUinformation() // Set GPU information m_VendorId = adapterIdentifier.VendorId; m_GPUname = description; - m_DriverVersion = getVideoDriverInfo(); + + LARGE_INTEGER umd_version; + pAdapter->CheckInterfaceSupport(__uuidof(IDXGIDevice), &umd_version); + // Integer part + m_DriverVersion = HIWORD(umd_version.LowPart); + // Decimal part + double digits = static_cast(floor(log10(LOWORD(umd_version.LowPart))) + 1); + if(digits > 0){ + m_DriverVersion += LOWORD(umd_version.LowPart) / pow(10, digits); + } qInfo() << "Active GPU: " << m_GPUname; qInfo() << "Video Driver: " << m_DriverVersion; } } - } // Release resources @@ -98,81 +103,9 @@ bool VideoEnhancement::setGPUinformation() if (pDXGIDevice) pDXGIDevice->Release(); if (pAdapter) pAdapter->Release(); - #endif - return success; } -/** - * \brief Get the Video driver version - * - * \return int Returns the Video driver version as an integer - */ -int VideoEnhancement::getVideoDriverInfo() -{ - - HKEY hKey = nullptr; - const wchar_t* SUBKEY = L"SYSTEM\\CurrentControlSet\\Control\\Video"; - - if (ERROR_SUCCESS != RegOpenKeyExW(HKEY_LOCAL_MACHINE, SUBKEY, 0, KEY_ENUMERATE_SUB_KEYS, &hKey)) - return m_DriverVersion; - - LSTATUS sta = ERROR_SUCCESS; - wchar_t keyName[128] = {}; - DWORD index = 0; - DWORD len; - - do 
- { - len = sizeof(keyName) / sizeof(wchar_t); - sta = RegEnumKeyExW(hKey, index, keyName, &len, nullptr, nullptr, nullptr, nullptr); - index++; - - if (sta != ERROR_SUCCESS) - continue; - - std::wstring subkey(SUBKEY); - subkey.append(L"\\"); - subkey.append(keyName); - subkey.append(L"\\"); - subkey.append(L"0000"); - DWORD lg; - - wchar_t desc[128] = {}; - lg = sizeof(desc) / sizeof(wchar_t); - if (ERROR_SUCCESS != RegGetValueW(HKEY_LOCAL_MACHINE, subkey.c_str(), L"DriverDesc", - RRF_RT_REG_SZ, nullptr, desc, &lg)) - continue; - - std::wstring s_desc(desc); - if (s_desc != m_GPUname) - continue; - - // Driver of interest found, we read version - wchar_t charVersion[64] = {}; - lg = sizeof(charVersion) / sizeof(wchar_t); - if (ERROR_SUCCESS != RegGetValueW(HKEY_LOCAL_MACHINE, subkey.c_str(), L"DriverVersion", - RRF_RT_REG_SZ, nullptr, charVersion, &lg)) - continue; - - std::wstring strVersion(charVersion); - - // Convert driver store version to Nvidia version - if (isVendorNVIDIA()) // NVIDIA - { - strVersion = std::regex_replace(strVersion, std::wregex(L"\\."), L""); - m_DriverVersion = std::stoi(strVersion.substr(strVersion.length() - 5, 5)); - } - else // AMD/Intel/WDDM - { - m_DriverVersion = std::stoi(strVersion.substr(0, strVersion.find('.'))); - } - } while (sta == ERROR_SUCCESS); - RegCloseKey(hKey); - - return m_DriverVersion; -} - /** * \brief Check if the vendor is AMD * diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h index 9b42472e1..f39be0a1d 100644 --- a/app/streaming/video/videoenhancement.h +++ b/app/streaming/video/videoenhancement.h @@ -27,19 +27,19 @@ class VideoEnhancement : public QObject const int VENDOR_ID_NVIDIA = 4318; // Minimum driver version accepted for VSR feature - const int MIN_VSR_DRIVER_VERSION_AMD = 24; // It is implemented from the driver 24.1.1 - const int MIN_VSR_DRIVER_VERSION_INTEL = 28; // It will ensure to cover 27.20 version - const int MIN_VSR_DRIVER_VERSION_NVIDIA = 54584; 
// NVIDIA driver name are always in the format XXX.XX (https://www.nvidia.com/en-gb/drivers/drivers-faq/) + const double MIN_VSR_DRIVER_VERSION_AMD = 21910.5; // AMD Driver Version 23.19.10 (Jan 23rd, 2024) + const double MIN_VSR_DRIVER_VERSION_INTEL = 100.8681; // Intel Driver Version 27.20.100.8681 (Sept 15, 2020) + const double MIN_VSR_DRIVER_VERSION_NVIDIA = 15.4584; // NVIDIA Driver Version 545.84 (Oct 13, 2023) // Minimum driver version accepted for HDR feature - const int MIN_HDR_DRIVER_VERSION_AMD = 0; // To be determined, this feature has not yet been announced by AMD - const int MIN_HDR_DRIVER_VERSION_INTEL = 0; // To be determined, this feature has not yet been announced by Intel - const int MIN_HDR_DRIVER_VERSION_NVIDIA = 55123; // https://www.nvidia.com/download/driverResults.aspx/218114/en-us/ + const double MIN_HDR_DRIVER_VERSION_AMD = 0; // To be determined, this feature has not yet been announced by AMD + const double MIN_HDR_DRIVER_VERSION_INTEL = 0; // To be determined, this feature has not yet been announced by Intel + const double MIN_HDR_DRIVER_VERSION_NVIDIA = 15.5123; // https://www.nvidia.com/download/driverResults.aspx/218114/en-us/ // GPU information int m_VendorId = 0; std::wstring m_GPUname = L"Unknown"; - int m_DriverVersion = 0; + double m_DriverVersion = 0; // Disable the constructor from outside to avoid a new instance VideoEnhancement(); From ae8118b89e7ad20fd31f2e29b0826c6921f6898d Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Mon, 26 Feb 2024 10:45:34 +0100 Subject: [PATCH 11/53] Use Hexadecimal value for VendorID --- app/streaming/video/videoenhancement.h | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h index f39be0a1d..6d0493036 100644 --- a/app/streaming/video/videoenhancement.h +++ b/app/streaming/video/videoenhancement.h @@ -22,9 +22,9 @@ class VideoEnhancement : public QObject bool m_UIvisible = false; // Vendors' name 
(PCI Special Interest Group) - const int VENDOR_ID_AMD = 4098; - const int VENDOR_ID_INTEL = 32902; - const int VENDOR_ID_NVIDIA = 4318; + const int VENDOR_ID_AMD = 0x1002; + const int VENDOR_ID_INTEL = 0x8086; + const int VENDOR_ID_NVIDIA = 0x10DE; // Minimum driver version accepted for VSR feature const double MIN_VSR_DRIVER_VERSION_AMD = 21910.5; // AMD Driver Version 23.19.10 (Jan 23rd, 2024) From 7e317ef0fec8ed04b79b966bf02b53356973f3b0 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Mon, 26 Feb 2024 23:09:02 +0100 Subject: [PATCH 12/53] Correct the HDR metadata for Stream and Output sources - MaxCLL and MaxFALL at 0 as the source content is unknown in advance. - Output ColorSpace matched SwapChain --- .../video/ffmpeg-renderers/d3d11va.cpp | 112 ++++++++---------- .../video/ffmpeg-renderers/d3d11va.h | 20 ++-- 2 files changed, 61 insertions(+), 71 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 61946d08b..8a5a5ef3c 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -189,15 +189,13 @@ D3D11VARenderer::~D3D11VARenderer() * \return bool Return True is succeed */ void D3D11VARenderer::setHDRStream(){ + DXGI_HDR_METADATA_HDR10 streamHDRMetaData; + // Prepare HDR Meta Data for Stream content SS_HDR_METADATA hdrMetadata; if (m_VideoProcessor.p && LiGetHdrMetadata(&hdrMetadata)) { - // Magic constants to convert to fixed point. 
- // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 - static constexpr int kMinLuminanceFixedPoint = 10000; - streamHDRMetaData.RedPrimary[0] = hdrMetadata.displayPrimaries[0].x; streamHDRMetaData.RedPrimary[1] = hdrMetadata.displayPrimaries[0].y; streamHDRMetaData.GreenPrimary[0] = hdrMetadata.displayPrimaries[1].x; @@ -206,22 +204,13 @@ void D3D11VARenderer::setHDRStream(){ streamHDRMetaData.BluePrimary[1] = hdrMetadata.displayPrimaries[2].y; streamHDRMetaData.WhitePoint[0] = hdrMetadata.whitePoint.x; streamHDRMetaData.WhitePoint[1] = hdrMetadata.whitePoint.y; - streamHDRMetaData.MaxMasteringLuminance = hdrMetadata.maxDisplayLuminance * kMinLuminanceFixedPoint; + streamHDRMetaData.MaxMasteringLuminance = hdrMetadata.maxDisplayLuminance; streamHDRMetaData.MinMasteringLuminance = hdrMetadata.minDisplayLuminance; - streamHDRMetaData.MaxContentLightLevel = hdrMetadata.maxContentLightLevel; - streamHDRMetaData.MaxFrameAverageLightLevel = hdrMetadata.maxFrameAverageLightLevel; - if(streamHDRMetaData.MaxContentLightLevel == 0){ - streamHDRMetaData.MaxContentLightLevel = streamHDRMetaData.MaxFrameAverageLightLevel; - } - - // [TODO] (Source: https://github.com/xbmc) For AMD/HDR, - // we apparently need to do a custom tone (MaxMasteringLuminance=10000, MinMasteringLuminance=0). - // Yet to be verified - // if(m_VideoEnhancement->isVendorAMD()){ - // m_StreamHDRMetaData.MaxMasteringLuminance = 10000; - // m_StreamHDRMetaData.MinMasteringLuminance = 0; - // } + // As the Content is unknown since it is streamed, MaxCLL and MaxFALL cannot be evaluated from the source on the fly, + // therefore streamed source returns 0 as value for both. We can safetly set them to 0. 
+ streamHDRMetaData.MaxContentLightLevel = 0; + streamHDRMetaData.MaxFrameAverageLightLevel = 0; // Set HDR Stream (input) Meta data m_VideoContext->VideoProcessorSetStreamHDRMetaData( @@ -253,9 +242,6 @@ void D3D11VARenderer::setHDROutPut(){ if (SUCCEEDED(pOutput.As(&pOutput6))){ DXGI_OUTPUT_DESC1 desc1 {}; if (SUCCEEDED(pOutput6->GetDesc1(&desc1))){ - // Get Monitor ColorSpace for SDR and HDR (if the monitor is capable of HDR) - m_OutputColorSpace = desc1.ColorSpace; - // Magic constants to convert to fixed point. // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 static constexpr int kPrimariesFixedPoint = 50000; @@ -272,28 +258,22 @@ void D3D11VARenderer::setHDROutPut(){ outputHDRMetaData.WhitePoint[1] = desc1.WhitePoint[1] * kPrimariesFixedPoint; outputHDRMetaData.MaxMasteringLuminance = desc1.MaxLuminance; outputHDRMetaData.MinMasteringLuminance = desc1.MinLuminance * kMinLuminanceFixedPoint; - // MaxContentLightLevel is not available in DXGI_OUTPUT_DESC1 structure - // https://learn.microsoft.com/fr-fr/windows/win32/api/dxgi1_6/ns-dxgi1_6-dxgi_output_desc1 - // But MaxContentLightLevel is not needed and greater or equal to MaxFullFrameLuminance, so it is safe to set a minimum for it - // https://professionalsupport.dolby.com/s/article/Calculation-of-MaxFALL-and-MaxCLL-metadata - // Also note that these are not fixed-point. - outputHDRMetaData.MaxContentLightLevel = desc1.MaxFullFrameLuminance; - outputHDRMetaData.MaxFrameAverageLightLevel = desc1.MaxFullFrameLuminance; + // Set it the same as streamed source which is 0 by default as it cannot be evaluated on the fly. 
+ outputHDRMetaData.MaxContentLightLevel = 0; + outputHDRMetaData.MaxFrameAverageLightLevel = 0; + + if (m_VideoProcessor.p) { + // Prepare HDR for the OutPut Monitor + m_VideoContext->VideoProcessorSetOutputHDRMetaData( + m_VideoProcessor, + DXGI_HDR_METADATA_TYPE_HDR10, + sizeof(DXGI_HDR_METADATA_HDR10), + &outputHDRMetaData + ); + } } } - } - - if (m_VideoProcessor.p) { - // Prepare HDR for the OutPut Monitor - m_VideoContext->VideoProcessorSetOutputHDRMetaData( - m_VideoProcessor, - DXGI_HDR_METADATA_TYPE_HDR10, - sizeof(DXGI_HDR_METADATA_HDR10), - &outputHDRMetaData - ); - } - - m_SwapChain->SetHDRMetaData(DXGI_HDR_METADATA_TYPE_HDR10, sizeof(outputHDRMetaData), &outputHDRMetaData); + } } bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound) @@ -964,6 +944,9 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) "IDXGISwapChain::SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020) failed: %x", hr); } + if (m_VideoProcessor.p && m_VideoProcessorEnumerator.p) { + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); + }; } else { // Restore default sRGB colorspace @@ -973,6 +956,9 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) "IDXGISwapChain::SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709) failed: %x", hr); } + if (m_VideoProcessor.p && m_VideoProcessorEnumerator.p) { + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); + } } m_LastColorTrc = frame->color_trc; @@ -1153,22 +1139,22 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) // This is a non-blocking issue, but I need to investigate further the reason of such a behavior. 
auto now = std::chrono::system_clock::now(); long nowTime = std::chrono::duration_cast(now.time_since_epoch()).count(); - if(setStreamColorSpace && nowTime >= nextTime){ - nextTime = nowTime + increment; - if(streamIndex >= 1){ - setStreamColorSpace = false; + if(m_SetStreamColorSpace && nowTime >= m_NextTime){ + m_NextTime = nowTime + m_Increment; + if(m_StreamIndex >= 1){ + m_SetStreamColorSpace = false; } switch (frameColorSpace) { case COLORSPACE_REC_2020: - m_StreamColorSpace = StreamColorSpacesFixHDR[streamIndex]; + m_StreamColorSpace = m_StreamColorSpacesFixHDR[m_StreamIndex]; m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); break; default: - m_StreamColorSpace = StreamColorSpacesFixSDR[streamIndex]; + m_StreamColorSpace = m_StreamColorSpacesFixSDR[m_StreamIndex]; m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); } - if(setStreamColorSpace){ - streamIndex++; + if(m_SetStreamColorSpace){ + m_StreamIndex++; } } @@ -1187,16 +1173,16 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) // using an inception effect. // The solution is the set Hue at -1, it doesn't impact the visual (compare to others), and it fixes the color issue. 
m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, -1); - // This Stream Color Space accepts HDR mode from Server, but NVIDIA AI-HDR will be disbaled (which is fine as we already have native HDR) - m_StreamColorSpace = ColorSpaces[14]; + // This Stream Color Space accepts HDR mode from Server, but NVIDIA AI-HDR will be disabled (which is fine as we already have native HDR) + m_StreamColorSpace = m_ColorSpaces[14]; if(m_VideoEnhancement->isVendorNVIDIA()){ // [TODO] Remove this line if NVIDIA fix the issue of having VSR not working (add a gray filter) // while HDR is activated for Stream content (swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM;) enableNvidiaVideoSuperResolution(); // Turn it "false" if we prefer to not see the white border around elements when VSR is active. } // Reset the fix HDR Stream - setStreamColorSpace = true; - streamIndex = 0; + m_SetStreamColorSpace = true; + m_StreamIndex = 0; break; default: // For SDR we can use default values. 
@@ -1204,14 +1190,14 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) // This Stream Color Space is SDR, which enable the use of NVIDIA AI-HDR (Moonlight's HDR needs to be enabled) // I don't know why, it is gray when HDR is on on Moonlight while using DXGI_FORMAT_R10G10B10A2_UNORM for the SwapChain, // the fix is to force using DXGI_FORMAT_R8G8B8A8_UNORM which seems somehow not impacting the color rendering - m_StreamColorSpace = ColorSpaces[8]; + m_StreamColorSpace = m_ColorSpaces[8]; if(m_VideoEnhancement->isVendorNVIDIA()){ // Always enable NVIDIA VSR for SDR Stream content enableNvidiaVideoSuperResolution(); } // Reset the fix SDR Stream as it does work when back and forth with HDR on Server - setStreamColorSpace = true; - streamIndex = 0; + m_SetStreamColorSpace = true; + m_StreamIndex = 0; } m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); @@ -1267,8 +1253,8 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) // [TODO] This timer is only used to fix a problem with VideoProcessorSetStreamColorSpace1 not properly applied at the beginning // These 3 lines can be removed once the bug (non-blocking) is fixed. 
auto now = std::chrono::system_clock::now(); - startTime = std::chrono::duration_cast(now.time_since_epoch()).count(); - nextTime = startTime + increment; + m_StartTime = std::chrono::duration_cast(now.time_since_epoch()).count(); + m_NextTime = m_StartTime + m_Increment; D3D11_VIDEO_PROCESSOR_CONTENT_DESC content_desc; ZeroMemory(&content_desc, sizeof(content_desc)); @@ -1401,8 +1387,12 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) // Prepare HDR Meta Data for the OutPut Monitor, will be ignored while using SDR setHDROutPut(); - // Set OutPut ColorSpace, m_OutputColorSpace is found from the active monitor earlier in D3D11VARenderer::initialize() - m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, m_OutputColorSpace); + // Set OutPut ColorSpace + if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); + } else { + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); + } // The section is a customization to enhance (non-AI) shlithly the frame // Reduce artefacts (like pixelisation around text), does work in additionto AI-enhancement for better result @@ -1413,7 +1403,7 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_ANAMORPHIC_SCALING, true, 100); // (0 / 0 / 100) - setStreamColorSpace = true; + m_SetStreamColorSpace = true; m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); return true; diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index c986fba0f..4d11c0632 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -72,7 +72,7 @@ class D3D11VARenderer : public IFFmpegRenderer VideoEnhancement* 
m_VideoEnhancement; // Variable unused, but keep it as reference for debugging purpose - DXGI_COLOR_SPACE_TYPE ColorSpaces[26] = { + DXGI_COLOR_SPACE_TYPE m_ColorSpaces[26] = { DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709, // 0 - A DXGI_COLOR_SPACE_RGB_FULL_G10_NONE_P709, // 1 - A DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709, // 2 - I * A @@ -101,20 +101,20 @@ class D3D11VARenderer : public IFFmpegRenderer DXGI_COLOR_SPACE_CUSTOM, // 25 }; - DXGI_COLOR_SPACE_TYPE m_StreamColorSpace = ColorSpaces[8]; // SDR-Only (HDR is 14) - DXGI_COLOR_SPACE_TYPE m_OutputColorSpace = ColorSpaces[12]; // SDR & HDR + DXGI_COLOR_SPACE_TYPE m_StreamColorSpace = m_ColorSpaces[8]; // SDR-Only (HDR is 14) + DXGI_COLOR_SPACE_TYPE m_OutputColorSpace = m_ColorSpaces[12]; // SDR & HDR // [TODO] Remove the timer feature once the bug with VideoProcessorSetStreamColorSpace1 is fixed - bool setStreamColorSpace = true; - long startTime; - long nextTime; - int streamIndex = 0; - int increment = 100; - DXGI_COLOR_SPACE_TYPE StreamColorSpacesFixHDR[2] = { + bool m_SetStreamColorSpace = true; + long m_StartTime; + long m_NextTime; + int m_StreamIndex = 0; + int m_Increment = 300; + DXGI_COLOR_SPACE_TYPE m_StreamColorSpacesFixHDR[2] = { DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020, // 13 DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020, // 14 }; - DXGI_COLOR_SPACE_TYPE StreamColorSpacesFixSDR[2] = { + DXGI_COLOR_SPACE_TYPE m_StreamColorSpacesFixSDR[2] = { DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709, // 9 DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709, // 8 }; From 9562a59cf557f1758d43e8b6eacd0811d8348f4c Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Mon, 26 Feb 2024 23:40:30 +0100 Subject: [PATCH 13/53] Replace CComPtr by Microsoft::WRL::ComPtr --- .../video/ffmpeg-renderers/d3d11va.cpp | 76 +++++++++---------- .../video/ffmpeg-renderers/d3d11va.h | 6 +- 2 files changed, 42 insertions(+), 40 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp 
b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 8a5a5ef3c..109546e81 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -194,7 +194,7 @@ void D3D11VARenderer::setHDRStream(){ // Prepare HDR Meta Data for Stream content SS_HDR_METADATA hdrMetadata; - if (m_VideoProcessor.p && LiGetHdrMetadata(&hdrMetadata)) { + if (m_VideoProcessor && LiGetHdrMetadata(&hdrMetadata)) { streamHDRMetaData.RedPrimary[0] = hdrMetadata.displayPrimaries[0].x; streamHDRMetaData.RedPrimary[1] = hdrMetadata.displayPrimaries[0].y; @@ -214,7 +214,7 @@ void D3D11VARenderer::setHDRStream(){ // Set HDR Stream (input) Meta data m_VideoContext->VideoProcessorSetStreamHDRMetaData( - m_VideoProcessor, + m_VideoProcessor.Get(), 0, DXGI_HDR_METADATA_TYPE_HDR10, sizeof(DXGI_HDR_METADATA_HDR10), @@ -262,10 +262,10 @@ void D3D11VARenderer::setHDROutPut(){ outputHDRMetaData.MaxContentLightLevel = 0; outputHDRMetaData.MaxFrameAverageLightLevel = 0; - if (m_VideoProcessor.p) { + if (m_VideoProcessor) { // Prepare HDR for the OutPut Monitor m_VideoContext->VideoProcessorSetOutputHDRMetaData( - m_VideoProcessor, + m_VideoProcessor.Get(), DXGI_HDR_METADATA_TYPE_HDR10, sizeof(DXGI_HDR_METADATA_HDR10), &outputHDRMetaData @@ -438,7 +438,7 @@ void D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ param = kIntelVpeVersion3; hr = m_VideoContext->VideoProcessorSetOutputExtension( - m_VideoProcessor, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + m_VideoProcessor.Get(), &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -455,7 +455,7 @@ void D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ } hr = m_VideoContext->VideoProcessorSetOutputExtension( - m_VideoProcessor, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + m_VideoProcessor.Get(), &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -472,7 
+472,7 @@ void D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ } hr = m_VideoContext->VideoProcessorSetStreamExtension( - m_VideoProcessor, 0, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + m_VideoProcessor.Get(), 0, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -522,7 +522,7 @@ void D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate){ UINT enable = activate; NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV1, kStreamExtensionMethodSuperResolution, enable}; - hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor, 0, &GUID_NVIDIA_PPE_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); + hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor.Get(), 0, &GUID_NVIDIA_PPE_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution failed: %x", @@ -598,7 +598,7 @@ void D3D11VARenderer::enableNvidiaHDR(bool activate){ UINT enable = activate; NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV4, kStreamExtensionMethodTrueHDR, enable, 0u}; - hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor, 0, &GUID_NVIDIA_TRUE_HDR_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); + hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor.Get(), 0, &GUID_NVIDIA_TRUE_HDR_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR failed: %x", @@ -944,8 +944,8 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) "IDXGISwapChain::SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020) failed: %x", hr); } - if (m_VideoProcessor.p && m_VideoProcessorEnumerator.p) { - m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); + if 
(m_VideoProcessor && m_VideoProcessorEnumerator) { + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); }; } else { @@ -956,8 +956,8 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) "IDXGISwapChain::SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709) failed: %x", hr); } - if (m_VideoProcessor.p && m_VideoProcessorEnumerator.p) { - m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); + if (m_VideoProcessor && m_VideoProcessorEnumerator) { + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); } } @@ -1147,11 +1147,11 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) switch (frameColorSpace) { case COLORSPACE_REC_2020: m_StreamColorSpace = m_StreamColorSpacesFixHDR[m_StreamIndex]; - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, m_StreamColorSpace); break; default: m_StreamColorSpace = m_StreamColorSpacesFixSDR[m_StreamIndex]; - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, m_StreamColorSpace); } if(m_SetStreamColorSpace){ m_StreamIndex++; @@ -1172,7 +1172,7 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) // are all together at their default value, the tone tends to slight red. It is easy to see when streaming its own screen // using an inception effect. // The solution is the set Hue at -1, it doesn't impact the visual (compare to others), and it fixes the color issue. 
- m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, -1); + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, -1); // This Stream Color Space accepts HDR mode from Server, but NVIDIA AI-HDR will be disabled (which is fine as we already have native HDR) m_StreamColorSpace = m_ColorSpaces[14]; if(m_VideoEnhancement->isVendorNVIDIA()){ @@ -1186,7 +1186,7 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) break; default: // For SDR we can use default values. - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, 0); + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, 0); // This Stream Color Space is SDR, which enable the use of NVIDIA AI-HDR (Moonlight's HDR needs to be enabled) // I don't know why, it is gray when HDR is on on Moonlight while using DXGI_FORMAT_R10G10B10A2_UNORM for the SwapChain, // the fix is to force using DXGI_FORMAT_R8G8B8A8_UNORM which seems somehow not impacting the color rendering @@ -1200,7 +1200,7 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) m_StreamIndex = 0; } - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, m_StreamColorSpace); } void D3D11VARenderer::renderVideo(AVFrame* frame) @@ -1225,7 +1225,7 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) // Prepare the Stream prepareVideoProcessorStream(frame); // Render to the front the frames processed by the Video Processor - m_VideoContext->VideoProcessorBlt(m_VideoProcessor, m_OutputView.Get(), 0, 1, &m_StreamData); + m_VideoContext->VideoProcessorBlt(m_VideoProcessor.Get(), m_OutputView.Get(), 0, 1, &m_StreamData); } else { // Bind our CSC shader (and constant buffer, if required) 
bindColorConversion(frame); @@ -1259,7 +1259,7 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) D3D11_VIDEO_PROCESSOR_CONTENT_DESC content_desc; ZeroMemory(&content_desc, sizeof(content_desc)); - if (m_VideoProcessor.p && m_VideoProcessorEnumerator.p) { + if (m_VideoProcessor && m_VideoProcessorEnumerator) { hr = m_VideoProcessorEnumerator->GetVideoProcessorContentDesc(&content_desc); if (FAILED(hr)) return false; @@ -1268,8 +1268,8 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) content_desc.InputHeight != m_DecoderParams.height || content_desc.OutputWidth != m_DisplayWidth || content_desc.OutputHeight != m_DisplayHeight || reset) { - m_VideoProcessorEnumerator.Release(); - m_VideoProcessor.Release(); + m_VideoProcessorEnumerator->Release(); + m_VideoProcessor->Release(); } else { return true; @@ -1292,21 +1292,21 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) if (FAILED(hr)) return false; - hr = m_VideoDevice->CreateVideoProcessor(m_VideoProcessorEnumerator, 0, + hr = m_VideoDevice->CreateVideoProcessor(m_VideoProcessorEnumerator.Get(), 0, &m_VideoProcessor); if (FAILED(hr)) return false; - m_VideoContext->VideoProcessorSetStreamAutoProcessingMode(m_VideoProcessor, 0, false); - m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, 0); + m_VideoContext->VideoProcessorSetStreamAutoProcessingMode(m_VideoProcessor.Get(), 0, false); + m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, 0); // The output surface will be read by Direct3D shaders (It seems useless in our context) - m_VideoContext->VideoProcessorSetOutputShaderUsage(m_VideoProcessor, true); + m_VideoContext->VideoProcessorSetOutputShaderUsage(m_VideoProcessor.Get(), true); // Set Background color D3D11_VIDEO_COLOR bgColor; bgColor.YCbCr = { 0.0625f, 0.5f, 0.5f, 1.0f }; // black color - 
m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor, true, &bgColor); + m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor.Get(), true, &bgColor); ZeroMemory(&m_OutputViewDesc, sizeof(m_OutputViewDesc)); m_OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D; @@ -1314,7 +1314,7 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) hr = m_VideoDevice->CreateVideoProcessorOutputView( m_BackBufferResource, - m_VideoProcessorEnumerator, + m_VideoProcessorEnumerator.Get(), &m_OutputViewDesc, (ID3D11VideoProcessorOutputView**)&m_OutputView); if (FAILED(hr)) { @@ -1328,7 +1328,7 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) m_InputViewDesc.Texture2D.ArraySlice = 0; hr = m_VideoDevice->CreateVideoProcessorInputView( - m_VideoTexture, m_VideoProcessorEnumerator, &m_InputViewDesc, (ID3D11VideoProcessorInputView**)&m_InputView); + m_VideoTexture, m_VideoProcessorEnumerator.Get(), &m_InputViewDesc, (ID3D11VideoProcessorInputView**)&m_InputView); if (FAILED(hr)) return false; @@ -1364,9 +1364,9 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) } } - m_VideoContext->VideoProcessorSetStreamSourceRect(m_VideoProcessor, 0, true, &srcRect); - m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor, 0, true, &dstRect); - m_VideoContext->VideoProcessorSetStreamFrameFormat(m_VideoProcessor, 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE); + m_VideoContext->VideoProcessorSetStreamSourceRect(m_VideoProcessor.Get(), 0, true, &srcRect); + m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor.Get(), 0, true, &dstRect); + m_VideoContext->VideoProcessorSetStreamFrameFormat(m_VideoProcessor.Get(), 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE); ZeroMemory(&m_StreamData, sizeof(m_StreamData)); m_StreamData.Enable = true; @@ -1389,22 +1389,22 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) // Set OutPut ColorSpace if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ - 
m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); } else { - m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor, DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); } // The section is a customization to enhance (non-AI) shlithly the frame // Reduce artefacts (like pixelisation around text), does work in additionto AI-enhancement for better result - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) // Sharpen sligthly the picture to enhance details, does work in addition to AI-enhancement for better result - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 50); // (0 / 0 / 100) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 50); // (0 / 0 / 100) // As no effect as the picture is not distorted - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_FILTER_ANAMORPHIC_SCALING, true, 100); // (0 / 0 / 100) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_ANAMORPHIC_SCALING, true, 100); // (0 / 0 / 100) m_SetStreamColorSpace = true; - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor, 0, m_StreamColorSpace); + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, m_StreamColorSpace); return true; } diff --git 
a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index 4d11c0632..48058e92b 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -61,8 +61,10 @@ class D3D11VARenderer : public IFFmpegRenderer ID3D11VideoDevice* m_VideoDevice; ID3D11VideoContext2* m_VideoContext; - CComPtr m_VideoProcessor; - CComPtr m_VideoProcessorEnumerator; + // CComPtr m_VideoProcessor; + // CComPtr m_VideoProcessorEnumerator; + Microsoft::WRL::ComPtr m_VideoProcessor; + Microsoft::WRL::ComPtr m_VideoProcessorEnumerator; D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC m_OutputViewDesc; D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC m_InputViewDesc; D3D11_VIDEO_PROCESSOR_STREAM m_StreamData; From 0ed68bf4d740641045a40542b0b279337b1f8c6b Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Tue, 27 Feb 2024 00:00:49 +0100 Subject: [PATCH 14/53] Remove "reset" parameter from the method "D3D11VARenderer::createVideoProcessor" "reset" was not used in latest code --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 5 ++--- app/streaming/video/ffmpeg-renderers/d3d11va.h | 4 +--- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 109546e81..108ad4510 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -1243,10 +1243,9 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) * * Creating a Video Processor add additional GPU video processing method like AI Upscaling * - * \param bool reset default is false, at true it forces the recreate the Video Processor * \return bool Returns true if the Video processor is successfully created */ -bool D3D11VARenderer::createVideoProcessor(bool reset) +bool D3D11VARenderer::createVideoProcessor() { HRESULT hr; @@ -1267,7 +1266,7 @@ bool D3D11VARenderer::createVideoProcessor(bool reset) if 
(content_desc.InputWidth != m_DecoderParams.width || content_desc.InputHeight != m_DecoderParams.height || content_desc.OutputWidth != m_DisplayWidth || - content_desc.OutputHeight != m_DisplayHeight || reset) { + content_desc.OutputHeight != m_DisplayHeight) { m_VideoProcessorEnumerator->Release(); m_VideoProcessor->Release(); } diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index 48058e92b..3658eaec1 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -36,7 +36,7 @@ class D3D11VARenderer : public IFFmpegRenderer void bindColorConversion(AVFrame* frame); void prepareVideoProcessorStream(AVFrame* frame); void renderVideo(AVFrame* frame); - bool createVideoProcessor(bool reset = false); + bool createVideoProcessor(); void enableAMDVideoSuperResolution(bool activate = true); void enableIntelVideoSuperResolution(bool activate = true); void enableNvidiaVideoSuperResolution(bool activate = true); @@ -61,8 +61,6 @@ class D3D11VARenderer : public IFFmpegRenderer ID3D11VideoDevice* m_VideoDevice; ID3D11VideoContext2* m_VideoContext; - // CComPtr m_VideoProcessor; - // CComPtr m_VideoProcessorEnumerator; Microsoft::WRL::ComPtr m_VideoProcessor; Microsoft::WRL::ComPtr m_VideoProcessorEnumerator; D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC m_OutputViewDesc; From 17d4d002c967b21cc111a01b3fb1fa09072afe4c Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Tue, 27 Feb 2024 00:05:37 +0100 Subject: [PATCH 15/53] Turn off D3D11_VIDEO_PROCESSOR_FILTER_HUE in SDR mode --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 108ad4510..a9985c8f9 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -1186,7 +1186,7 @@ void 
D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) break; default: // For SDR we can use default values. - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, 0); + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, false, 0); // This Stream Color Space is SDR, which enable the use of NVIDIA AI-HDR (Moonlight's HDR needs to be enabled) // I don't know why, it is gray when HDR is on on Moonlight while using DXGI_FORMAT_R10G10B10A2_UNORM for the SwapChain, // the fix is to force using DXGI_FORMAT_R8G8B8A8_UNORM which seems somehow not impacting the color rendering From 7c22abda85970302c9fb6f69b4f323e93edad455 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Tue, 27 Feb 2024 00:13:13 +0100 Subject: [PATCH 16/53] Add the source links of some key values for VSR and SDR->HDR features --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index a9985c8f9..80bba9428 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -406,6 +406,8 @@ void D3D11VARenderer::enableAMDVideoSuperResolution(bool activate){ * This experimental feature from Intel is available starting from Intel iGPU from CPU Gen 10th (Skylake) and Intel graphics driver 27.20.100.8681 (Sept 15, 2020) * Only Arc GPUs seem to provide visual improvement * https://www.techpowerup.com/305558/intel-outs-video-super-resolution-for-chromium-browsers-works-with-igpus-11th-gen-onward + * Values from Chromium source code: + * https://chromium.googlesource.com/chromium/src/+/master/ui/gl/swap_chain_presenter.cc * * \return void */ @@ -497,6 +499,8 @@ void D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ * IMPORTANT (Feb 5th, 2024): RTX VSR seems to be limited to SDR 
content only, * it does add a grey filter if it is activated while HDR is on on stream (Host setting does not impact it). * It might be fixed later by NVIDIA, but the temporary solution is to disable the feature when Stream content is HDR-on + * Values from Chromium source code: + * https://chromium.googlesource.com/chromium/src/+/master/ui/gl/swap_chain_presenter.cc * * \return void */ @@ -573,6 +577,8 @@ void D3D11VARenderer::enableIntelHDR(bool activate){ * can work without having the screen darker. Here are what I found: * 1) Moonlight HDR: Checked / SwapChain: DXGI_FORMAT_R10G10B10A2_UNORM / VideoTexture: DXGI_FORMAT_P010 => SDR convert to HDR, but with darker rendering * 2) Moonlight HDR: Unchecked / SwapChain: DXGI_FORMAT_R10G10B10A2_UNORM / VideoTexture: DXGI_FORMAT_NV12 => SDR convert to HDR, but with darker rendering + * Values from Chromium source code: + * https://chromium.googlesource.com/chromium/src/+/master/ui/gl/swap_chain_presenter.cc * * \return void */ From 4bb3eff114b6472744e7426e20c44cfdc40aaaae Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Tue, 27 Feb 2024 06:30:14 +0100 Subject: [PATCH 17/53] Remove ID3D11VideoContext1::VideoProcessorSetOutputShaderUsage as it is not used --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 80bba9428..f158c00c0 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -1305,9 +1305,6 @@ bool D3D11VARenderer::createVideoProcessor() m_VideoContext->VideoProcessorSetStreamAutoProcessingMode(m_VideoProcessor.Get(), 0, false); m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, 0); - // The output surface will be read by Direct3D shaders (It seems useless in our context) - 
m_VideoContext->VideoProcessorSetOutputShaderUsage(m_VideoProcessor.Get(), true); - // Set Background color D3D11_VIDEO_COLOR bgColor; bgColor.YCbCr = { 0.0625f, 0.5f, 0.5f, 1.0f }; // black color From 9ca8b6f18e92914c38ebdf916be02e036f8fe036 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Tue, 27 Feb 2024 15:57:24 +0100 Subject: [PATCH 18/53] Enable Video AI-Enhancement in Exclusive Fullscreen - NVIDIA: After updating NVIDIA driver to 551.61, VSR works in Exclusive Fullscreen (Tested on a RTX 4070 Ti) - Intel: VSR works in Exclusive Fullscreen (Test on a Arc a380) - AMD: VSR is WIP --- app/gui/SettingsView.qml | 68 +--------------------------------------- 1 file changed, 1 insertion(+), 67 deletions(-) diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index 449964543..80ad605fc 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -14,9 +14,6 @@ Flickable { objectName: qsTr("Settings") signal languageChanged() - signal displayModeChanged() - signal windowModeChanged() - signal videoEnhancementChanged() boundsBehavior: Flickable.OvershootBounds @@ -235,9 +232,6 @@ Flickable { recalculateWidth() lastIndexValue = currentIndex - - // Signal other controls - displayModeChanged() } id: resolutionComboBox @@ -299,9 +293,6 @@ Flickable { else { updateBitrateForSelection() } - - // Signal other controls - displayModeChanged() } NavigableDialog { @@ -320,9 +311,6 @@ Flickable { onClosed: { widthField.clear() heightField.clear() - - // Signal other controls - displayModeChanged() } onRejected: { @@ -772,25 +760,9 @@ Flickable { activated(currentIndex) } - // Video Super-Resolution does not work in exclusive full screen, so auto switch do borderless window - // [TODO] This may change according to what AMD and Intel will implement, if they can allow video enhancement in fullscreen - function checkVSR(){ - if(videoEnhancementCheck.checked && model.get(currentIndex).val === StreamingPreferences.WM_FULLSCREEN){ - for (var i = 0; i < model.count; 
i++) { - var thisWm = model.get(i).val; - if (model.get(i).val === StreamingPreferences.WM_FULLSCREEN_DESKTOP) { - currentIndex = i - break - } - } - activated(currentIndex) - } - } - Component.onCompleted: { reinitialize() languageChanged.connect(reinitialize) - videoEnhancementChanged.connect(checkVSR) } id: windowModeComboBox @@ -800,8 +772,6 @@ Flickable { textRole: "text" onActivated: { StreamingPreferences.windowMode = model.get(currentIndex).val - // Signal others - windowModeChanged() } ToolTip.delay: 1000 @@ -853,16 +823,9 @@ Flickable { visible: VideoEnhancement.isUIvisible() enabled: true checked: StreamingPreferences.videoEnhancement - property bool checkedSaved onCheckedChanged: { StreamingPreferences.videoEnhancement = checked - // The value of checkedSaved is set while changing the WindowMode, need to find a way not to. - if(StreamingPreferences.windowMode !== StreamingPreferences.WM_FULLSCREEN){ - checkedSaved = checked - } - // Signal others - videoEnhancementChanged() } ToolTip.delay: 1000 ToolTip.timeout: 5000 @@ -871,51 +834,22 @@ Flickable { qsTr("Enhance video quality by utilizing the GPU's AI-Enhancement capabilities.") + qsTr("\nThis feature effectively upscales, reduces compression artifacts and enhances the clarity of streamed content.") + qsTr("\nNote:") - + qsTr("\n - For optimal performance, use the software in borderless window mode; this feature is not applicable in fullscreen mode.") - + qsTr("\n - If available, ensure that appropriate settings, such as VSR (Virtual Super Resolution), are enabled in your GPU driver configurations.") + + qsTr("\n - If available, ensure that appropriate settings (i.e. 
RTX Video enhancement) are enabled in your GPU driver configuration.") + qsTr("\n - Be advised that using this feature on laptops running on battery power may lead to significant battery drain.") function reinitialize() { - if(typeof(checkedSaved) === "undefined"){ - checkedSaved = checked - } if(!VideoEnhancement.isUIvisible()){ checked = false - checkedSaved = checked visible = false } - // If Exclusive fullscreen is selected, disabled the VSR as it does not work in this window mode - else if(StreamingPreferences.windowMode === StreamingPreferences.WM_FULLSCREEN){ - checked = false - } - else { - // Get back the saved status - checked = checkedSaved - } // Indicate if the feature is available but not officially deployed by the Vendor if(VideoEnhancement.isExperimental()){ text = qsTr("Video AI-Enhancement (Experimental)") } } - Timer { - id: vsrTimer - interval: 300 // 0 to make it async to get the final status of StreamingPreferences.windowMode (which is set too late in the process) - running: false // Don't start the timer immediately - repeat: false // Run only once - - onTriggered: { - parent.reinitialize() - } - } - Component.onCompleted: { - checkedSaved = checked reinitialize() - windowModeChanged.connect(() => { - checked = checkedSaved - vsrTimer.start() - }) } } } From d34d7b8bda97098248d7b9b056e10ef03f2fa2bb Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Tue, 27 Feb 2024 22:39:09 +0100 Subject: [PATCH 19/53] Simplifying the initialization of the Color Space for Stream and Output --- .../video/ffmpeg-renderers/d3d11va.cpp | 58 ++----------------- .../video/ffmpeg-renderers/d3d11va.h | 17 ------ 2 files changed, 5 insertions(+), 70 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index f158c00c0..aac7ded79 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -724,7 +724,7 @@ bool 
D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // [TODO] With NVIDIA RTX, while renderering using VideoProcessor with HDR activated in Moonlight, // DXGI_FORMAT_R10G10B10A2_UNORM gives worse result than DXGI_FORMAT_R8G8B8A8_UNORM. // Without this fix, HDR off on server renders gray screen and VSR is inactive (DXGI_COLOR_SPACE_TYPE type 8). - // For user perspective, it is better to not see such a bug, so for the moment I choose to force DXGI_FORMAT_R8G8B8A8_UNORM + // For user perspective, it is better to not see such a bug, so for the moment I choose to force DXGI_FORMAT_R8G8B8A8_UNORM if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_VideoEnhancement->isVendorNVIDIA()){ swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; } @@ -1138,32 +1138,6 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) bool frameFullRange = isFrameFullRange(frame); int frameColorSpace = getFrameColorspace(frame); - // [TODO] Fix the bug with VideoProcessorSetStreamColorSpace1 not working from the first frame - // BUG: If I try to set m_StreamColorSpace correctly since the beginning (=14), the renderer doesn't apply the color space, - // The frame appear gray. The temporary fix is to start from a wrong Color space (13), and switch to 14 after few milliseconds. - // At set the time to 100ms to not have any visual impact at loading, but even 1ms fix the issue, the bug might be linked to the 1st frame. - // This is a non-blocking issue, but I need to investigate further the reason of such a behavior. 
- auto now = std::chrono::system_clock::now(); - long nowTime = std::chrono::duration_cast(now.time_since_epoch()).count(); - if(m_SetStreamColorSpace && nowTime >= m_NextTime){ - m_NextTime = nowTime + m_Increment; - if(m_StreamIndex >= 1){ - m_SetStreamColorSpace = false; - } - switch (frameColorSpace) { - case COLORSPACE_REC_2020: - m_StreamColorSpace = m_StreamColorSpacesFixHDR[m_StreamIndex]; - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, m_StreamColorSpace); - break; - default: - m_StreamColorSpace = m_StreamColorSpacesFixSDR[m_StreamIndex]; - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, m_StreamColorSpace); - } - if(m_SetStreamColorSpace){ - m_StreamIndex++; - } - } - // If nothing has changed since last frame, we're done if (frameColorSpace == m_LastColorSpace && frameFullRange == m_LastFullRange) { return; @@ -1174,39 +1148,24 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) switch (frameColorSpace) { case COLORSPACE_REC_2020: - // For an unclear reason in HDR mode (D3D11 bug?), when the 4 following filters, Brightness (0), Contrast (100), hue (0) and saturation (100), - // are all together at their default value, the tone tends to slight red. It is easy to see when streaming its own screen - // using an inception effect. - // The solution is the set Hue at -1, it doesn't impact the visual (compare to others), and it fixes the color issue. 
- m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, true, -1); // This Stream Color Space accepts HDR mode from Server, but NVIDIA AI-HDR will be disabled (which is fine as we already have native HDR) - m_StreamColorSpace = m_ColorSpaces[14]; + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); if(m_VideoEnhancement->isVendorNVIDIA()){ // [TODO] Remove this line if NVIDIA fix the issue of having VSR not working (add a gray filter) // while HDR is activated for Stream content (swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM;) enableNvidiaVideoSuperResolution(); // Turn it "false" if we prefer to not see the white border around elements when VSR is active. } - // Reset the fix HDR Stream - m_SetStreamColorSpace = true; - m_StreamIndex = 0; break; default: - // For SDR we can use default values. - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_HUE, false, 0); // This Stream Color Space is SDR, which enable the use of NVIDIA AI-HDR (Moonlight's HDR needs to be enabled) // I don't know why, it is gray when HDR is on on Moonlight while using DXGI_FORMAT_R10G10B10A2_UNORM for the SwapChain, // the fix is to force using DXGI_FORMAT_R8G8B8A8_UNORM which seems somehow not impacting the color rendering - m_StreamColorSpace = m_ColorSpaces[8]; + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); if(m_VideoEnhancement->isVendorNVIDIA()){ // Always enable NVIDIA VSR for SDR Stream content enableNvidiaVideoSuperResolution(); } - // Reset the fix SDR Stream as it does work when back and forth with HDR on Server - m_SetStreamColorSpace = true; - m_StreamIndex = 0; } - - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, m_StreamColorSpace); } void D3D11VARenderer::renderVideo(AVFrame* frame) @@ 
-1255,12 +1214,6 @@ bool D3D11VARenderer::createVideoProcessor() { HRESULT hr; - // [TODO] This timer is only used to fix a problem with VideoProcessorSetStreamColorSpace1 not properly applied at the beginning - // These 3 lines can be removed once the bug (non-blocking) is fixed. - auto now = std::chrono::system_clock::now(); - m_StartTime = std::chrono::duration_cast(now.time_since_epoch()).count(); - m_NextTime = m_StartTime + m_Increment; - D3D11_VIDEO_PROCESSOR_CONTENT_DESC content_desc; ZeroMemory(&content_desc, sizeof(content_desc)); @@ -1404,9 +1357,8 @@ bool D3D11VARenderer::createVideoProcessor() // As no effect as the picture is not distorted m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_ANAMORPHIC_SCALING, true, 100); // (0 / 0 / 100) - - m_SetStreamColorSpace = true; - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, m_StreamColorSpace); + // Default on SDR, it will switch to HDR automatically at the 1st frame received if the Stream source has HDR active. 
+ m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); return true; } diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index 3658eaec1..0c60c81d0 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -101,23 +101,6 @@ class D3D11VARenderer : public IFFmpegRenderer DXGI_COLOR_SPACE_CUSTOM, // 25 }; - DXGI_COLOR_SPACE_TYPE m_StreamColorSpace = m_ColorSpaces[8]; // SDR-Only (HDR is 14) - DXGI_COLOR_SPACE_TYPE m_OutputColorSpace = m_ColorSpaces[12]; // SDR & HDR - - // [TODO] Remove the timer feature once the bug with VideoProcessorSetStreamColorSpace1 is fixed - bool m_SetStreamColorSpace = true; - long m_StartTime; - long m_NextTime; - int m_StreamIndex = 0; - int m_Increment = 300; - DXGI_COLOR_SPACE_TYPE m_StreamColorSpacesFixHDR[2] = { - DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020, // 13 - DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020, // 14 - }; - DXGI_COLOR_SPACE_TYPE m_StreamColorSpacesFixSDR[2] = { - DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709, // 9 - DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709, // 8 - }; ID3D11ShaderResourceView* m_VideoTextureResourceView; DECODER_PARAMETERS m_DecoderParams; From b10243be92f1c82fb626675a2493abe4a0104eb6 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 1 Mar 2024 09:14:48 +0100 Subject: [PATCH 20/53] For Multi-GPU setup, select most appropriate GPU when Video Enhancement is On - Simplification of the class VideoEnhancement as all properties will be set at D3D11va initialization - Since it never change during a whole session, only scan all GPU once at the application launch and keep track of the most suitable adapter index with VideoEnhancement->m_AdapterIndex. - Adapt setHDRoutput as the adapter might be different (not the one linked to the display). 
- In case Video Enhancement is Off, we keep using the previous behavior (=using the adapter linked to the display). --- .../video/ffmpeg-renderers/d3d11va.cpp | 717 +++++++++++------- .../video/ffmpeg-renderers/d3d11va.h | 14 +- app/streaming/video/videoenhancement.cpp | 208 +++-- app/streaming/video/videoenhancement.h | 32 +- 4 files changed, 556 insertions(+), 415 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index aac7ded79..6cd14b481 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -11,7 +11,6 @@ #include "streaming/video/videoenhancement.h" #include -#include #include #include #include @@ -195,7 +194,6 @@ void D3D11VARenderer::setHDRStream(){ // Prepare HDR Meta Data for Stream content SS_HDR_METADATA hdrMetadata; if (m_VideoProcessor && LiGetHdrMetadata(&hdrMetadata)) { - streamHDRMetaData.RedPrimary[0] = hdrMetadata.displayPrimaries[0].x; streamHDRMetaData.RedPrimary[1] = hdrMetadata.displayPrimaries[0].y; streamHDRMetaData.GreenPrimary[0] = hdrMetadata.displayPrimaries[1].x; @@ -232,48 +230,61 @@ void D3D11VARenderer::setHDRStream(){ * \return bool Return True is succeed */ void D3D11VARenderer::setHDROutPut(){ - DXGI_HDR_METADATA_HDR10 outputHDRMetaData; - // Find the monitor attached to the application - Microsoft::WRL::ComPtr pOutput; - if (SUCCEEDED(m_SwapChain->GetContainingOutput(&pOutput))) { - Microsoft::WRL::ComPtr pOutput6; - if (SUCCEEDED(pOutput.As(&pOutput6))){ - DXGI_OUTPUT_DESC1 desc1 {}; - if (SUCCEEDED(pOutput6->GetDesc1(&desc1))){ - // Magic constants to convert to fixed point. 
- // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 - static constexpr int kPrimariesFixedPoint = 50000; - static constexpr int kMinLuminanceFixedPoint = 10000; - - // Format Monitor HDR MetaData - outputHDRMetaData.RedPrimary[0] = desc1.RedPrimary[0] * kPrimariesFixedPoint; - outputHDRMetaData.RedPrimary[1] = desc1.RedPrimary[1] * kPrimariesFixedPoint; - outputHDRMetaData.GreenPrimary[0] = desc1.GreenPrimary[0] * kPrimariesFixedPoint; - outputHDRMetaData.GreenPrimary[1] = desc1.GreenPrimary[1] * kPrimariesFixedPoint; - outputHDRMetaData.BluePrimary[0] = desc1.BluePrimary[0] * kPrimariesFixedPoint; - outputHDRMetaData.BluePrimary[1] = desc1.BluePrimary[1] * kPrimariesFixedPoint; - outputHDRMetaData.WhitePoint[0] = desc1.WhitePoint[0] * kPrimariesFixedPoint; - outputHDRMetaData.WhitePoint[1] = desc1.WhitePoint[1] * kPrimariesFixedPoint; - outputHDRMetaData.MaxMasteringLuminance = desc1.MaxLuminance; - outputHDRMetaData.MinMasteringLuminance = desc1.MinLuminance * kMinLuminanceFixedPoint; - // Set it the same as streamed source which is 0 by default as it cannot be evaluated on the fly. 
- outputHDRMetaData.MaxContentLightLevel = 0; - outputHDRMetaData.MaxFrameAverageLightLevel = 0; - - if (m_VideoProcessor) { - // Prepare HDR for the OutPut Monitor - m_VideoContext->VideoProcessorSetOutputHDRMetaData( - m_VideoProcessor.Get(), - DXGI_HDR_METADATA_TYPE_HDR10, - sizeof(DXGI_HDR_METADATA_HDR10), - &outputHDRMetaData - ); + if (m_VideoProcessor){ + + IDXGIFactory1* factory = nullptr; + CreateDXGIFactory1(__uuidof(IDXGIFactory1), (void**)&factory); + + IDXGIAdapter1* adapter = nullptr; + for (UINT adapterIndex = 0; SUCCEEDED(factory->EnumAdapters1(adapterIndex, &adapter)); ++adapterIndex) { + IDXGIOutput* output = nullptr; + for (UINT outputIndex = 0; SUCCEEDED(adapter->EnumOutputs(outputIndex, &output)); ++outputIndex) { + IDXGIOutput6* output6 = nullptr; + if (SUCCEEDED(output->QueryInterface(__uuidof(IDXGIOutput6), (void**)&output6))) { + DXGI_OUTPUT_DESC1 desc1; + if (output6) { + output6->GetDesc1(&desc1); + // Magic constants to convert to fixed point. + // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 + static constexpr int kPrimariesFixedPoint = 50000; + static constexpr int kMinLuminanceFixedPoint = 10000; + + // Format Monitor HDR MetaData + outputHDRMetaData.RedPrimary[0] = desc1.RedPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.RedPrimary[1] = desc1.RedPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[0] = desc1.GreenPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[1] = desc1.GreenPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[0] = desc1.BluePrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[1] = desc1.BluePrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[0] = desc1.WhitePoint[0] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[1] = desc1.WhitePoint[1] * kPrimariesFixedPoint; + outputHDRMetaData.MaxMasteringLuminance = desc1.MaxLuminance; + outputHDRMetaData.MinMasteringLuminance = 
desc1.MinLuminance * kMinLuminanceFixedPoint; + // Set it the same as streamed source which is 0 by default as it cannot be evaluated on the fly. + outputHDRMetaData.MaxContentLightLevel = 0; + outputHDRMetaData.MaxFrameAverageLightLevel = 0; + + // Prepare HDR for the OutPut Monitor + m_VideoContext->VideoProcessorSetOutputHDRMetaData( + m_VideoProcessor.Get(), + DXGI_HDR_METADATA_TYPE_HDR10, + sizeof(DXGI_HDR_METADATA_HDR10), + &outputHDRMetaData + ); + } + + break; } + output6->Release(); } + adapter->Release(); + // Break early if we've found an IDXGIOutput + if (output) + break; } - } + } } bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound) @@ -339,33 +350,15 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter goto Exit; } - // Get video device - if (!m_VideoDevice) { - hr = m_Device->QueryInterface(__uuidof(ID3D11VideoDevice), - (void**)&m_VideoDevice); - if (FAILED(hr)) { - return false; - } - } - - // Get video context - if (!m_VideoContext) { - hr = m_DeviceContext->QueryInterface(__uuidof(ID3D11VideoContext2), - (void**)&m_VideoContext); - if (FAILED(hr)) { - return false; - } - } + createVideoProcessor(); if (!checkDecoderSupport(adapter)) { - m_DeviceContext->Release(); - m_DeviceContext = nullptr; - m_Device->Release(); - m_Device = nullptr; - m_VideoContext->Release(); - m_VideoContext = nullptr; - m_VideoDevice->Release(); - m_VideoDevice = nullptr; + SAFE_COM_RELEASE(m_DeviceContext); + SAFE_COM_RELEASE(m_Device); + SAFE_COM_RELEASE(m_VideoContext); + SAFE_COM_RELEASE(m_VideoDevice); + m_VideoProcessorEnumerator = nullptr; + m_VideoProcessor = nullptr; goto Exit; } @@ -380,24 +373,168 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter return success; } +/** + * \brief Get the Adapter Index based on Video enhancement capabilities + * + * In case of multiple GPUs, get the most appropriate GPU available based on accessible capabilities + * and 
priority of Vendor implementation status (NVIDIA -> AMD -> Intel -> Others). + * + * \return int Returns an Adapter index + */ +int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() +{ + IDXGIAdapter1* adapter = nullptr; + DXGI_ADAPTER_DESC1 adapterDesc; + + int highestScore = -1; + int adapterIndex = -1; + int index = 0; + while(m_Factory->EnumAdapters1(index, &adapter) != DXGI_ERROR_NOT_FOUND) + { + if (SUCCEEDED(adapter->GetDesc1(&adapterDesc))) { + + if (adapterDesc.Flags & DXGI_ADAPTER_FLAG_SOFTWARE) { + // Skip the WARP device. We know it will fail. + index++; + continue; + } + + SAFE_COM_RELEASE(m_DeviceContext); + SAFE_COM_RELEASE(m_Device); + SAFE_COM_RELEASE(m_VideoContext); + SAFE_COM_RELEASE(m_VideoDevice); + m_VideoProcessorEnumerator = nullptr; + m_VideoProcessor = nullptr; + + if (SUCCEEDED(D3D11CreateDevice( + adapter, + D3D_DRIVER_TYPE_UNKNOWN, + nullptr, + D3D11_CREATE_DEVICE_VIDEO_SUPPORT, + nullptr, + 0, + D3D11_SDK_VERSION, + &m_Device, + nullptr, + &m_DeviceContext)) + && createVideoProcessor()){ + + // VSR has the priority over HDR in terms of capability we want to use. + // The priority value may change over time, + // the statement below has been established based on drivers' capabilities status by February 29th 2024. 
+ + int score = -1; + + // Video Super Resolution + if(m_VideoEnhancement->isVendorAMD(adapterDesc.VendorId) && enableAMDVideoSuperResolution()){ + score = std::max(score, 200); + } else if(m_VideoEnhancement->isVendorIntel(adapterDesc.VendorId) && enableIntelVideoSuperResolution()){ + score = std::max(score, 100); + } else if(m_VideoEnhancement->isVendorNVIDIA(adapterDesc.VendorId) && enableNvidiaVideoSuperResolution()){ + score = std::max(score, 300); + } + + // SDR to HDR auto conversion + if(m_VideoEnhancement->isVendorAMD(adapterDesc.VendorId) && enableAMDHDR()){ + score = std::max(score, 20); + } else if(m_VideoEnhancement->isVendorIntel(adapterDesc.VendorId) && enableIntelHDR()){ + score = std::max(score, 10); + } else if(m_VideoEnhancement->isVendorNVIDIA(adapterDesc.VendorId) && enableNvidiaHDR()){ + score = std::max(score, 30); + } + + // Recording the highest score, which will represent the most capable adapter for Video enhancement + if(score > highestScore){ + adapterIndex = index; + } + } + + + + } + + index++; + } + + // Set Video enhancement information + if(adapterIndex >= 0 && m_Factory->EnumAdapters1(adapterIndex, &adapter) != DXGI_ERROR_NOT_FOUND){ + + if (SUCCEEDED(adapter->GetDesc1(&adapterDesc))) { + + SAFE_COM_RELEASE(m_DeviceContext); + SAFE_COM_RELEASE(m_Device); + SAFE_COM_RELEASE(m_VideoContext); + SAFE_COM_RELEASE(m_VideoDevice); + m_VideoProcessorEnumerator = nullptr; + m_VideoProcessor = nullptr; + + if (SUCCEEDED(D3D11CreateDevice( + adapter, + D3D_DRIVER_TYPE_UNKNOWN, + nullptr, + D3D11_CREATE_DEVICE_VIDEO_SUPPORT, + nullptr, + 0, + D3D11_SDK_VERSION, + &m_Device, + nullptr, + &m_DeviceContext)) + && createVideoProcessor()){ + + m_VideoEnhancement->setVendorID(adapterDesc.VendorId); + + // Convert wchar[128] to string + std::wstring GPUname(adapterDesc.Description); + qInfo() << "GPU used for Video Enhancement: " << GPUname; + + if(m_VideoEnhancement->isVendorAMD()){ + 
m_VideoEnhancement->setVSRcapable(enableAMDVideoSuperResolution()); + m_VideoEnhancement->setHDRcapable(enableAMDHDR()); + } else if(m_VideoEnhancement->isVendorIntel()){ + m_VideoEnhancement->setVSRcapable(enableIntelVideoSuperResolution()); + m_VideoEnhancement->setHDRcapable(enableIntelHDR()); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + m_VideoEnhancement->setVSRcapable(enableNvidiaVideoSuperResolution()); + m_VideoEnhancement->setHDRcapable(enableNvidiaHDR()); + } + + // Enable the visibility of Video enhancement feature in the settings of the User interface + m_VideoEnhancement->enableUIvisible(); + } + } + } + + SAFE_COM_RELEASE(m_DeviceContext); + SAFE_COM_RELEASE(m_Device); + SAFE_COM_RELEASE(m_VideoContext); + SAFE_COM_RELEASE(m_VideoDevice); + m_VideoProcessorEnumerator = nullptr; + m_VideoProcessor = nullptr; + + return adapterIndex; +} + /** * \brief Enable Video Super-Resolution for AMD GPU * * This feature is available starting from AMD series 7000 and driver AMD Software 24.1.1 (Jan 23, 2024) * https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 * - * \return void + * \param bool activate Default is true, at true it enables the use of Video Super-Resolution feature + * \return bool Return true if the capability is available */ -void D3D11VARenderer::enableAMDVideoSuperResolution(bool activate){ +bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate){ // The feature is available since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 check how to implement it // https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 - if(m_VideoEnhancement->isVendorAMD() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isVSRcapable()){ - // [TODO] Implement AMD Video Scaler - // Documentation and DX11 code sample - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_VQ_Enhancer_API.md - 
// https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/public/samples/CPPSamples/SimpleEncoder/SimpleEncoder.cpp - } + + // [TODO] Implement AMD Video Scaler + // Documentation and DX11 code sample + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_VQ_Enhancer_API.md + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/public/samples/CPPSamples/SimpleEncoder/SimpleEncoder.cpp + + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution capability is not yet supported by your client's GPU."); + return false; } /** @@ -409,86 +546,86 @@ void D3D11VARenderer::enableAMDVideoSuperResolution(bool activate){ * Values from Chromium source code: * https://chromium.googlesource.com/chromium/src/+/master/ui/gl/swap_chain_presenter.cc * - * \return void + * \param bool activate Default is true, at true it enables the use of Video Super-Resolution feature + * \return bool Return true if the capability is available */ -void D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ +bool D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ HRESULT hr; - if(m_VideoEnhancement->isVendorIntel() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isVSRcapable()){ - - constexpr GUID GUID_INTEL_VPE_INTERFACE = {0xedd1d4b9, 0x8659, 0x4cbc, {0xa4, 0xd6, 0x98, 0x31, 0xa2, 0x16, 0x3a, 0xc3}}; - constexpr UINT kIntelVpeFnVersion = 0x01; - constexpr UINT kIntelVpeFnMode = 0x20; - constexpr UINT kIntelVpeFnScaling = 0x37; - constexpr UINT kIntelVpeVersion3 = 0x0003; - constexpr UINT kIntelVpeModeNone = 0x0; - constexpr UINT kIntelVpeModePreproc = 0x01; - constexpr UINT kIntelVpeScalingDefault = 0x0; - constexpr UINT kIntelVpeScalingSuperResolution = 0x2; + constexpr GUID GUID_INTEL_VPE_INTERFACE = 
{0xedd1d4b9, 0x8659, 0x4cbc, {0xa4, 0xd6, 0x98, 0x31, 0xa2, 0x16, 0x3a, 0xc3}}; + constexpr UINT kIntelVpeFnVersion = 0x01; + constexpr UINT kIntelVpeFnMode = 0x20; + constexpr UINT kIntelVpeFnScaling = 0x37; + constexpr UINT kIntelVpeVersion3 = 0x0003; + constexpr UINT kIntelVpeModeNone = 0x0; + constexpr UINT kIntelVpeModePreproc = 0x01; + constexpr UINT kIntelVpeScalingDefault = 0x0; + constexpr UINT kIntelVpeScalingSuperResolution = 0x2; - UINT param = 0; + UINT param = 0; - struct IntelVpeExt - { - UINT function; - void* param; - }; + struct IntelVpeExt + { + UINT function; + void* param; + }; - IntelVpeExt ext{0, ¶m}; + IntelVpeExt ext{0, ¶m}; - ext.function = kIntelVpeFnVersion; - param = kIntelVpeVersion3; + ext.function = kIntelVpeFnVersion; + param = kIntelVpeVersion3; - hr = m_VideoContext->VideoProcessorSetOutputExtension( - m_VideoProcessor.Get(), &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); - if (FAILED(hr)) - { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "Intel VPE version failed: %x", - hr); - return; - } + hr = m_VideoContext->VideoProcessorSetOutputExtension( + m_VideoProcessor.Get(), &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel VPE version failed: %x", + hr); + return false; + } - ext.function = kIntelVpeFnMode; - if(activate){ - param = kIntelVpeModePreproc; - } else { - param = kIntelVpeModeNone; - } + ext.function = kIntelVpeFnMode; + if(activate){ + param = kIntelVpeModePreproc; + } else { + param = kIntelVpeModeNone; + } - hr = m_VideoContext->VideoProcessorSetOutputExtension( - m_VideoProcessor.Get(), &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); - if (FAILED(hr)) - { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "Intel VPE mode failed: %x", - hr); - return; - } + hr = m_VideoContext->VideoProcessorSetOutputExtension( + m_VideoProcessor.Get(), &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + 
"Intel VPE mode failed: %x", + hr); + return false; + } - ext.function = kIntelVpeFnScaling; - if(activate){ - param = kIntelVpeScalingSuperResolution; - } else { - param = kIntelVpeScalingDefault; - } + ext.function = kIntelVpeFnScaling; + if(activate){ + param = kIntelVpeScalingSuperResolution; + } else { + param = kIntelVpeScalingDefault; + } - hr = m_VideoContext->VideoProcessorSetStreamExtension( - m_VideoProcessor.Get(), 0, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); - if (FAILED(hr)) - { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "Intel Video Super Resolution failed: %x", - hr); - return; - } + hr = m_VideoContext->VideoProcessorSetStreamExtension( + m_VideoProcessor.Get(), 0, &GUID_INTEL_VPE_INTERFACE, sizeof(ext), &ext); + if (FAILED(hr)) + { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Intel Video Super Resolution failed: %x", + hr); + return false; + } - if(activate){ - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution enabled"); - } else { - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution disabled"); - } + if(activate){ + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution enabled"); + } else { + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution disabled"); } + + return true; } /** @@ -502,44 +639,43 @@ void D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ * Values from Chromium source code: * https://chromium.googlesource.com/chromium/src/+/master/ui/gl/swap_chain_presenter.cc * - * \return void + * \param bool activate Default is true, at true it enables the use of Video Super-Resolution feature + * \return bool Return true if the capability is available */ -void D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate){ +bool D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate){ HRESULT hr; + // Toggle VSR + constexpr GUID GUID_NVIDIA_PPE_INTERFACE = {0xd43ce1b3, 0x1f4b, 0x48ac, {0xba, 0xee, 0xc3, 0xc2, 0x53, 0x75, 
0xe6, 0xf7}}; + constexpr UINT kStreamExtensionVersionV1 = 0x1; + constexpr UINT kStreamExtensionMethodSuperResolution = 0x2; - if(m_VideoEnhancement->isVendorNVIDIA() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isVSRcapable()){ - - // Toggle VSR - constexpr GUID GUID_NVIDIA_PPE_INTERFACE = {0xd43ce1b3, 0x1f4b, 0x48ac, {0xba, 0xee, 0xc3, 0xc2, 0x53, 0x75, 0xe6, 0xf7}}; - constexpr UINT kStreamExtensionVersionV1 = 0x1; - constexpr UINT kStreamExtensionMethodSuperResolution = 0x2; - - struct NvidiaStreamExt - { - UINT version; - UINT method; - UINT enable; - }; + struct NvidiaStreamExt + { + UINT version; + UINT method; + UINT enable; + }; - // Convert bool to UINT - UINT enable = activate; + // Convert bool to UINT + UINT enable = activate; - NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV1, kStreamExtensionMethodSuperResolution, enable}; - hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor.Get(), 0, &GUID_NVIDIA_PPE_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); - if (FAILED(hr)) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "NVIDIA RTX Video Super Resolution failed: %x", - hr); - return; - } + NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV1, kStreamExtensionMethodSuperResolution, enable}; + hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor.Get(), 0, &GUID_NVIDIA_PPE_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); + if (FAILED(hr)) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "NVIDIA RTX Video Super Resolution failed: %x", + hr); + return false; + } - if(activate){ - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution enabled"); - } else { - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution disabled"); - } + if(activate){ + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution enabled"); + } else { + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, 
"NVIDIA RTX Video Super Resolution disabled"); } + + return true; } /** @@ -547,12 +683,15 @@ void D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate){ * * This feature is not availble for AMD, and has not yet been announced (by Jan 24th, 2024) * - * \return void + * \param bool activate Default is true, at true it enables the use of HDR feature + * \return bool Return true if the capability is available */ -void D3D11VARenderer::enableAMDHDR(bool activate){ - if(m_VideoEnhancement->isVendorAMD() && m_VideoEnhancement->isHDRcapable()){ - // [TODO] Feature not yet announced - } +bool D3D11VARenderer::enableAMDHDR(bool activate){ + + // [TODO] Feature not yet announced + + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD HDR capability is not yet supported by your client's GPU."); + return false; } /** @@ -560,12 +699,15 @@ void D3D11VARenderer::enableAMDHDR(bool activate){ * * This feature is not availble for Intel, and has not yet been announced (by Jan 24th, 2024) * - * \return void + * \param bool activate Default is true, at true it enables the use of HDR feature + * \return bool Return true if the capability is available */ -void D3D11VARenderer::enableIntelHDR(bool activate){ - if(m_VideoEnhancement->isVendorIntel() && m_VideoEnhancement->isHDRcapable()){ - // [TODO] Feature not yet announced - } +bool D3D11VARenderer::enableIntelHDR(bool activate){ + + // [TODO] Feature not yet announced + + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel HDR capability is not yet supported by your client's GPU."); + return false; } /** @@ -580,44 +722,44 @@ void D3D11VARenderer::enableIntelHDR(bool activate){ * Values from Chromium source code: * https://chromium.googlesource.com/chromium/src/+/master/ui/gl/swap_chain_presenter.cc * - * \return void + * \param bool activate Default is true, at true it enables the use of HDR feature + * \return bool Return true if the capability is available */ -void D3D11VARenderer::enableNvidiaHDR(bool activate){ +bool 
D3D11VARenderer::enableNvidiaHDR(bool activate){ HRESULT hr; - if(m_VideoEnhancement->isVendorNVIDIA() && m_VideoEnhancement->isEnhancementCapable() && m_VideoEnhancement->isHDRcapable()){ + // Toggle HDR + constexpr GUID GUID_NVIDIA_TRUE_HDR_INTERFACE = {0xfdd62bb4, 0x620b, 0x4fd7, {0x9a, 0xb3, 0x1e, 0x59, 0xd0, 0xd5, 0x44, 0xb3}}; + constexpr UINT kStreamExtensionVersionV4 = 0x4; + constexpr UINT kStreamExtensionMethodTrueHDR = 0x3; - // Toggle HDR - constexpr GUID GUID_NVIDIA_TRUE_HDR_INTERFACE = {0xfdd62bb4, 0x620b, 0x4fd7, {0x9a, 0xb3, 0x1e, 0x59, 0xd0, 0xd5, 0x44, 0xb3}}; - constexpr UINT kStreamExtensionVersionV4 = 0x4; - constexpr UINT kStreamExtensionMethodTrueHDR = 0x3; - - struct NvidiaStreamExt - { - UINT version; - UINT method; - UINT enable : 1; - UINT reserved : 31; - }; + struct NvidiaStreamExt + { + UINT version; + UINT method; + UINT enable : 1; + UINT reserved : 31; + }; - // Convert bool to UINT - UINT enable = activate; + // Convert bool to UINT + UINT enable = activate; - NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV4, kStreamExtensionMethodTrueHDR, enable, 0u}; - hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor.Get(), 0, &GUID_NVIDIA_TRUE_HDR_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); - if (FAILED(hr)) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "NVIDIA RTX HDR failed: %x", - hr); - return; - } + NvidiaStreamExt stream_extension_info = {kStreamExtensionVersionV4, kStreamExtensionMethodTrueHDR, enable, 0u}; + hr = m_VideoContext->VideoProcessorSetStreamExtension(m_VideoProcessor.Get(), 0, &GUID_NVIDIA_TRUE_HDR_INTERFACE, sizeof(stream_extension_info), &stream_extension_info); + if (FAILED(hr)) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "NVIDIA RTX HDR failed: %x", + hr); + return false; + } - if(activate){ - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR enabled"); - } else { - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR disabled"); - } + 
if(activate){ + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR enabled"); + } else { + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR disabled"); } + + return true; } bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) @@ -639,6 +781,9 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } + // By default try the adapter corresponding to the display where our window resides. + // This will let us avoid a copy if the display GPU has the required decoder. + // If Video enhancement is enabled, it will look for the most capable GPU in case of multiple GPUs. if (!SDL_DXGIGetOutputInfo(SDL_GetWindowDisplayIndex(params->window), &m_AdapterIndex, &m_OutputIndex)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -655,8 +800,30 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - // First try the adapter corresponding to the display where our window resides. - // This will let us avoid a copy if the display GPU has the required decoder. + // If getAdapterIndex return 0+, it means that we already identified which adapter best fit for Video enhancement, + // so we don't have to estimate it more times to speed up the launch of the streaming. 
+ if(m_VideoEnhancement->getAdapterIndex() < 0){ + int adapterIndex = getAdapterIndexByEnhancementCapabilities(); + if(adapterIndex >= 0){ + m_VideoEnhancement->setAdapterIndex(adapterIndex); + } else { + m_VideoEnhancement->setAdapterIndex(m_AdapterIndex); + } + } + + if(m_VideoEnhancement->isEnhancementCapable()){ + // Check if the user has enable Video enhancement + StreamingPreferences streamingPreferences; + m_VideoEnhancement->enableVideoEnhancement(streamingPreferences.videoEnhancement); + } + + // Set the adapter index of the most appropriate GPU + if( + m_VideoEnhancement->isVideoEnhancementEnabled() + && m_VideoEnhancement->getAdapterIndex() >= 0 + ){ + m_AdapterIndex = m_VideoEnhancement->getAdapterIndex(); + } if (!createDeviceByAdapterIndex(m_AdapterIndex)) { // If that didn't work, we'll try all GPUs in order until we find one // or run out of GPUs (DXGI_ERROR_NOT_FOUND from EnumAdapters()) @@ -680,6 +847,31 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } } + // Set VSR and HDR + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + // Enable VSR feature if available + if(m_VideoEnhancement->isVSRcapable()){ + if(m_VideoEnhancement->isVendorAMD()){ + enableAMDVideoSuperResolution(); + } else if(m_VideoEnhancement->isVendorIntel()){ + enableIntelVideoSuperResolution(); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + enableNvidiaVideoSuperResolution(); + } + } + + // Enable SDR->HDR feature if available + if(m_VideoEnhancement->isHDRcapable()){ + if(m_VideoEnhancement->isVendorAMD()){ + enableAMDHDR(); + } else if(m_VideoEnhancement->isVendorIntel()){ + enableIntelHDR(); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + enableNvidiaHDR(); + } + } + } + DXGI_SWAP_CHAIN_DESC1 swapChainDesc = {}; swapChainDesc.Stereo = FALSE; swapChainDesc.SampleDesc.Count = 1; @@ -825,8 +1017,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // AVHWDeviceContext takes ownership of these objects d3d11vaDeviceContext->device = 
m_Device; d3d11vaDeviceContext->device_context = m_DeviceContext; - d3d11vaDeviceContext->video_device = m_VideoDevice; - d3d11vaDeviceContext->video_context = m_VideoContext; // Set lock functions that we will use to synchronize with FFmpeg's usage of our device context d3d11vaDeviceContext->lock = lockContext; @@ -847,43 +1037,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - // Check if the GPU is capable of AI-Enhancement - // This capability setup is place in this method because it is only available on FFmpeg with DirectX for hardware acceleration - m_VideoEnhancement->enableVideoEnhancement(false); - if(m_VideoEnhancement->isEnhancementCapable()){ - - // Enable the visibility of Video enhancement feature - m_VideoEnhancement->enableUIvisible(); - - StreamingPreferences streamingPreferences; - if(streamingPreferences.videoEnhancement){ - - if(createVideoProcessor()){ - m_VideoEnhancement->enableVideoEnhancement(true); - } - - // Enable VSR feature if available - if(m_VideoEnhancement->isVSRcapable()){ - if(m_VideoEnhancement->isVendorAMD()){ - enableAMDVideoSuperResolution(); - } else if(m_VideoEnhancement->isVendorIntel()){ - enableIntelVideoSuperResolution(); - } else if(m_VideoEnhancement->isVendorNVIDIA()){ - enableNvidiaVideoSuperResolution(); - } - } - - // Enable SDR->HDR feature if available - if(m_VideoEnhancement->isHDRcapable()){ - if(m_VideoEnhancement->isVendorAMD()){ - enableAMDHDR(); - } else if(m_VideoEnhancement->isVendorIntel()){ - enableIntelHDR(); - } else if(m_VideoEnhancement->isVendorNVIDIA()){ - enableNvidiaHDR(); - } - } - } + if(m_VideoProcessor){ + initializeVideoProcessor(); } SAFE_COM_RELEASE(m_BackBufferResource); @@ -1213,25 +1368,23 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) bool D3D11VARenderer::createVideoProcessor() { HRESULT hr; - D3D11_VIDEO_PROCESSOR_CONTENT_DESC content_desc; - ZeroMemory(&content_desc, sizeof(content_desc)); - if (m_VideoProcessor && m_VideoProcessorEnumerator) 
{ - hr = m_VideoProcessorEnumerator->GetVideoProcessorContentDesc(&content_desc); - if (FAILED(hr)) - return false; + m_VideoProcessorEnumerator = nullptr; + m_VideoProcessor = nullptr; - if (content_desc.InputWidth != m_DecoderParams.width || - content_desc.InputHeight != m_DecoderParams.height || - content_desc.OutputWidth != m_DisplayWidth || - content_desc.OutputHeight != m_DisplayHeight) { - m_VideoProcessorEnumerator->Release(); - m_VideoProcessor->Release(); - } - else { - return true; - } + // Get video device + hr = m_Device->QueryInterface(__uuidof(ID3D11VideoDevice), + (void**)&m_VideoDevice); + if (FAILED(hr)) { + return false; + } + + // Get video context + hr = m_DeviceContext->QueryInterface(__uuidof(ID3D11VideoContext2), + (void**)&m_VideoContext); + if (FAILED(hr)) { + return false; } ZeroMemory(&content_desc, sizeof(content_desc)); @@ -1255,6 +1408,20 @@ bool D3D11VARenderer::createVideoProcessor() if (FAILED(hr)) return false; + return true; +} + +/** + * \brief Set the Video Processor to the pipeline + * + * Set proper Color space, filtering, and additional GPU video processing method like AI Upscaling + * + * \return bool Returns true if the Video processor is successfully setup + */ +bool D3D11VARenderer::initializeVideoProcessor() +{ + HRESULT hr; + m_VideoContext->VideoProcessorSetStreamAutoProcessingMode(m_VideoProcessor.Get(), 0, false); m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, 0); @@ -1350,12 +1517,22 @@ bool D3D11VARenderer::createVideoProcessor() } // The section is a customization to enhance (non-AI) shlithly the frame + int noiseReduction = 0; + int edgeEnhancement = 0; + if(m_VideoEnhancement->isVendorAMD()){ + noiseReduction = 30; + edgeEnhancement = 50; + } else if(m_VideoEnhancement->isVendorIntel()){ + noiseReduction = 30; + edgeEnhancement = 30; + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + noiseReduction = 30; + edgeEnhancement = 50; + } 
// Reduce artefacts (like pixelisation around text), does work in additionto AI-enhancement for better result - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, noiseReduction); // (0 / 0 / 100) // Sharpen sligthly the picture to enhance details, does work in addition to AI-enhancement for better result - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 50); // (0 / 0 / 100) - // As no effect as the picture is not distorted - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_ANAMORPHIC_SCALING, true, 100); // (0 / 0 / 100) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, edgeEnhancement); // (0 / 0 / 100) // Default on SDR, it will switch to HDR automatically at the 1st frame received if the Stream source has HDR active. 
m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index 0c60c81d0..8a070ed37 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -37,13 +37,15 @@ class D3D11VARenderer : public IFFmpegRenderer void prepareVideoProcessorStream(AVFrame* frame); void renderVideo(AVFrame* frame); bool createVideoProcessor(); - void enableAMDVideoSuperResolution(bool activate = true); - void enableIntelVideoSuperResolution(bool activate = true); - void enableNvidiaVideoSuperResolution(bool activate = true); - void enableAMDHDR(bool activate = true); - void enableIntelHDR(bool activate = true); - void enableNvidiaHDR(bool activate = true); + bool initializeVideoProcessor(); + bool enableAMDVideoSuperResolution(bool activate = true); + bool enableIntelVideoSuperResolution(bool activate = true); + bool enableNvidiaVideoSuperResolution(bool activate = true); + bool enableAMDHDR(bool activate = true); + bool enableIntelHDR(bool activate = true); + bool enableNvidiaHDR(bool activate = true); bool checkDecoderSupport(IDXGIAdapter* adapter); + int getAdapterIndexByEnhancementCapabilities(); bool createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound = nullptr); void setHDRStream(); void setHDROutPut(); diff --git a/app/streaming/video/videoenhancement.cpp b/app/streaming/video/videoenhancement.cpp index fbf0454ed..f7bad2376 100644 --- a/app/streaming/video/videoenhancement.cpp +++ b/app/streaming/video/videoenhancement.cpp @@ -1,28 +1,15 @@ #include "videoenhancement.h" -#include -#include -#include -#include - -#include - -#pragma comment(lib, "Advapi32.lib") /** * \brief Constructor (Singleton) * - * Check the capacity to handle the AI-Enhancement features such as Video Super-Resolution or SDR to HDR, according to multiple parameters such 
as OS or Video driver. + * VideoEnhancement does not set its properties automatically at instance initiation, + * it depends on D3D11va. Therefore, it needs to be populated at the initialization of + * the rendered D3D11VARenderer::initialize(). * * \return void */ -VideoEnhancement::VideoEnhancement() -{ - if(!m_Initialized){ - setGPUinformation(); - // Avoid to set variables every call of the instance - m_Initialized = true; - } -} +VideoEnhancement::VideoEnhancement(){} /** * \brief Get the singleton instance @@ -31,79 +18,38 @@ VideoEnhancement::VideoEnhancement() * * \return VideoEnhancement instance */ -VideoEnhancement &VideoEnhancement::getInstance() -{ +VideoEnhancement &VideoEnhancement::getInstance(){ static VideoEnhancement instance; return instance; } /** - * \brief Retreive GPU information - * - * Retreive all GPU information: Vendor ID, Driver version, GPU name - * - * \return bool Returns true if it successfully retreived the GPU information - */ -bool VideoEnhancement::setGPUinformation() -{ - bool success = false; - - // Create a Direct3D 11 device - ID3D11Device* pD3DDevice = nullptr; - ID3D11DeviceContext* pContext = nullptr; - - HRESULT hr = D3D11CreateDevice( - nullptr, - D3D_DRIVER_TYPE_HARDWARE, - nullptr, - D3D11_CREATE_DEVICE_DEBUG, - nullptr, - 0, - D3D11_SDK_VERSION, - &pD3DDevice, - nullptr, - &pContext - ); - - IDXGIAdapter* pAdapter = nullptr; - IDXGIDevice* pDXGIDevice = nullptr; - // Get the DXGI device from the D3D11 device. - // It identifies which GPU is being used by the application in case of multiple one (like a iGPU with a dedicated GPU). 
- if (SUCCEEDED(hr) && SUCCEEDED(pD3DDevice->QueryInterface(__uuidof(IDXGIDevice), (void**)&pDXGIDevice))) { - // Get the DXGI adapter from the DXGI device - if (SUCCEEDED(pDXGIDevice->GetAdapter(&pAdapter))) { - DXGI_ADAPTER_DESC adapterIdentifier; - if (SUCCEEDED(pAdapter->GetDesc(&adapterIdentifier))) { - // Convert wchar[128] to string - std::wstring description(adapterIdentifier.Description); - - // Set GPU information - m_VendorId = adapterIdentifier.VendorId; - m_GPUname = description; - - LARGE_INTEGER umd_version; - pAdapter->CheckInterfaceSupport(__uuidof(IDXGIDevice), &umd_version); - // Integer part - m_DriverVersion = HIWORD(umd_version.LowPart); - // Decimal part - double digits = static_cast(floor(log10(LOWORD(umd_version.LowPart))) + 1); - if(digits > 0){ - m_DriverVersion += LOWORD(umd_version.LowPart) / pow(10, digits); - } - - qInfo() << "Active GPU: " << m_GPUname; - qInfo() << "Video Driver: " << m_DriverVersion; - - } - } + * \brief Set the Adapter Index + * + * \return void + */ +void VideoEnhancement::setAdapterIndex(int adapterIndex){ + if (adapterIndex > 0){ + m_AdapterIndex = adapterIndex; } +} - // Release resources - if (pD3DDevice) pD3DDevice->Release(); - if (pDXGIDevice) pDXGIDevice->Release(); - if (pAdapter) pAdapter->Release(); +/** + * \brief Get the Adapter Index + * + * \return int Returns the Index of the most capable adapter for Video enhancement + */ +int VideoEnhancement::getAdapterIndex(){ + return m_AdapterIndex; +} - return success; +/** + * \brief Set Vendor ID + * + * \return void + */ +void VideoEnhancement::setVendorID(int vendorId){ + m_VendorId = vendorId; } /** @@ -115,6 +61,16 @@ bool VideoEnhancement::isVendorAMD(){ return m_VendorId == VENDOR_ID_AMD; } +/** + * \brief Check if the vendor is AMD + * + * \param int vendorId Vendor ID + * \return bool Returns true is the vendor is AMD + */ +bool VideoEnhancement::isVendorAMD(int vendorId){ + return vendorId == VENDOR_ID_AMD; +} + /** * \brief Check if the vendor 
is Intel * @@ -124,6 +80,16 @@ bool VideoEnhancement::isVendorIntel(){ return m_VendorId == VENDOR_ID_INTEL; } +/** + * \brief Check if the vendor is Intel + * + * \param int vendorId Vendor ID + * \return bool Returns true is the vendor is Intel + */ +bool VideoEnhancement::isVendorIntel(int vendorId){ + return vendorId == VENDOR_ID_INTEL; +} + /** * \brief Check if the vendor is NVIDIA * @@ -133,56 +99,58 @@ bool VideoEnhancement::isVendorNVIDIA(){ return m_VendorId == VENDOR_ID_NVIDIA; } +/** + * \brief Check if the vendor is NVIDIA + * + * \param int vendorId Vendor ID + * \return bool Returns true is the vendor is NVIDIA + */ +bool VideoEnhancement::isVendorNVIDIA(int vendorId){ + return vendorId == VENDOR_ID_NVIDIA; +} + +/** + * \brief Set the Video Super-Resolution capability + * + * Keep track if the adapter is capable of Video Super-Resolution + * + * \return void + */ +void VideoEnhancement::setVSRcapable(bool capable){ + m_VSRcapable = capable; +} + /** * \brief Check the Video Super-Resolution capability * - * Check if the GPU used is capable of providing VSR feature according to its serie or driver version + * Check if the GPU used is capable of providing VSR feature * * \return bool Returns true if the VSR feature is available */ bool VideoEnhancement::isVSRcapable(){ - if(isVendorAMD()){ - // [TODO] To be done once AMD provides the VSR solution - // Driver > 24 && RX 7000+ - } else if(isVendorIntel()){ - // All CPU with iGPU (Gen 10th), or dedicated GPU, are capable - if(m_DriverVersion >= MIN_VSR_DRIVER_VERSION_INTEL){ - return true; - } - } else if(isVendorNVIDIA()){ - // RTX VSR v1.5 supports NVIDIA RTX Series 20 starting from the Windows drive 545.84 (Oct 17, 2023). 
- if( - m_GPUname.find(L"RTX") != std::wstring::npos - && m_DriverVersion >= MIN_VSR_DRIVER_VERSION_NVIDIA - ){ - return true; - } - } - return false; + return m_VSRcapable; +} + +/** + * \brief Set the HDR capability + * + * Keep track if the adapter is capable of SDR to HDR + * + * \return void + */ +void VideoEnhancement::setHDRcapable(bool capable){ + m_HDRcapable = capable; } /** * \brief Check the HDR capability * - * Check if the GPU used is capable of providing SDR to HDR feature according to its serie or driver version + * Check if the GPU used is capable of providing SDR to HDR feature * * \return bool Returns true if the HDR feature is available */ bool VideoEnhancement::isHDRcapable(){ - if(isVendorAMD()){ - // Not yet announced by AMD - } else if(isVendorIntel()){ - // Not yet announced by Intel - } else if(isVendorNVIDIA()){ - // RTX VSR v1.5 supports NVIDIA RTX Series 20 starting from the Windows drive 545.84 (Oct 17, 2023). - if( - m_GPUname.find(L"RTX") != std::wstring::npos - && m_DriverVersion >= MIN_HDR_DRIVER_VERSION_NVIDIA - ){ - return true; - } - } - return false; + return m_HDRcapable; } /** @@ -193,7 +161,7 @@ bool VideoEnhancement::isHDRcapable(){ * \return bool Returns true if the such capability is available */ bool VideoEnhancement::isEnhancementCapable(){ - return isVSRcapable() || isHDRcapable(); + return m_VSRcapable || m_HDRcapable; } /** @@ -220,7 +188,7 @@ bool VideoEnhancement::enableVideoEnhancement(bool activate){ * \brief Enable Video Enhancement accessibility from the settings interface * * \param bool visible Default is true, at true it displays Video Enhancement feature - * \return bool Returns true if the Video Enhancement feature is available + * \return void */ void VideoEnhancement::enableUIvisible(bool visible){ m_UIvisible = visible; @@ -241,6 +209,8 @@ bool VideoEnhancement::isUIvisible(){ * \return bool Returns true if the Video Enhancement feature is experimental */ bool VideoEnhancement::isExperimental(){ - // 
[Jan 31st 2024] AMD's is not yet available, Intel's is experimental, NVIDIA's is official + // Only Intel is experimental, NVIDIA and AMD are official + // [ToDo] If Intel officially release the feature, we can return false or just delete + // this method and the QML logic associated. return isVendorIntel(); } diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h index 6d0493036..2d30f094f 100644 --- a/app/streaming/video/videoenhancement.h +++ b/app/streaming/video/videoenhancement.h @@ -4,10 +4,6 @@ #pragma once #include -#include -#include -#include -#include class VideoEnhancement : public QObject { @@ -17,29 +13,20 @@ class VideoEnhancement : public QObject static VideoEnhancement* instance; - bool m_Initialized = false; + // bool m_Initialized = false; bool m_Enabled = false; bool m_UIvisible = false; + bool m_VSRcapable = false; + bool m_HDRcapable = false; // Vendors' name (PCI Special Interest Group) const int VENDOR_ID_AMD = 0x1002; const int VENDOR_ID_INTEL = 0x8086; const int VENDOR_ID_NVIDIA = 0x10DE; - // Minimum driver version accepted for VSR feature - const double MIN_VSR_DRIVER_VERSION_AMD = 21910.5; // AMD Driver Version 23.19.10 (Jan 23rd, 2024) - const double MIN_VSR_DRIVER_VERSION_INTEL = 100.8681; // Intel Driver Version 27.20.100.8681 (Sept 15, 2020) - const double MIN_VSR_DRIVER_VERSION_NVIDIA = 15.4584; // NVIDIA Driver Version 545.84 (Oct 13, 2023) - - // Minimum driver version accepted for HDR feature - const double MIN_HDR_DRIVER_VERSION_AMD = 0; // To be determined, this feature has not yet been announced by AMD - const double MIN_HDR_DRIVER_VERSION_INTEL = 0; // To be determined, this feature has not yet been announced by Intel - const double MIN_HDR_DRIVER_VERSION_NVIDIA = 15.5123; // https://www.nvidia.com/download/driverResults.aspx/218114/en-us/ - // GPU information int m_VendorId = 0; - std::wstring m_GPUname = L"Unknown"; - double m_DriverVersion = 0; + int m_AdapterIndex = -1; // Disable 
the constructor from outside to avoid a new instance VideoEnhancement(); @@ -48,20 +35,25 @@ class VideoEnhancement : public QObject VideoEnhancement(const VideoEnhancement&); VideoEnhancement& operator=(const VideoEnhancement&); - bool setGPUinformation(); - int getVideoDriverInfo(); - public: static VideoEnhancement& getInstance(); + void setVendorID(int vendorId); bool isVendorAMD(); + bool isVendorAMD(int vendorId); bool isVendorIntel(); + bool isVendorIntel(int vendorId); bool isVendorNVIDIA(); + bool isVendorNVIDIA(int vendorId); bool isEnhancementCapable(); + void setVSRcapable(bool capable); bool isVSRcapable(); + void setHDRcapable(bool capable); bool isHDRcapable(); bool isVideoEnhancementEnabled(); bool enableVideoEnhancement(bool activate = true); void enableUIvisible(bool visible = true); + void setAdapterIndex(int adapterIndex); + int getAdapterIndex(); Q_INVOKABLE bool isUIvisible(); Q_INVOKABLE bool isExperimental(); From cbc9fd36d2a8acb09b1a8912679fbe84599cf404 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 1 Mar 2024 12:51:19 +0100 Subject: [PATCH 21/53] Update setHDRoutput to match the display in-use - Update setHDRoutput in case of multiple displays, to make sure we get the HDR information of the display where Moonlight is displayed --- .../video/ffmpeg-renderers/d3d11va.cpp | 92 +++++++++---------- 1 file changed, 44 insertions(+), 48 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 6cd14b481..1304f9bac 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -234,55 +234,54 @@ void D3D11VARenderer::setHDROutPut(){ if (m_VideoProcessor){ - IDXGIFactory1* factory = nullptr; - CreateDXGIFactory1(__uuidof(IDXGIFactory1), (void**)&factory); - - IDXGIAdapter1* adapter = nullptr; - for (UINT adapterIndex = 0; SUCCEEDED(factory->EnumAdapters1(adapterIndex, &adapter)); ++adapterIndex) { + // Retreive 
the monitor HDR metadata where the application is displayed + int appAdapterIndex = 0; + int appOutputIndex = 0; + if (SDL_DXGIGetOutputInfo(SDL_GetWindowDisplayIndex(m_DecoderParams.window), &appAdapterIndex, &appOutputIndex)){ + IDXGIAdapter1* adapter = nullptr; IDXGIOutput* output = nullptr; - for (UINT outputIndex = 0; SUCCEEDED(adapter->EnumOutputs(outputIndex, &output)); ++outputIndex) { - IDXGIOutput6* output6 = nullptr; - if (SUCCEEDED(output->QueryInterface(__uuidof(IDXGIOutput6), (void**)&output6))) { - DXGI_OUTPUT_DESC1 desc1; - if (output6) { - output6->GetDesc1(&desc1); - // Magic constants to convert to fixed point. - // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 - static constexpr int kPrimariesFixedPoint = 50000; - static constexpr int kMinLuminanceFixedPoint = 10000; - - // Format Monitor HDR MetaData - outputHDRMetaData.RedPrimary[0] = desc1.RedPrimary[0] * kPrimariesFixedPoint; - outputHDRMetaData.RedPrimary[1] = desc1.RedPrimary[1] * kPrimariesFixedPoint; - outputHDRMetaData.GreenPrimary[0] = desc1.GreenPrimary[0] * kPrimariesFixedPoint; - outputHDRMetaData.GreenPrimary[1] = desc1.GreenPrimary[1] * kPrimariesFixedPoint; - outputHDRMetaData.BluePrimary[0] = desc1.BluePrimary[0] * kPrimariesFixedPoint; - outputHDRMetaData.BluePrimary[1] = desc1.BluePrimary[1] * kPrimariesFixedPoint; - outputHDRMetaData.WhitePoint[0] = desc1.WhitePoint[0] * kPrimariesFixedPoint; - outputHDRMetaData.WhitePoint[1] = desc1.WhitePoint[1] * kPrimariesFixedPoint; - outputHDRMetaData.MaxMasteringLuminance = desc1.MaxLuminance; - outputHDRMetaData.MinMasteringLuminance = desc1.MinLuminance * kMinLuminanceFixedPoint; - // Set it the same as streamed source which is 0 by default as it cannot be evaluated on the fly. 
- outputHDRMetaData.MaxContentLightLevel = 0; - outputHDRMetaData.MaxFrameAverageLightLevel = 0; - - // Prepare HDR for the OutPut Monitor - m_VideoContext->VideoProcessorSetOutputHDRMetaData( - m_VideoProcessor.Get(), - DXGI_HDR_METADATA_TYPE_HDR10, - sizeof(DXGI_HDR_METADATA_HDR10), - &outputHDRMetaData - ); + UINT outputIndex = appOutputIndex; + if(SUCCEEDED(m_Factory->EnumAdapters1(appAdapterIndex, &adapter))){ + if(SUCCEEDED(adapter->EnumOutputs(outputIndex, &output))){ + IDXGIOutput6* output6 = nullptr; + if (SUCCEEDED(output->QueryInterface(__uuidof(IDXGIOutput6), (void**)&output6))) { + DXGI_OUTPUT_DESC1 desc1; + if (output6) { + output6->GetDesc1(&desc1); + // Magic constants to convert to fixed point. + // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 + static constexpr int kPrimariesFixedPoint = 50000; + static constexpr int kMinLuminanceFixedPoint = 10000; + + // Format Monitor HDR MetaData + outputHDRMetaData.RedPrimary[0] = desc1.RedPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.RedPrimary[1] = desc1.RedPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[0] = desc1.GreenPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[1] = desc1.GreenPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[0] = desc1.BluePrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[1] = desc1.BluePrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[0] = desc1.WhitePoint[0] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[1] = desc1.WhitePoint[1] * kPrimariesFixedPoint; + outputHDRMetaData.MaxMasteringLuminance = desc1.MaxLuminance; + outputHDRMetaData.MinMasteringLuminance = desc1.MinLuminance * kMinLuminanceFixedPoint; + // Set it the same as streamed source which is 0 by default as it cannot be evaluated on the fly. 
+ outputHDRMetaData.MaxContentLightLevel = 0; + outputHDRMetaData.MaxFrameAverageLightLevel = 0; + + // Prepare HDR for the OutPut Monitor + m_VideoContext->VideoProcessorSetOutputHDRMetaData( + m_VideoProcessor.Get(), + DXGI_HDR_METADATA_TYPE_HDR10, + sizeof(DXGI_HDR_METADATA_HDR10), + &outputHDRMetaData + ); + } } - - break; + SAFE_COM_RELEASE(output6); } - output6->Release(); + SAFE_COM_RELEASE(output); } - adapter->Release(); - // Break early if we've found an IDXGIOutput - if (output) - break; + SAFE_COM_RELEASE(adapter); } } } @@ -448,9 +447,6 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() adapterIndex = index; } } - - - } index++; From 51753e19c00f344cf9622cf9a8a999d6a947c230 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 1 Mar 2024 15:13:51 +0100 Subject: [PATCH 22/53] Disable VSR and HDR when scan for capabilities During the scan, it is useless to enable enhancement capabilities for all GPU as it will be done later right after for only the selected GPU. 
--- .../video/ffmpeg-renderers/d3d11va.cpp | 52 ++++++++++--------- .../video/ffmpeg-renderers/d3d11va.h | 12 ++--- 2 files changed, 33 insertions(+), 31 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 1304f9bac..40ea3a7b6 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -349,7 +349,9 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter goto Exit; } - createVideoProcessor(); + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + createVideoProcessor(); + } if (!checkDecoderSupport(adapter)) { SAFE_COM_RELEASE(m_DeviceContext); @@ -425,20 +427,20 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() int score = -1; // Video Super Resolution - if(m_VideoEnhancement->isVendorAMD(adapterDesc.VendorId) && enableAMDVideoSuperResolution()){ + if(m_VideoEnhancement->isVendorAMD(adapterDesc.VendorId) && enableAMDVideoSuperResolution(false, false)){ score = std::max(score, 200); - } else if(m_VideoEnhancement->isVendorIntel(adapterDesc.VendorId) && enableIntelVideoSuperResolution()){ + } else if(m_VideoEnhancement->isVendorIntel(adapterDesc.VendorId) && enableIntelVideoSuperResolution(false, false)){ score = std::max(score, 100); - } else if(m_VideoEnhancement->isVendorNVIDIA(adapterDesc.VendorId) && enableNvidiaVideoSuperResolution()){ + } else if(m_VideoEnhancement->isVendorNVIDIA(adapterDesc.VendorId) && enableNvidiaVideoSuperResolution(false, false)){ score = std::max(score, 300); } // SDR to HDR auto conversion - if(m_VideoEnhancement->isVendorAMD(adapterDesc.VendorId) && enableAMDHDR()){ + if(m_VideoEnhancement->isVendorAMD(adapterDesc.VendorId) && enableAMDHDR(false, false)){ score = std::max(score, 20); - } else if(m_VideoEnhancement->isVendorIntel(adapterDesc.VendorId) && enableIntelHDR()){ + } else if(m_VideoEnhancement->isVendorIntel(adapterDesc.VendorId) && 
enableIntelHDR(false, false)){ score = std::max(score, 10); - } else if(m_VideoEnhancement->isVendorNVIDIA(adapterDesc.VendorId) && enableNvidiaHDR()){ + } else if(m_VideoEnhancement->isVendorNVIDIA(adapterDesc.VendorId) && enableNvidiaHDR(false, false)){ score = std::max(score, 30); } @@ -519,7 +521,7 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() * \param bool activate Default is true, at true it enables the use of Video Super-Resolution feature * \return bool Return true if the capability is available */ -bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate){ +bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo){ // The feature is available since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 check how to implement it // https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 @@ -529,7 +531,7 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate){ // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/public/samples/CPPSamples/SimpleEncoder/SimpleEncoder.cpp - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution capability is not yet supported by your client's GPU."); + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution capability is not yet supported by your client's GPU."); return false; } @@ -545,7 +547,7 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate){ * \param bool activate Default is true, at true it enables the use of Video Super-Resolution feature * \return bool Return true if the capability is available */ -bool D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ +bool D3D11VARenderer::enableIntelVideoSuperResolution(bool activate, bool logInfo){ HRESULT hr; constexpr GUID GUID_INTEL_VPE_INTERFACE = {0xedd1d4b9, 0x8659, 0x4cbc, 
{0xa4, 0xd6, 0x98, 0x31, 0xa2, 0x16, 0x3a, 0xc3}}; @@ -616,9 +618,9 @@ bool D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ } if(activate){ - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution enabled"); + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution enabled"); } else { - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution disabled"); + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel Video Super Resolution disabled"); } return true; @@ -638,7 +640,7 @@ bool D3D11VARenderer::enableIntelVideoSuperResolution(bool activate){ * \param bool activate Default is true, at true it enables the use of Video Super-Resolution feature * \return bool Return true if the capability is available */ -bool D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate){ +bool D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate, bool logInfo){ HRESULT hr; // Toggle VSR @@ -666,9 +668,9 @@ bool D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate){ } if(activate){ - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution enabled"); + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution enabled"); } else { - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution disabled"); + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX Video Super Resolution disabled"); } return true; @@ -682,11 +684,11 @@ bool D3D11VARenderer::enableNvidiaVideoSuperResolution(bool activate){ * \param bool activate Default is true, at true it enables the use of HDR feature * \return bool Return true if the capability is available */ -bool D3D11VARenderer::enableAMDHDR(bool activate){ +bool D3D11VARenderer::enableAMDHDR(bool activate, bool logInfo){ // [TODO] Feature not yet announced - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD HDR capability is not yet supported by your client's GPU."); 
+ if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD HDR capability is not yet supported by your client's GPU."); return false; } @@ -698,11 +700,11 @@ bool D3D11VARenderer::enableAMDHDR(bool activate){ * \param bool activate Default is true, at true it enables the use of HDR feature * \return bool Return true if the capability is available */ -bool D3D11VARenderer::enableIntelHDR(bool activate){ +bool D3D11VARenderer::enableIntelHDR(bool activate, bool logInfo){ // [TODO] Feature not yet announced - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel HDR capability is not yet supported by your client's GPU."); + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Intel HDR capability is not yet supported by your client's GPU."); return false; } @@ -721,7 +723,7 @@ bool D3D11VARenderer::enableIntelHDR(bool activate){ * \param bool activate Default is true, at true it enables the use of HDR feature * \return bool Return true if the capability is available */ -bool D3D11VARenderer::enableNvidiaHDR(bool activate){ +bool D3D11VARenderer::enableNvidiaHDR(bool activate, bool logInfo){ HRESULT hr; // Toggle HDR @@ -750,9 +752,9 @@ bool D3D11VARenderer::enableNvidiaHDR(bool activate){ } if(activate){ - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR enabled"); + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR enabled"); } else { - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR disabled"); + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "NVIDIA RTX HDR disabled"); } return true; @@ -1033,7 +1035,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - if(m_VideoProcessor){ + if(m_VideoProcessor && m_VideoEnhancement->isVideoEnhancementEnabled()){ initializeVideoProcessor(); } @@ -1517,13 +1519,13 @@ bool D3D11VARenderer::initializeVideoProcessor() int edgeEnhancement = 0; if(m_VideoEnhancement->isVendorAMD()){ noiseReduction = 30; - edgeEnhancement = 50; + edgeEnhancement = 30; } else 
if(m_VideoEnhancement->isVendorIntel()){ noiseReduction = 30; edgeEnhancement = 30; } else if(m_VideoEnhancement->isVendorNVIDIA()){ noiseReduction = 30; - edgeEnhancement = 50; + edgeEnhancement = 30; } // Reduce artefacts (like pixelisation around text), does work in additionto AI-enhancement for better result m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, noiseReduction); // (0 / 0 / 100) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index 8a070ed37..a85ccac27 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -38,12 +38,12 @@ class D3D11VARenderer : public IFFmpegRenderer void renderVideo(AVFrame* frame); bool createVideoProcessor(); bool initializeVideoProcessor(); - bool enableAMDVideoSuperResolution(bool activate = true); - bool enableIntelVideoSuperResolution(bool activate = true); - bool enableNvidiaVideoSuperResolution(bool activate = true); - bool enableAMDHDR(bool activate = true); - bool enableIntelHDR(bool activate = true); - bool enableNvidiaHDR(bool activate = true); + bool enableAMDVideoSuperResolution(bool activate = true, bool logInfo = true); + bool enableIntelVideoSuperResolution(bool activate = true, bool logInfo = true); + bool enableNvidiaVideoSuperResolution(bool activate = true, bool logInfo = true); + bool enableAMDHDR(bool activate = true, bool logInfo = true); + bool enableIntelHDR(bool activate = true, bool logInfo = true); + bool enableNvidiaHDR(bool activate = true, bool logInfo = true); bool checkDecoderSupport(IDXGIAdapter* adapter); int getAdapterIndexByEnhancementCapabilities(); bool createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound = nullptr); From 3de61ec2e9cefbe362527255147e5b253a95700e Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 1 Mar 2024 16:59:34 +0100 Subject: [PATCH 23/53] Bug Fixes - [Minor] 
No need to set HDR Stream and Output if HDR is disabled in Moonlight UI - [Minor] During the selection of most Video enhancement GPU, the best result was not saved resulting of selecting the last GPU scanned. --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 40ea3a7b6..f3f31731a 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -446,6 +446,7 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() // Recording the highest score, which will represent the most capable adapater for Video enhancement if(score > highestScore){ + highestScore = score; adapterIndex = index; } } @@ -1501,11 +1502,14 @@ bool D3D11VARenderer::initializeVideoProcessor() m_StreamData.ppFutureSurfacesRight = nullptr; m_StreamData.pInputSurfaceRight = nullptr; - // Prepare HDR Meta Data for Stream content - setHDRStream(); - // Prepare HDR Meta Data for the OutPut Monitor, will be ignored while using SDR - setHDROutPut(); + if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ + // Prepare HDR Meta Data for Stream content + setHDRStream(); + + // Prepare HDR Meta Data for the OutPut Monitor, will be ignored while using SDR + setHDROutPut(); + } // Set OutPut ColorSpace if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ From ec5c69ef62c45dfb07daf425a0ee46715f310bb4 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 1 Mar 2024 21:48:21 +0100 Subject: [PATCH 24/53] Use of setHdrMode callback to set HDR MetaData for Stream and Output --- .../video/ffmpeg-renderers/d3d11va.cpp | 148 +++++++++--------- .../video/ffmpeg-renderers/d3d11va.h | 3 +- 2 files changed, 73 insertions(+), 78 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 
f3f31731a..a3f1214dc 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -180,20 +180,28 @@ D3D11VARenderer::~D3D11VARenderer() } /** - * \brief Set Monitor HDR MetaData information + * \brief Set HDR MetaData information for Stream and Output * - * Get the Monitor HDR MetaData via LimeLight library + * Get the HDR MetaData via LimeLight library sent by Sunshine to apply to the Stream. + * Get the monitor HDR MetaData where the application is running to apply to the Output. * - * \param PSS_HDR_METADATA* HDRMetaData The variable to set the metadata information - * \return bool Return True is succeed + * \param bool enabled At true it enables the HDR settings + * \return void */ -void D3D11VARenderer::setHDRStream(){ +void D3D11VARenderer::setHdrMode(bool enabled){ + + // m_VideoProcessor needs to be available to be set, + // and it makes sense only when HDR is enabled from the UI + if(!enabled || !m_VideoProcessor || !(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT)) + return; DXGI_HDR_METADATA_HDR10 streamHDRMetaData; + DXGI_HDR_METADATA_HDR10 outputHDRMetaData; - // Prepare HDR Meta Data for Stream content + // Prepare HDR Meta Data for Streamed content + bool streamSet = false; SS_HDR_METADATA hdrMetadata; - if (m_VideoProcessor && LiGetHdrMetadata(&hdrMetadata)) { + if (LiGetHdrMetadata(&hdrMetadata)) { streamHDRMetaData.RedPrimary[0] = hdrMetadata.displayPrimaries[0].x; streamHDRMetaData.RedPrimary[1] = hdrMetadata.displayPrimaries[0].y; streamHDRMetaData.GreenPrimary[0] = hdrMetadata.displayPrimaries[1].x; @@ -218,72 +226,69 @@ void D3D11VARenderer::setHDRStream(){ sizeof(DXGI_HDR_METADATA_HDR10), &streamHDRMetaData ); - } -} -/** - * \brief Set Monitor HDR MetaData information - * - * Get the Monitor HDR MetaData via LimeLight library - * - * \param PSS_HDR_METADATA* HDRMetaData The variable to set the metadata information - * \return bool Return True is succeed - */ -void 
D3D11VARenderer::setHDROutPut(){ - DXGI_HDR_METADATA_HDR10 outputHDRMetaData; - - if (m_VideoProcessor){ - - // Retreive the monitor HDR metadata where the application is displayed - int appAdapterIndex = 0; - int appOutputIndex = 0; - if (SDL_DXGIGetOutputInfo(SDL_GetWindowDisplayIndex(m_DecoderParams.window), &appAdapterIndex, &appOutputIndex)){ - IDXGIAdapter1* adapter = nullptr; - IDXGIOutput* output = nullptr; - UINT outputIndex = appOutputIndex; - if(SUCCEEDED(m_Factory->EnumAdapters1(appAdapterIndex, &adapter))){ - if(SUCCEEDED(adapter->EnumOutputs(outputIndex, &output))){ - IDXGIOutput6* output6 = nullptr; - if (SUCCEEDED(output->QueryInterface(__uuidof(IDXGIOutput6), (void**)&output6))) { - DXGI_OUTPUT_DESC1 desc1; - if (output6) { - output6->GetDesc1(&desc1); - // Magic constants to convert to fixed point. - // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 - static constexpr int kPrimariesFixedPoint = 50000; - static constexpr int kMinLuminanceFixedPoint = 10000; - - // Format Monitor HDR MetaData - outputHDRMetaData.RedPrimary[0] = desc1.RedPrimary[0] * kPrimariesFixedPoint; - outputHDRMetaData.RedPrimary[1] = desc1.RedPrimary[1] * kPrimariesFixedPoint; - outputHDRMetaData.GreenPrimary[0] = desc1.GreenPrimary[0] * kPrimariesFixedPoint; - outputHDRMetaData.GreenPrimary[1] = desc1.GreenPrimary[1] * kPrimariesFixedPoint; - outputHDRMetaData.BluePrimary[0] = desc1.BluePrimary[0] * kPrimariesFixedPoint; - outputHDRMetaData.BluePrimary[1] = desc1.BluePrimary[1] * kPrimariesFixedPoint; - outputHDRMetaData.WhitePoint[0] = desc1.WhitePoint[0] * kPrimariesFixedPoint; - outputHDRMetaData.WhitePoint[1] = desc1.WhitePoint[1] * kPrimariesFixedPoint; - outputHDRMetaData.MaxMasteringLuminance = desc1.MaxLuminance; - outputHDRMetaData.MinMasteringLuminance = desc1.MinLuminance * kMinLuminanceFixedPoint; - // Set it the same as streamed source which is 0 by default as it cannot be evaluated on the fly. 
- outputHDRMetaData.MaxContentLightLevel = 0; - outputHDRMetaData.MaxFrameAverageLightLevel = 0; - - // Prepare HDR for the OutPut Monitor - m_VideoContext->VideoProcessorSetOutputHDRMetaData( - m_VideoProcessor.Get(), - DXGI_HDR_METADATA_TYPE_HDR10, - sizeof(DXGI_HDR_METADATA_HDR10), - &outputHDRMetaData - ); - } + streamSet = true; + } + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, + "Set stream HDR mode: %s", streamSet ? "enabled" : "disabled"); + + return; + // Prepare HDR Meta Data to match the monitor HDR specifications + // Retreive the monitor HDR metadata where the application is displayed + int appAdapterIndex = 0; + int appOutputIndex = 0; + bool displaySet = false; + if (SDL_DXGIGetOutputInfo(SDL_GetWindowDisplayIndex(m_DecoderParams.window), &appAdapterIndex, &appOutputIndex)){ + IDXGIAdapter1* adapter = nullptr; + IDXGIOutput* output = nullptr; + UINT outputIndex = appOutputIndex; + if(SUCCEEDED(m_Factory->EnumAdapters1(appAdapterIndex, &adapter))){ + if(SUCCEEDED(adapter->EnumOutputs(outputIndex, &output))){ + IDXGIOutput6* output6 = nullptr; + if (SUCCEEDED(output->QueryInterface(__uuidof(IDXGIOutput6), (void**)&output6))) { + DXGI_OUTPUT_DESC1 desc1; + if (output6) { + output6->GetDesc1(&desc1); + // Magic constants to convert to fixed point. 
+ // https://docs.microsoft.com/en-us/windows/win32/api/dxgi1_5/ns-dxgi1_5-dxgi_hdr_metadata_hdr10 + static constexpr int kPrimariesFixedPoint = 50000; + static constexpr int kMinLuminanceFixedPoint = 10000; + + // Format Monitor HDR MetaData + outputHDRMetaData.RedPrimary[0] = desc1.RedPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.RedPrimary[1] = desc1.RedPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[0] = desc1.GreenPrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.GreenPrimary[1] = desc1.GreenPrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[0] = desc1.BluePrimary[0] * kPrimariesFixedPoint; + outputHDRMetaData.BluePrimary[1] = desc1.BluePrimary[1] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[0] = desc1.WhitePoint[0] * kPrimariesFixedPoint; + outputHDRMetaData.WhitePoint[1] = desc1.WhitePoint[1] * kPrimariesFixedPoint; + outputHDRMetaData.MaxMasteringLuminance = desc1.MaxLuminance; + outputHDRMetaData.MinMasteringLuminance = desc1.MinLuminance * kMinLuminanceFixedPoint; + // Set it the same as streamed source which is 0 by default as it cannot be evaluated on the fly. + outputHDRMetaData.MaxContentLightLevel = 0; + outputHDRMetaData.MaxFrameAverageLightLevel = 0; + + // Prepare HDR for the OutPut Monitor + m_VideoContext->VideoProcessorSetOutputHDRMetaData( + m_VideoProcessor.Get(), + DXGI_HDR_METADATA_TYPE_HDR10, + sizeof(DXGI_HDR_METADATA_HDR10), + &outputHDRMetaData + ); + + displaySet = true; } - SAFE_COM_RELEASE(output6); } - SAFE_COM_RELEASE(output); + SAFE_COM_RELEASE(output6); } - SAFE_COM_RELEASE(adapter); + SAFE_COM_RELEASE(output); } + SAFE_COM_RELEASE(adapter); } + SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, + "Set display HDR mode: %s", displaySet ? 
"enabled" : "disabled"); + } bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound) @@ -1502,15 +1507,6 @@ bool D3D11VARenderer::initializeVideoProcessor() m_StreamData.ppFutureSurfacesRight = nullptr; m_StreamData.pInputSurfaceRight = nullptr; - - if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ - // Prepare HDR Meta Data for Stream content - setHDRStream(); - - // Prepare HDR Meta Data for the OutPut Monitor, will be ignored while using SDR - setHDROutPut(); - } - // Set OutPut ColorSpace if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index a85ccac27..ad5c24221 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -25,6 +25,7 @@ class D3D11VARenderer : public IFFmpegRenderer virtual int getRendererAttributes() override; virtual int getDecoderCapabilities() override; virtual bool needsTestFrame() override; + virtual void setHdrMode(bool enabled) override; private: static void lockContext(void* lock_ctx); @@ -47,8 +48,6 @@ class D3D11VARenderer : public IFFmpegRenderer bool checkDecoderSupport(IDXGIAdapter* adapter); int getAdapterIndexByEnhancementCapabilities(); bool createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound = nullptr); - void setHDRStream(); - void setHDROutPut(); int m_DecoderSelectionPass; From 4db0f6bdf9a0f0120206c4c4f682f6e78fe21a52 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 1 Mar 2024 22:31:32 +0100 Subject: [PATCH 25/53] Add enableVideoEnhancement to DECODER_PARAMETERS --- app/streaming/session.cpp | 16 +++++++++------- app/streaming/session.h | 2 +- app/streaming/video/decoder.h | 1 + app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 7 ++----- 4 files changed, 13 
insertions(+), 13 deletions(-) diff --git a/app/streaming/session.cpp b/app/streaming/session.cpp index 399da4e81..5197fb359 100644 --- a/app/streaming/session.cpp +++ b/app/streaming/session.cpp @@ -260,7 +260,7 @@ void Session::clSetControllerLED(uint16_t controllerNumber, uint8_t r, uint8_t g bool Session::chooseDecoder(StreamingPreferences::VideoDecoderSelection vds, SDL_Window* window, int videoFormat, int width, int height, - int frameRate, bool enableVsync, bool enableFramePacing, bool testOnly, IVideoDecoder*& chosenDecoder) + int frameRate, bool enableVsync, bool enableFramePacing, bool enableVideoEnhancement, bool testOnly, IVideoDecoder*& chosenDecoder) { DECODER_PARAMETERS params; @@ -276,6 +276,7 @@ bool Session::chooseDecoder(StreamingPreferences::VideoDecoderSelection vds, params.window = window; params.enableVsync = enableVsync; params.enableFramePacing = enableFramePacing; + params.enableVideoEnhancement = enableVideoEnhancement; params.testOnly = testOnly; params.vds = vds; @@ -380,7 +381,7 @@ void Session::getDecoderInfo(SDL_Window* window, // Try an HEVC Main10 decoder first to see if we have HDR support if (chooseDecoder(StreamingPreferences::VDS_FORCE_HARDWARE, window, VIDEO_FORMAT_H265_MAIN10, 1920, 1080, 60, - false, false, true, decoder)) { + false, false, false, true, decoder)) { isHardwareAccelerated = decoder->isHardwareAccelerated(); isFullScreenOnly = decoder->isAlwaysFullScreen(); isHdrSupported = decoder->isHdrSupported(); @@ -393,7 +394,7 @@ void Session::getDecoderInfo(SDL_Window* window, // Try an AV1 Main10 decoder next to see if we have HDR support if (chooseDecoder(StreamingPreferences::VDS_FORCE_HARDWARE, window, VIDEO_FORMAT_AV1_MAIN10, 1920, 1080, 60, - false, false, true, decoder)) { + false, false, false, true, decoder)) { // If we've got a working AV1 Main 10-bit decoder, we'll enable the HDR checkbox // but we will still continue probing to get other attributes for HEVC or H.264 // decoders. 
See the AV1 comment at the top of the function for more info. @@ -409,7 +410,7 @@ void Session::getDecoderInfo(SDL_Window* window, // Try a regular hardware accelerated HEVC decoder now if (chooseDecoder(StreamingPreferences::VDS_FORCE_HARDWARE, window, VIDEO_FORMAT_H265, 1920, 1080, 60, - false, false, true, decoder)) { + false, false, false, true, decoder)) { isHardwareAccelerated = decoder->isHardwareAccelerated(); isFullScreenOnly = decoder->isAlwaysFullScreen(); maxResolution = decoder->getDecoderMaxResolution(); @@ -436,7 +437,7 @@ void Session::getDecoderInfo(SDL_Window* window, // This will fall back to software decoding, so it should always work. if (chooseDecoder(StreamingPreferences::VDS_AUTO, window, VIDEO_FORMAT_H264, 1920, 1080, 60, - false, false, true, decoder)) { + false, false, false, true, decoder)) { isHardwareAccelerated = decoder->isHardwareAccelerated(); isFullScreenOnly = decoder->isAlwaysFullScreen(); maxResolution = decoder->getDecoderMaxResolution(); @@ -455,7 +456,7 @@ bool Session::isHardwareDecodeAvailable(SDL_Window* window, { IVideoDecoder* decoder; - if (!chooseDecoder(vds, window, videoFormat, width, height, frameRate, false, false, true, decoder)) { + if (!chooseDecoder(vds, window, videoFormat, width, height, frameRate, false, false, false, true, decoder)) { return false; } @@ -493,7 +494,7 @@ bool Session::populateDecoderProperties(SDL_Window* window) m_StreamConfig.width, m_StreamConfig.height, m_StreamConfig.fps, - false, false, true, decoder)) { + false, false, false, true, decoder)) { return false; } @@ -2032,6 +2033,7 @@ void Session::execInternal() m_ActiveVideoHeight, m_ActiveVideoFrameRate, enableVsync, enableVsync && m_Preferences->framePacing, + m_Preferences->videoEnhancement, false, s_ActiveSession->m_VideoDecoder)) { SDL_AtomicUnlock(&m_DecoderLock); diff --git a/app/streaming/session.h b/app/streaming/session.h index 26c5102cb..6584b59b9 100644 --- a/app/streaming/session.h +++ b/app/streaming/session.h @@ -100,7 
+100,7 @@ class Session : public QObject bool chooseDecoder(StreamingPreferences::VideoDecoderSelection vds, SDL_Window* window, int videoFormat, int width, int height, int frameRate, bool enableVsync, bool enableFramePacing, - bool testOnly, + bool enableVideoEnhancement, bool testOnly, IVideoDecoder*& chosenDecoder); static diff --git a/app/streaming/video/decoder.h b/app/streaming/video/decoder.h index 24708d828..f3c8ef76e 100644 --- a/app/streaming/video/decoder.h +++ b/app/streaming/video/decoder.h @@ -42,6 +42,7 @@ typedef struct _DECODER_PARAMETERS { int frameRate; bool enableVsync; bool enableFramePacing; + bool enableVideoEnhancement; bool testOnly; } DECODER_PARAMETERS, *PDECODER_PARAMETERS; diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index a3f1214dc..4b104cf52 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -7,7 +7,6 @@ #include "streaming/streamutils.h" #include "streaming/session.h" -#include "settings/streamingpreferences.h" #include "streaming/video/videoenhancement.h" #include @@ -817,8 +816,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) if(m_VideoEnhancement->isEnhancementCapable()){ // Check if the user has enable Video enhancement - StreamingPreferences streamingPreferences; - m_VideoEnhancement->enableVideoEnhancement(streamingPreferences.videoEnhancement); + m_VideoEnhancement->enableVideoEnhancement(m_DecoderParams.enableVideoEnhancement); } // Set the adapter index of the most appropriate GPU @@ -2111,8 +2109,7 @@ bool D3D11VARenderer::setupVideoTexture() texDesc.Usage = D3D11_USAGE_DEFAULT; texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; // The flag D3D11_BIND_RENDER_TARGET is needed to enable the use of GPU enhancement - StreamingPreferences streamingPreferences; - if(streamingPreferences.videoEnhancement && m_VideoEnhancement->isEnhancementCapable()){ + 
if(m_DecoderParams.enableVideoEnhancement && m_VideoEnhancement->isEnhancementCapable()){ texDesc.BindFlags |= D3D11_BIND_RENDER_TARGET; } texDesc.CPUAccessFlags = 0; From 986774be95f9dd206478c442ae4f8c6ae74c6502 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 1 Mar 2024 23:17:18 +0100 Subject: [PATCH 26/53] Replace VideEnhancement method used in QML by SystemProperties' ones. - Replace VideoEnhancement::isUIvisible by SystemProperties::isVideoEnhancementCapable - Replace VideoEnhancement::isExperimentalby SystemProperties::isVideoEnhancementExperimental --- app/backend/systemproperties.cpp | 25 +++++++++++++++++++++++++ app/backend/systemproperties.h | 2 ++ app/gui/SettingsView.qml | 7 +++---- app/main.cpp | 10 ---------- app/streaming/video/videoenhancement.h | 10 +++------- 5 files changed, 33 insertions(+), 21 deletions(-) diff --git a/app/backend/systemproperties.cpp b/app/backend/systemproperties.cpp index 4c1593ee8..c5dcdf850 100644 --- a/app/backend/systemproperties.cpp +++ b/app/backend/systemproperties.cpp @@ -6,6 +6,7 @@ #include "streaming/session.h" #include "streaming/streamutils.h" +#include "streaming/video/videoenhancement.h" #ifdef Q_OS_WIN32 #define WIN32_LEAN_AND_MEAN @@ -239,3 +240,27 @@ void SystemProperties::refreshDisplaysInternal() SDL_QuitSubSystem(SDL_INIT_VIDEO); } + +/** + * \brief Inform if the GPU is capable of Video enhancement + * + * Check if either Video Super-Resolution or SDR-to-HDR features can be used by the GPU. 
+ * + * \return bool Returns true if the GPU is capable + */ +bool SystemProperties::isVideoEnhancementCapable() +{ + VideoEnhancement* videoEnhancement = &VideoEnhancement::getInstance(); + return videoEnhancement->isUIvisible(); +} + +/** + * \brief Inform if the GPU's driver is at an experiemental state of Video enhancement implementation + * + * \return bool Returns true if it is experimental yet + */ +bool SystemProperties::isVideoEnhancementExperimental() +{ + VideoEnhancement* videoEnhancement = &VideoEnhancement::getInstance(); + return videoEnhancement->isExperimental(); +} diff --git a/app/backend/systemproperties.h b/app/backend/systemproperties.h index 894f4c69a..8de88be01 100644 --- a/app/backend/systemproperties.h +++ b/app/backend/systemproperties.h @@ -31,6 +31,8 @@ class SystemProperties : public QObject Q_INVOKABLE void refreshDisplays(); Q_INVOKABLE QRect getNativeResolution(int displayIndex); Q_INVOKABLE int getRefreshRate(int displayIndex); + Q_INVOKABLE bool isVideoEnhancementCapable(); + Q_INVOKABLE bool isVideoEnhancementExperimental(); signals: void unmappedGamepadsChanged(); diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index 80ad605fc..84a4d15f2 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -7,7 +7,6 @@ import StreamingPreferences 1.0 import ComputerManager 1.0 import SdlGamepadKeyNavigation 1.0 import SystemProperties 1.0 -import VideoEnhancement 1.0 Flickable { id: settingsPage @@ -820,7 +819,7 @@ Flickable { hoverEnabled: true text: qsTr("Video AI-Enhancement") font.pointSize: 12 - visible: VideoEnhancement.isUIvisible() + visible: SystemProperties.isVideoEnhancementCapable() enabled: true checked: StreamingPreferences.videoEnhancement @@ -838,12 +837,12 @@ Flickable { + qsTr("\n - Be advised that using this feature on laptops running on battery power may lead to significant battery drain.") function reinitialize() { - if(!VideoEnhancement.isUIvisible()){ + 
if(!SystemProperties.isVideoEnhancementCapable()){ checked = false visible = false } // Indicate if the feature is available but not officially deployed by the Vendor - if(VideoEnhancement.isExperimental()){ + if(SystemProperties.isVideoEnhancementExperimental()){ text = qsTr("Video AI-Enhancement (Experimental)") } } diff --git a/app/main.cpp b/app/main.cpp index f01d2ffb8..67eac1d77 100644 --- a/app/main.cpp +++ b/app/main.cpp @@ -43,7 +43,6 @@ #include "streaming/session.h" #include "settings/streamingpreferences.h" #include "gui/sdlgamepadkeynavigation.h" -#include "streaming/video/videoenhancement.h" #if !defined(QT_DEBUG) && defined(Q_OS_WIN32) // Log to file for release Windows builds @@ -643,15 +642,6 @@ int main(int argc, char *argv[]) [](QQmlEngine* qmlEngine, QJSEngine*) -> QObject* { return new StreamingPreferences(qmlEngine); }); - qmlRegisterSingletonType("VideoEnhancement", 1, 0, "VideoEnhancement", - [](QQmlEngine *engine, QJSEngine *scriptEngine) -> QObject* { - Q_UNUSED(engine) - Q_UNUSED(scriptEngine) - VideoEnhancement* videoEnhancement = &VideoEnhancement::getInstance(); - // Set the ownership to CppOwnership to avoid an error when QLM engine tries to delete the object - QQmlEngine::setObjectOwnership(videoEnhancement, QQmlEngine::CppOwnership); - return videoEnhancement; - }); // Create the identity manager on the main thread IdentityManager::get(); diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h index 2d30f094f..3836ad164 100644 --- a/app/streaming/video/videoenhancement.h +++ b/app/streaming/video/videoenhancement.h @@ -3,11 +3,8 @@ #pragma once -#include - -class VideoEnhancement : public QObject +class VideoEnhancement { - Q_OBJECT private: @@ -54,9 +51,8 @@ class VideoEnhancement : public QObject void enableUIvisible(bool visible = true); void setAdapterIndex(int adapterIndex); int getAdapterIndex(); - - Q_INVOKABLE bool isUIvisible(); - Q_INVOKABLE bool isExperimental(); + bool isUIvisible(); 
+ bool isExperimental(); }; From 0957d50e9111121f21bbcd42f74a0f7301def014 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 1 Mar 2024 23:19:48 +0100 Subject: [PATCH 27/53] Remove the use of VideoProcessorSetStreamSourceRect StreamSourceRect does not needs to be set because it reads from the entire surface by default. --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 4b104cf52..e7a0902cc 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -1456,11 +1456,6 @@ bool D3D11VARenderer::initializeVideoProcessor() if (FAILED(hr)) return false; - // Apply processed filters to the surface - RECT srcRect = { 0 }; - srcRect.right = m_DecoderParams.width; - srcRect.bottom = m_DecoderParams.height; - RECT dstRect = { 0 }; dstRect.right = m_DisplayWidth; dstRect.bottom = m_DisplayHeight; @@ -1469,7 +1464,7 @@ bool D3D11VARenderer::initializeVideoProcessor() float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); - // [TODO] There is a behavior I don't understand (bug?) when the destination desRect is larger by one of its side than the source srcRect. + // [TODO] There is a behavior I don't understand (bug?) when the destination desRect is larger by one of its side than the source. // If it is bigger, the window becomes black, but if it is smaller it is fine. // Only one case is working when it is bigger is when the dstRest perfectly equal to the Display size. // Investigation: If there anything to do with pixel alignment (c.f. dxva2.cpp FFALIGN), or screenSpaceToNormalizedDeviceCoords ? 
@@ -1488,7 +1483,6 @@ bool D3D11VARenderer::initializeVideoProcessor() } } - m_VideoContext->VideoProcessorSetStreamSourceRect(m_VideoProcessor.Get(), 0, true, &srcRect); m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor.Get(), 0, true, &dstRect); m_VideoContext->VideoProcessorSetStreamFrameFormat(m_VideoProcessor.Get(), 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE); From e503d26a69421e665ea481e99e189b01f97bc8aa Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sat, 2 Mar 2024 00:23:08 +0100 Subject: [PATCH 28/53] Resolution merge conflicts --- app/gui/SettingsView.qml | 31 ------------------- app/main.cpp | 10 ------ .../video/ffmpeg-renderers/d3d11va.cpp | 6 ---- .../video/ffmpeg-renderers/d3d11va.h | 3 -- app/streaming/video/videoenhancement.h | 1 - 5 files changed, 51 deletions(-) diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index e7dd10730..84a4d15f2 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -7,16 +7,12 @@ import StreamingPreferences 1.0 import ComputerManager 1.0 import SdlGamepadKeyNavigation 1.0 import SystemProperties 1.0 -import VideoEnhancement 1.0 Flickable { id: settingsPage objectName: qsTr("Settings") signal languageChanged() - signal displayModeChanged() - signal windowModeChanged() - signal videoEnhancementChanged() boundsBehavior: Flickable.OvershootBounds @@ -235,9 +231,6 @@ Flickable { recalculateWidth() lastIndexValue = currentIndex - - // Signal other controls - displayModeChanged() } id: resolutionComboBox @@ -299,9 +292,6 @@ Flickable { else { updateBitrateForSelection() } - - // Signal other controls - displayModeChanged() } NavigableDialog { @@ -320,9 +310,6 @@ Flickable { onClosed: { widthField.clear() heightField.clear() - - // Signal other controls - displayModeChanged() } onRejected: { @@ -772,25 +759,9 @@ Flickable { activated(currentIndex) } - // Video Super-Resolution does not work in exclusive full screen, so auto switch do borderless window - // [TODO] This may change 
according to what AMD and Intel will implement, if they can allow video enhancement in fullscreen - function checkVSR(){ - if(videoEnhancementCheck.checked && model.get(currentIndex).val === StreamingPreferences.WM_FULLSCREEN){ - for (var i = 0; i < model.count; i++) { - var thisWm = model.get(i).val; - if (model.get(i).val === StreamingPreferences.WM_FULLSCREEN_DESKTOP) { - currentIndex = i - break - } - } - activated(currentIndex) - } - } - Component.onCompleted: { reinitialize() languageChanged.connect(reinitialize) - videoEnhancementChanged.connect(checkVSR) } id: windowModeComboBox @@ -800,8 +771,6 @@ Flickable { textRole: "text" onActivated: { StreamingPreferences.windowMode = model.get(currentIndex).val - // Signal others - windowModeChanged() } ToolTip.delay: 1000 diff --git a/app/main.cpp b/app/main.cpp index f01d2ffb8..67eac1d77 100644 --- a/app/main.cpp +++ b/app/main.cpp @@ -43,7 +43,6 @@ #include "streaming/session.h" #include "settings/streamingpreferences.h" #include "gui/sdlgamepadkeynavigation.h" -#include "streaming/video/videoenhancement.h" #if !defined(QT_DEBUG) && defined(Q_OS_WIN32) // Log to file for release Windows builds @@ -643,15 +642,6 @@ int main(int argc, char *argv[]) [](QQmlEngine* qmlEngine, QJSEngine*) -> QObject* { return new StreamingPreferences(qmlEngine); }); - qmlRegisterSingletonType("VideoEnhancement", 1, 0, "VideoEnhancement", - [](QQmlEngine *engine, QJSEngine *scriptEngine) -> QObject* { - Q_UNUSED(engine) - Q_UNUSED(scriptEngine) - VideoEnhancement* videoEnhancement = &VideoEnhancement::getInstance(); - // Set the ownership to CppOwnership to avoid an error when QLM engine tries to delete the object - QQmlEngine::setObjectOwnership(videoEnhancement, QQmlEngine::CppOwnership); - return videoEnhancement; - }); // Create the identity manager on the main thread IdentityManager::get(); diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 066de7dc4..e7a0902cc 
100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -99,7 +99,6 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) m_VideoProcessorEnumerator(nullptr), m_LastColorSpace(-1), m_LastFullRange(false), - m_LastServerHDR(LiGetCurrentHostDisplayHdrMode()), m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), m_AllowTearing(false), m_VideoGenericPixelShader(nullptr), @@ -1020,8 +1019,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // AVHWDeviceContext takes ownership of these objects d3d11vaDeviceContext->device = m_Device; d3d11vaDeviceContext->device_context = m_DeviceContext; - d3d11vaDeviceContext->video_device = m_VideoDevice; - d3d11vaDeviceContext->video_context = m_VideoContext; // Set lock functions that we will use to synchronize with FFmpeg's usage of our device context d3d11vaDeviceContext->lock = lockContext; @@ -1685,7 +1682,6 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) // Check if the format is supported by this decoder BOOL supported; - m_IsHDRenabled = false; switch (m_DecoderParams.videoFormat) { case VIDEO_FORMAT_H264: @@ -1731,7 +1727,6 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) videoDevice->Release(); return false; } - m_IsHDRenabled = true; break; case VIDEO_FORMAT_AV1_MAIN8: @@ -1762,7 +1757,6 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) videoDevice->Release(); return false; } - m_IsHDRenabled = true; break; default: diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index a9af01a53..ad5c24221 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -48,8 +48,6 @@ class D3D11VARenderer : public IFFmpegRenderer bool checkDecoderSupport(IDXGIAdapter* adapter); int getAdapterIndexByEnhancementCapabilities(); bool createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound = 
nullptr); - void setHDRStream(); - void setHDROutPut(); int m_DecoderSelectionPass; @@ -111,7 +109,6 @@ class D3D11VARenderer : public IFFmpegRenderer int m_DisplayHeight; int m_LastColorSpace; bool m_LastFullRange; - bool m_LastServerHDR; AVColorTransferCharacteristic m_LastColorTrc; bool m_AllowTearing; diff --git a/app/streaming/video/videoenhancement.h b/app/streaming/video/videoenhancement.h index 3836ad164..e3e1f3755 100644 --- a/app/streaming/video/videoenhancement.h +++ b/app/streaming/video/videoenhancement.h @@ -10,7 +10,6 @@ class VideoEnhancement static VideoEnhancement* instance; - // bool m_Initialized = false; bool m_Enabled = false; bool m_UIvisible = false; bool m_VSRcapable = false; From 8e239a36dcb5441b21e2561c904d024a64a7bb30 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sat, 2 Mar 2024 11:45:09 +0100 Subject: [PATCH 29/53] Simplify the code for the checkbox Video Enhancement Code is simpler but the logic doesn't change --- app/gui/SettingsView.qml | 10 +--------- app/settings/streamingpreferences.h | 3 --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 4 ++-- 3 files changed, 3 insertions(+), 14 deletions(-) diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index 84a4d15f2..fb0a39127 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -836,20 +836,12 @@ Flickable { + qsTr("\n - If available, ensure that appropriate settings (i.e. 
RTX Video enhancement) are enabled in your GPU driver configuration.") + qsTr("\n - Be advised that using this feature on laptops running on battery power may lead to significant battery drain.") - function reinitialize() { - if(!SystemProperties.isVideoEnhancementCapable()){ - checked = false - visible = false - } + Component.onCompleted: { // Indicate if the feature is available but not officially deployed by the Vendor if(SystemProperties.isVideoEnhancementExperimental()){ text = qsTr("Video AI-Enhancement (Experimental)") } } - - Component.onCompleted: { - reinitialize() - } } } } diff --git a/app/settings/streamingpreferences.h b/app/settings/streamingpreferences.h index 49558ae3b..403196640 100644 --- a/app/settings/streamingpreferences.h +++ b/app/settings/streamingpreferences.h @@ -117,7 +117,6 @@ class StreamingPreferences : public QObject Q_PROPERTY(bool absoluteTouchMode MEMBER absoluteTouchMode NOTIFY absoluteTouchModeChanged) Q_PROPERTY(bool framePacing MEMBER framePacing NOTIFY framePacingChanged) Q_PROPERTY(bool videoEnhancement MEMBER videoEnhancement NOTIFY videoEnhancementChanged) - Q_PROPERTY(bool videoEnhancementVisible MEMBER videoEnhancementVisible NOTIFY videoEnhancementVisibleChanged) Q_PROPERTY(bool connectionWarnings MEMBER connectionWarnings NOTIFY connectionWarningsChanged) Q_PROPERTY(bool richPresence MEMBER richPresence NOTIFY richPresenceChanged) Q_PROPERTY(bool gamepadMouse MEMBER gamepadMouse NOTIFY gamepadMouseChanged) @@ -155,7 +154,6 @@ class StreamingPreferences : public QObject bool absoluteTouchMode; bool framePacing; bool videoEnhancement; - bool videoEnhancementVisible; bool connectionWarnings; bool richPresence; bool gamepadMouse; @@ -197,7 +195,6 @@ class StreamingPreferences : public QObject void windowModeChanged(); void framePacingChanged(); void videoEnhancementChanged(); - void videoEnhancementVisibleChanged(); void connectionWarningsChanged(); void richPresenceChanged(); void gamepadMouseChanged(); diff --git 
a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index e7a0902cc..f64b89ca2 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -1506,7 +1506,7 @@ bool D3D11VARenderer::initializeVideoProcessor() m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); } - // The section is a customization to enhance (non-AI) shlithly the frame + // The section is a customization per vendor to slightly enhance (non-AI methods) the frame appearance int noiseReduction = 0; int edgeEnhancement = 0; if(m_VideoEnhancement->isVendorAMD()){ @@ -1519,7 +1519,7 @@ bool D3D11VARenderer::initializeVideoProcessor() noiseReduction = 30; edgeEnhancement = 30; } - // Reduce artefacts (like pixelisation around text), does work in additionto AI-enhancement for better result + // Reduce artefacts (like pixelisation around text), does work in addition to AI-enhancement for better result m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, noiseReduction); // (0 / 0 / 100) // Sharpen sligthly the picture to enhance details, does work in addition to AI-enhancement for better result m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, edgeEnhancement); // (0 / 0 / 100) From df9310236824ac2dcee9d483f54bc25b1a506b7b Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Wed, 27 Mar 2024 16:57:09 +0100 Subject: [PATCH 30/53] Add AMD Video Enhancement feature Use of AMD AMF to support Video upscaling using FSR 1.1 and Denoiser to improve picture rendering quality on AMD 7000+ Series. 
- Import AMF SDK as a Git submodule into the folder "third-party/AMF" - Load from app.pro only AMF libraries used by Moonlight - In UI, add a comment that Video Enhancement still have issues for HDR rendering - Wherever possible, use Microsoft::WRL::ComPtr to let Windows release objects and clear the memory efficiently by itself, making the code cleaner and easier to maintain - Add DirectX object debug messages - Add AMF HDR settings for Input and Output - Add AMF Video enhancement, Upscaling and Denoiser - Round Output Width and Height values to even numbers to avoid the rendering to crash when manually resize - Add Full range Color Space for VideoProcessor --- .gitmodules | 3 + app/app.pro | 12 + app/gui/SettingsView.qml | 1 + .../video/ffmpeg-renderers/d3d11va.cpp | 973 ++++++++++++------ .../video/ffmpeg-renderers/d3d11va.h | 68 +- third-party/AMF | 1 + 6 files changed, 743 insertions(+), 315 deletions(-) create mode 160000 third-party/AMF diff --git a/.gitmodules b/.gitmodules index 0b50efae7..4e39dbf33 100644 --- a/.gitmodules +++ b/.gitmodules @@ -17,3 +17,6 @@ path = libs url = https://github.com/cgutman/moonlight-qt-prebuilts.git shallow = true +[submodule "third-party/AMF"] + path = third-party/AMF + url = https://github.com/GPUOpen-LibrariesAndSDKs/AMF.git diff --git a/app/app.pro b/app/app.pro index e46f8e683..76739197b 100644 --- a/app/app.pro +++ b/app/app.pro @@ -383,6 +383,18 @@ win32:!winrt { streaming/video/ffmpeg-renderers/d3d11va.h \ streaming/video/ffmpeg-renderers/pacer/dxvsyncsource.h } +win32:!winrt { + message(AMF enabled for AMD Drivers) + + SOURCES += \ + ../third-party/AMF/amf/public/common/AMFFactory.cpp \ + ../third-party/AMF/amf/public/common/AMFSTL.cpp \ + ../third-party/AMF/amf/public/common/Thread.cpp \ + ../third-party/AMF/amf/public/common/TraceAdapter.cpp \ + ../third-party/AMF/amf/public/common/Windows\ThreadWindows.cpp + + INCLUDEPATH += $$PWD/../third-party/AMF/amf +} macx { message(VideoToolbox renderer selected) diff --git 
a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index fb0a39127..ba4404703 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -834,6 +834,7 @@ Flickable { + qsTr("\nThis feature effectively upscales, reduces compression artifacts and enhances the clarity of streamed content.") + qsTr("\nNote:") + qsTr("\n - If available, ensure that appropriate settings (i.e. RTX Video enhancement) are enabled in your GPU driver configuration.") + + qsTr("\n - HDR rendering has divers issues depending on the GPU used, we are working on it but we advise to currently use Non-HDR.") + qsTr("\n - Be advised that using this feature on laptops running on battery power may lead to significant battery drain.") Component.onCompleted: { diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index f64b89ca2..e39f85d21 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -9,10 +9,19 @@ #include "streaming/session.h" #include "streaming/video/videoenhancement.h" +#include "public/common/AMFFactory.h" +#include "public/include/core/Platform.h" +#include "public/include/components/VideoConverter.h" +// Video upscaling & Sharpening +#include "public/include/components/HQScaler.h" +// Reducing blocking artifacts +#include "public/include/components/VQEnhancer.h" + #include #include #include #include +#include extern "C" { #include @@ -23,6 +32,8 @@ extern "C" { #include +using Microsoft::WRL::ComPtr; + #pragma comment(lib, "d3d11.lib") #pragma comment(lib, "dxgi.lib") @@ -88,31 +99,25 @@ static_assert(sizeof(CSC_CONST_BUF) % 16 == 0, "Constant buffer sizes must be a D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) : m_DecoderSelectionPass(decoderSelectionPass), - m_Factory(nullptr), - m_Device(nullptr), - m_SwapChain(nullptr), - m_DeviceContext(nullptr), - m_RenderTargetView(nullptr), - m_VideoDevice(nullptr), - m_VideoContext(nullptr), - 
m_VideoProcessor(nullptr), - m_VideoProcessorEnumerator(nullptr), - m_LastColorSpace(-1), - m_LastFullRange(false), - m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), - m_AllowTearing(false), - m_VideoGenericPixelShader(nullptr), - m_VideoBt601LimPixelShader(nullptr), - m_VideoBt2020LimPixelShader(nullptr), - m_VideoVertexBuffer(nullptr), - m_VideoTexture(nullptr), - m_OverlayLock(0), - m_OverlayPixelShader(nullptr), - m_HwDeviceContext(nullptr) + m_Device(nullptr), + m_DeviceContext(nullptr), + m_RenderTargetView(nullptr), + m_VideoProcessor(nullptr), + m_VideoProcessorEnumerator(nullptr), + m_LastColorSpace(-1), + m_LastFullRange(false), + m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), + m_AllowTearing(false), + m_OverlayLock(0), + m_HwDeviceContext(nullptr), + m_AmfContext(nullptr), + m_AmfInputSurface(nullptr), + m_AmfDenoiser(nullptr), + m_AmfFormatConverter(nullptr), + m_AmfUpScaler(nullptr), + m_AmfDownScaler(nullptr), + m_AmfInitialized(false) { - RtlZeroMemory(m_OverlayVertexBuffers, sizeof(m_OverlayVertexBuffers)); - RtlZeroMemory(m_OverlayTextures, sizeof(m_OverlayTextures)); - RtlZeroMemory(m_OverlayTextureResourceViews, sizeof(m_OverlayTextureResourceViews)); RtlZeroMemory(m_VideoTextureResourceViews, sizeof(m_VideoTextureResourceViews)); m_ContextLock = SDL_CreateMutex(); @@ -128,35 +133,40 @@ D3D11VARenderer::~D3D11VARenderer() SDL_DestroyMutex(m_ContextLock); - SAFE_COM_RELEASE(m_VideoVertexBuffer); - SAFE_COM_RELEASE(m_VideoBt2020LimPixelShader); - SAFE_COM_RELEASE(m_VideoBt601LimPixelShader); - SAFE_COM_RELEASE(m_VideoGenericPixelShader); - for (int i = 0; i < ARRAYSIZE(m_VideoTextureResourceViews); i++) { SAFE_COM_RELEASE(m_VideoTextureResourceViews[i]); } - SAFE_COM_RELEASE(m_VideoTexture); + SAFE_COM_RELEASE(m_RenderTargetView); - for (int i = 0; i < ARRAYSIZE(m_OverlayVertexBuffers); i++) { - SAFE_COM_RELEASE(m_OverlayVertexBuffers[i]); + // cleanup AMF instances + if(m_AmfDenoiser){ + // Denoiser + m_AmfDenoiser->Terminate(); + m_AmfDenoiser = nullptr; } - - 
for (int i = 0; i < ARRAYSIZE(m_OverlayTextureResourceViews); i++) { - SAFE_COM_RELEASE(m_OverlayTextureResourceViews[i]); + if(m_AmfFormatConverter){ + // Format converter + m_AmfFormatConverter->Terminate(); + m_AmfFormatConverter = nullptr; } - - for (int i = 0; i < ARRAYSIZE(m_OverlayTextures); i++) { - SAFE_COM_RELEASE(m_OverlayTextures[i]); + if(m_AmfUpScaler){ + // Up Scaler + m_AmfUpScaler->Terminate(); + m_AmfUpScaler = nullptr; + } + if(m_AmfDownScaler){ + // Down Scaler + m_AmfDownScaler->Terminate(); + m_AmfDownScaler = nullptr; + } + if(m_AmfContext){ + // Context + m_AmfContext->Terminate(); + m_AmfContext = nullptr; } - SAFE_COM_RELEASE(m_OverlayPixelShader); - - SAFE_COM_RELEASE(m_BackBufferResource); - - SAFE_COM_RELEASE(m_RenderTargetView); - SAFE_COM_RELEASE(m_SwapChain); + g_AMFFactory.Terminate(); // Force destruction of the swapchain immediately if (m_DeviceContext != nullptr) { @@ -164,6 +174,20 @@ D3D11VARenderer::~D3D11VARenderer() m_DeviceContext->Flush(); } + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } + +#ifdef QT_DEBUG + ComPtr debugDevice; + if(FAILED(m_Device->QueryInterface(__uuidof(ID3D11Debug), reinterpret_cast(debugDevice.GetAddressOf())))) { + debugDevice = nullptr; + } +#endif + if (m_HwDeviceContext != nullptr) { // This will release m_Device and m_DeviceContext too av_buffer_unref(&m_HwDeviceContext); @@ -171,11 +195,20 @@ D3D11VARenderer::~D3D11VARenderer() else { SAFE_COM_RELEASE(m_Device); SAFE_COM_RELEASE(m_DeviceContext); - SAFE_COM_RELEASE(m_VideoDevice); - SAFE_COM_RELEASE(m_VideoContext); } - SAFE_COM_RELEASE(m_Factory); +// Uncomment the lines in the QT_DEBUG section if you need to debug DirectX objects +#ifdef QT_DEBUG + // if(debugDevice) { + // debugDevice->ReportLiveDeviceObjects(D3D11_RLDO_IGNORE_INTERNAL); + // } + // CComPtr pDebugDevice; + // if (SUCCEEDED(DXGIGetDebugInterface1(0, IID_PPV_ARGS(&pDebugDevice)))) + // { + // 
pDebugDevice->ReportLiveObjects(DXGI_DEBUG_DX, DXGI_DEBUG_RLO_FLAGS(DXGI_DEBUG_RLO_DETAIL | DXGI_DEBUG_RLO_IGNORE_INTERNAL)); + // } +#endif + } /** @@ -226,24 +259,57 @@ void D3D11VARenderer::setHdrMode(bool enabled){ &streamHDRMetaData ); + // Set HDR Input for AMF Converter + if(m_AmfInitialized){ + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); + + // Values taken from AMF Sample: + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/5b32766b801434be61350c292127a9ac022b1268/amf/public/samples/CPPSamples/common/SwapChainDXGI.cpp#L740 + // We can initialize with Studio range first, it will be corrected to Full range if needed once the first frame is received. + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, AMF_COLOR_RANGE_STUDIO); + + AMFHDRMetadata amfHDRMetadata; + amfHDRMetadata.redPrimary[0] = amf_uint16(streamHDRMetaData.RedPrimary[0]); + amfHDRMetadata.redPrimary[1] = amf_uint16(streamHDRMetaData.RedPrimary[1]); + amfHDRMetadata.greenPrimary[0] = amf_uint16(streamHDRMetaData.GreenPrimary[0]); + amfHDRMetadata.greenPrimary[1] = amf_uint16(streamHDRMetaData.GreenPrimary[1]); + amfHDRMetadata.bluePrimary[0] = amf_uint16(streamHDRMetaData.BluePrimary[0]); + amfHDRMetadata.bluePrimary[1] = amf_uint16(streamHDRMetaData.BluePrimary[1]); + amfHDRMetadata.whitePoint[0] = amf_uint16(streamHDRMetaData.WhitePoint[0]); + amfHDRMetadata.whitePoint[1] = amf_uint16(streamHDRMetaData.WhitePoint[1]); + amfHDRMetadata.maxMasteringLuminance = amf_uint32(streamHDRMetaData.MaxMasteringLuminance); + amfHDRMetadata.minMasteringLuminance = amf_uint32(streamHDRMetaData.MinMasteringLuminance); + amfHDRMetadata.maxContentLightLevel = 0; + amfHDRMetadata.maxFrameAverageLightLevel 
= 0; + + amf::AMFBufferPtr pHDRMetaDataBuffer; + m_AmfContext->AllocBuffer(amf::AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &pHDRMetaDataBuffer); + AMFHDRMetadata* pData = (AMFHDRMetadata*)pHDRMetaDataBuffer->GetNative(); + memcpy(pData, &amfHDRMetadata, sizeof(AMFHDRMetadata)); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_HDR_METADATA, pData); + + m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); + } + streamSet = true; } SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Set stream HDR mode: %s", streamSet ? "enabled" : "disabled"); - return; // Prepare HDR Meta Data to match the monitor HDR specifications // Retreive the monitor HDR metadata where the application is displayed int appAdapterIndex = 0; int appOutputIndex = 0; bool displaySet = false; if (SDL_DXGIGetOutputInfo(SDL_GetWindowDisplayIndex(m_DecoderParams.window), &appAdapterIndex, &appOutputIndex)){ - IDXGIAdapter1* adapter = nullptr; - IDXGIOutput* output = nullptr; + ComPtr adapter; + ComPtr output; UINT outputIndex = appOutputIndex; if(SUCCEEDED(m_Factory->EnumAdapters1(appAdapterIndex, &adapter))){ if(SUCCEEDED(adapter->EnumOutputs(outputIndex, &output))){ - IDXGIOutput6* output6 = nullptr; + ComPtr output6; if (SUCCEEDED(output->QueryInterface(__uuidof(IDXGIOutput6), (void**)&output6))) { DXGI_OUTPUT_DESC1 desc1; if (output6) { @@ -276,14 +342,44 @@ void D3D11VARenderer::setHdrMode(bool enabled){ &outputHDRMetaData ); + // Set HDR Input for AMF Converter + if(m_AmfInitialized){ + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); + + // Values taken from AMF Sample: + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/5b32766b801434be61350c292127a9ac022b1268/amf/public/samples/CPPSamples/common/SwapChainDXGI.cpp#L732 + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); + 
m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_COLOR_RANGE, AMF_COLOR_RANGE_FULL); + + AMFHDRMetadata amfHDRMetadata; + amfHDRMetadata.redPrimary[0] = amf_uint16(outputHDRMetaData.RedPrimary[0]); + amfHDRMetadata.redPrimary[1] = amf_uint16(outputHDRMetaData.RedPrimary[1]); + amfHDRMetadata.greenPrimary[0] = amf_uint16(outputHDRMetaData.GreenPrimary[0]); + amfHDRMetadata.greenPrimary[1] = amf_uint16(outputHDRMetaData.GreenPrimary[1]); + amfHDRMetadata.bluePrimary[0] = amf_uint16(outputHDRMetaData.BluePrimary[0]); + amfHDRMetadata.bluePrimary[1] = amf_uint16(outputHDRMetaData.BluePrimary[1]); + amfHDRMetadata.whitePoint[0] = amf_uint16(outputHDRMetaData.WhitePoint[0]); + amfHDRMetadata.whitePoint[1] = amf_uint16(outputHDRMetaData.WhitePoint[1]); + amfHDRMetadata.maxMasteringLuminance = amf_uint32(outputHDRMetaData.MaxMasteringLuminance); + amfHDRMetadata.minMasteringLuminance = amf_uint32(outputHDRMetaData.MinMasteringLuminance); + amfHDRMetadata.maxContentLightLevel = 0; + amfHDRMetadata.maxFrameAverageLightLevel = 0; + + amf::AMFBufferPtr pHDRMetaDataBuffer; + m_AmfContext->AllocBuffer(amf::AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &pHDRMetaDataBuffer); + AMFHDRMetadata* pData = (AMFHDRMetadata*)pHDRMetaDataBuffer->GetNative(); + memcpy(pData, &amfHDRMetadata, sizeof(AMFHDRMetadata)); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_HDR_METADATA, pData); + + m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); + } + displaySet = true; } } - SAFE_COM_RELEASE(output6); } - SAFE_COM_RELEASE(output); } - SAFE_COM_RELEASE(adapter); } SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Set display HDR mode: %s", displaySet ? 
"enabled" : "disabled"); @@ -293,14 +389,21 @@ void D3D11VARenderer::setHdrMode(bool enabled){ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound) { bool success = false; - IDXGIAdapter1* adapter = nullptr; + ComPtr adapter; DXGI_ADAPTER_DESC1 adapterDesc; HRESULT hr; +#ifdef QT_DEBUG + D3D_FEATURE_LEVEL featureLevel; + const D3D_FEATURE_LEVEL featureLevels[] = { + D3D_FEATURE_LEVEL_11_1, + }; +#endif + SDL_assert(m_Device == nullptr); SDL_assert(m_DeviceContext == nullptr); - hr = m_Factory->EnumAdapters1(adapterIndex, &adapter); + hr = m_Factory->EnumAdapters1(adapterIndex, adapter.GetAddressOf()); if (hr == DXGI_ERROR_NOT_FOUND) { // Expected at the end of enumeration goto Exit; @@ -332,20 +435,34 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter adapterDesc.VendorId, adapterDesc.DeviceId); - hr = D3D11CreateDevice(adapter, +#ifdef QT_DEBUG + // Generate more information about DirectX11 objects for debugging. + // https://seanmiddleditch.github.io/direct3d-11-debug-api-tricks/ + // Notes: + // * ID3D11Device Refcount: 2 => This is a normal behavior as debugDevice still need m_Device to work + // * For any other object, Refcount: 0, We can ignore IntRef value + hr = D3D11CreateDevice(adapter.Get(), D3D_DRIVER_TYPE_UNKNOWN, nullptr, - D3D11_CREATE_DEVICE_VIDEO_SUPPORT - #ifdef QT_DEBUG - | D3D11_CREATE_DEVICE_DEBUG - #endif - , + D3D11_CREATE_DEVICE_VIDEO_SUPPORT | D3D11_CREATE_DEVICE_DEBUG, + featureLevels, + ARRAYSIZE(featureLevels), + D3D11_SDK_VERSION, + &m_Device, + &featureLevel, + &m_DeviceContext); +#else + hr = D3D11CreateDevice(adapter.Get(), + D3D_DRIVER_TYPE_UNKNOWN, + nullptr, + D3D11_CREATE_DEVICE_VIDEO_SUPPORT, nullptr, 0, D3D11_SDK_VERSION, &m_Device, nullptr, &m_DeviceContext); +#endif if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "D3D11CreateDevice() failed: %x", @@ -353,17 +470,21 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* 
adapter goto Exit; } - if(m_VideoEnhancement->isVideoEnhancementEnabled()){ - createVideoProcessor(); + if(m_VideoEnhancement->isVideoEnhancementEnabled() && !createVideoProcessor()){ + // Disable enhancement if the Video Processor creation failed + m_VideoEnhancement->enableVideoEnhancement(false); + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "VideoProcessor failed to be created"); } - if (!checkDecoderSupport(adapter)) { + if (!checkDecoderSupport(adapter.Get())) { SAFE_COM_RELEASE(m_DeviceContext); SAFE_COM_RELEASE(m_Device); - SAFE_COM_RELEASE(m_VideoContext); - SAFE_COM_RELEASE(m_VideoDevice); - m_VideoProcessorEnumerator = nullptr; - m_VideoProcessor = nullptr; + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } goto Exit; } @@ -374,7 +495,6 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter if (adapterNotFound != nullptr) { *adapterNotFound = (adapter == nullptr); } - SAFE_COM_RELEASE(adapter); return success; } @@ -388,13 +508,13 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter */ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() { - IDXGIAdapter1* adapter = nullptr; + ComPtr adapter; DXGI_ADAPTER_DESC1 adapterDesc; int highestScore = -1; int adapterIndex = -1; int index = 0; - while(m_Factory->EnumAdapters1(index, &adapter) != DXGI_ERROR_NOT_FOUND) + while(m_Factory->EnumAdapters1(index, adapter.GetAddressOf()) != DXGI_ERROR_NOT_FOUND) { if (SUCCEEDED(adapter->GetDesc1(&adapterDesc))) { @@ -406,13 +526,15 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() SAFE_COM_RELEASE(m_DeviceContext); SAFE_COM_RELEASE(m_Device); - SAFE_COM_RELEASE(m_VideoContext); - SAFE_COM_RELEASE(m_VideoDevice); - m_VideoProcessorEnumerator = nullptr; - m_VideoProcessor = nullptr; + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); 
+ } if (SUCCEEDED(D3D11CreateDevice( - adapter, + adapter.Get(), D3D_DRIVER_TYPE_UNKNOWN, nullptr, D3D11_CREATE_DEVICE_VIDEO_SUPPORT, @@ -424,7 +546,7 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() &m_DeviceContext)) && createVideoProcessor()){ - // VSR has the priority over HDR in term of capability we want to use. + // VSR has the priority over SDR-to-HDR in term of capability we want to use. // The priority value may change over the time, // below statement has been established based on drivers' capabilities status by February 29th 2024. @@ -460,19 +582,21 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() } // Set Video enhancement information - if(adapterIndex >= 0 && m_Factory->EnumAdapters1(adapterIndex, &adapter) != DXGI_ERROR_NOT_FOUND){ + if(m_Factory->EnumAdapters1(adapterIndex, adapter.GetAddressOf()) != DXGI_ERROR_NOT_FOUND){ if (SUCCEEDED(adapter->GetDesc1(&adapterDesc))) { SAFE_COM_RELEASE(m_DeviceContext); SAFE_COM_RELEASE(m_Device); - SAFE_COM_RELEASE(m_VideoContext); - SAFE_COM_RELEASE(m_VideoDevice); - m_VideoProcessorEnumerator = nullptr; - m_VideoProcessor = nullptr; + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } if (SUCCEEDED(D3D11CreateDevice( - adapter, + adapter.Get(), D3D_DRIVER_TYPE_UNKNOWN, nullptr, D3D11_CREATE_DEVICE_VIDEO_SUPPORT, @@ -486,33 +610,44 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() m_VideoEnhancement->setVendorID(adapterDesc.VendorId); - // Convert wchar[128] to string - std::wstring GPUname(adapterDesc.Description); - qInfo() << "GPU used for Video Enhancmeent: " << GPUname; - - if(m_VideoEnhancement->isVendorAMD()){ - m_VideoEnhancement->setVSRcapable(enableAMDVideoSuperResolution()); - m_VideoEnhancement->setHDRcapable(enableAMDHDR()); - } else if(m_VideoEnhancement->isVendorIntel()){ - m_VideoEnhancement->setVSRcapable(enableIntelVideoSuperResolution()); - 
m_VideoEnhancement->setHDRcapable(enableIntelHDR()); - } else if(m_VideoEnhancement->isVendorNVIDIA()){ - m_VideoEnhancement->setVSRcapable(enableNvidiaVideoSuperResolution()); - m_VideoEnhancement->setHDRcapable(enableNvidiaHDR()); - } + if(adapterIndex >= 0){ + // Convert wchar[128] to string + std::wstring GPUname(adapterDesc.Description); + qInfo() << "GPU used for Video Enhancement: " << GPUname; + + // Test, but do not active yet to ensure it will be reinitialize when needed + if(m_VideoEnhancement->isVendorAMD()){ + m_VideoEnhancement->setVSRcapable(enableAMDVideoSuperResolution(false)); + m_VideoEnhancement->setHDRcapable(enableAMDHDR(false)); + } else if(m_VideoEnhancement->isVendorIntel()){ + m_VideoEnhancement->setVSRcapable(enableIntelVideoSuperResolution(false)); + m_VideoEnhancement->setHDRcapable(enableIntelHDR(false)); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + m_VideoEnhancement->setVSRcapable(enableNvidiaVideoSuperResolution(false)); + m_VideoEnhancement->setHDRcapable(enableNvidiaHDR(false)); + } - // Enable the visibility of Video enhancement feature in the settings of the User interface - m_VideoEnhancement->enableUIvisible(); + // Enable the visibility of Video enhancement feature in the settings of the User interface + m_VideoEnhancement->enableUIvisible(); + } else { + // Try Auto Stream Super Resolution provided by DirectX11+ and agnostic to any Vendor + if (m_VideoProcessorCapabilities.AutoStreamCaps & D3D11_VIDEO_PROCESSOR_AUTO_STREAM_CAPS_SUPER_RESOLUTION){ + m_AutoStreamSuperResolution = true; + m_VideoEnhancement->setVSRcapable(true); + } + } } } } SAFE_COM_RELEASE(m_DeviceContext); SAFE_COM_RELEASE(m_Device); - SAFE_COM_RELEASE(m_VideoContext); - SAFE_COM_RELEASE(m_VideoDevice); - m_VideoProcessorEnumerator = nullptr; - m_VideoProcessor = nullptr; + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } return adapterIndex; } @@ -527,16 +662,159 @@ 
int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() * \return bool Return true if the capability is available */ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo){ - // The feature is available since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 check how to implement it + // The feature is available since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md // https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 - // [TODO] Implement AMD Video Scaler - // Documentation and DX11 code sample - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_VQ_Enhancer_API.md - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/public/samples/CPPSamples/SimpleEncoder/SimpleEncoder.cpp + AMF_RESULT res; + amf::AMFCapsPtr amfCaps; + amf::AMFIOCapsPtr pInputCaps; + + // We skip if already initialized + if(m_AmfInitialized && activate) + return true; + + amf::AMF_SURFACE_FORMAT inputSurfaceFormat; + amf::AMF_SURFACE_FORMAT outputSurfaceFormat; + AMF_VIDEO_CONVERTER_COLOR_PROFILE_ENUM outputColorSpace; + AMFColor backgroundColor = AMFConstructColor(0, 0, 0, 255); + + // AMF Context initialization + res = g_AMFFactory.Init(); + if (res != AMF_OK) goto Error; + res = g_AMFFactory.GetFactory()->CreateContext(&m_AmfContext); + if (res != AMF_OK) goto Error; + res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVQEnhancer, &m_AmfDenoiser); + if (res != AMF_OK) goto Error; + res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVideoConverter, &m_AmfFormatConverter); + if (res != AMF_OK) goto Error; + res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFHQScaler, &m_AmfUpScaler); + if (res != AMF_OK) goto Error; + 
res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFHQScaler, &m_AmfDownScaler); + if (res != AMF_OK) goto Error; + + res = m_AmfContext->InitDX11(m_Device); + if (res != AMF_OK) goto Error; + + // AMFHQScaler is the newest feature available (v1.4.33), so at least this one needs to be accessible + m_AmfUpScaler->GetCaps(&amfCaps); + if (amfCaps != nullptr && amfCaps->GetAccelerationType() == amf::AMF_ACCEL_NOT_SUPPORTED) { + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "The hardware does not support needed AMD AMF capabilities."); + goto Error; + } + + // Format initialization + inputSurfaceFormat = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? amf::AMF_SURFACE_P010 : amf::AMF_SURFACE_NV12; + outputSurfaceFormat = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? amf::AMF_SURFACE_R10G10B10A2 : amf::AMF_SURFACE_RGBA; + outputColorSpace = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) + ? AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020 + : AMF_VIDEO_CONVERTER_COLOR_PROFILE_709; + + // Input Surface initialization + res = m_AmfContext->AllocSurface(amf::AMF_MEMORY_DX11, + inputSurfaceFormat, + m_DecoderParams.width, + m_DecoderParams.height, + &m_AmfInputSurface); + if (res != AMF_OK) goto Error; + + // Denoiser initialization (Reduce blocking artifacts) + m_AmfDenoiser->SetProperty(AMF_VIDEO_ENHANCER_OUTPUT_SIZE, ::AMFConstructSize(m_DecoderParams.width, m_DecoderParams.height)); + m_AmfDenoiser->SetProperty(AMF_VIDEO_ENHANCER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); + m_AmfDenoiser->SetProperty(AMF_VE_FCR_ATTENUATION, 0.10); + m_AmfDenoiser->SetProperty(AMF_VE_FCR_RADIUS, 1); + res = m_AmfDenoiser->Init(inputSurfaceFormat, + m_DecoderParams.width, + m_DecoderParams.height); + if (res != AMF_OK) goto Error; + + // Convert to full range picture to reduce the "chroma subsampling blur" effect, and enable the use of FSR 1.1 (otherwise can only use FSR 1.0) + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_MEMORY_TYPE,
amf::AMF_MEMORY_DX11); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_FORMAT, outputSurfaceFormat); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_COLOR_PROFILE, outputColorSpace); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_FILL, true); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_FILL_COLOR, backgroundColor); + res = m_AmfFormatConverter->Init(inputSurfaceFormat, + m_DecoderParams.width, + m_DecoderParams.height); + if (res != AMF_OK) goto Error; + + // Upscale initialization + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_OUTPUT_SIZE, ::AMFConstructSize(m_OutputTexture.width * m_ScaleUp, m_OutputTexture.height * m_ScaleUp)); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); + // VideoSR1.1 only supports upscaling ratio from 1.1x to 2.0x + if( + m_OutputTexture.width * m_ScaleUp / m_DecoderParams.width >= 1.1 + && m_OutputTexture.width * m_ScaleUp / m_DecoderParams.width <= 2 + && m_OutputTexture.height * m_ScaleUp / m_DecoderParams.height >= 1.1 + && m_OutputTexture.height * m_ScaleUp / m_DecoderParams.height <= 2 + ){ + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_1); + } else { + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_0); + } + + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_KEEP_ASPECT_RATIO, true); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FILL, true); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FILL_COLOR, backgroundColor); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FROM_SRGB, true); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, m_ScaleUp == 1 ? 
2.00 : 0.50); // We only apply sharpening when the picture is scaled + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FRAME_RATE, m_DecoderParams.frameRate); + // Initialize with the size of the texture that will be input + res = m_AmfUpScaler->Init(outputSurfaceFormat, + m_DecoderParams.width, + m_DecoderParams.height); + if (res != AMF_OK) goto Error; + + // Frame Generation + // Cannot use, not available for DirectX11 + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_FRC_API.md#21-component-initialization + + // Downscale (to the app window size) initialization + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_OUTPUT_SIZE, ::AMFConstructSize(m_OutputTexture.width, m_OutputTexture.height)); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_0); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_KEEP_ASPECT_RATIO, true); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FILL, true); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FILL_COLOR, backgroundColor); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FROM_SRGB, true); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, 2.00); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FRAME_RATE, m_DecoderParams.frameRate); + res = m_AmfDownScaler->Init(outputSurfaceFormat, + m_OutputTexture.width * m_ScaleUp, + m_OutputTexture.height * m_ScaleUp); + if (res != AMF_OK) goto Error; + + if(!activate){ + // Denoiser + m_AmfDenoiser->Terminate(); + m_AmfDenoiser = nullptr; + // Format converter + m_AmfFormatConverter->Terminate(); + m_AmfFormatConverter = nullptr; + // Up Scaler + m_AmfUpScaler->Terminate(); + m_AmfUpScaler = nullptr; + // Down Scaler + m_AmfDownScaler->Terminate(); + m_AmfDownScaler = nullptr; + // Context + m_AmfContext->Terminate(); + m_AmfContext = nullptr; + // Factory + g_AMFFactory.Terminate(); + + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution 
disabled"); + } else { + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution enabled"); + } + + m_AmfInitialized = activate; + return true; - if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution capability is not yet supported by your client's GPU."); +Error: + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution failed."); + m_AmfInitialized = false; return false; } @@ -771,6 +1049,10 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) m_DecoderParams = *params; + // Use only even numbers to avoid a crash at texture creation + m_DecoderParams.width = m_DecoderParams.width & ~1; + m_DecoderParams.height = m_DecoderParams.height & ~1; + if (qgetenv("D3D11VA_ENABLED") == "0") { SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "D3D11VA is disabled by environment variable"); @@ -795,7 +1077,39 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - hr = CreateDXGIFactory(__uuidof(IDXGIFactory5), (void**)&m_Factory); + // Use the current window size as the swapchain size + SDL_GetWindowSize(m_DecoderParams.window, (int*)&m_DisplayWidth, (int*)&m_DisplayHeight); + + // Round down to even numbers to avoid a crash at texture creation + m_DisplayWidth = m_DisplayWidth & ~1; + m_DisplayHeight = m_DisplayHeight & ~1; + + // As m_Display corresponds to the application window, which may not have the same ratio as the Frame, + // we calculate the size of the final texture to fit in the window without distortion + m_OutputTexture.width = m_DisplayWidth; + m_OutputTexture.height = m_DisplayHeight; + m_OutputTexture.left = 0; + m_OutputTexture.top = 0; + + // Scale the source to the destination surface while keeping the same ratio + float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); + float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); + + if(ratioHeight < ratioWidth){ + // Adjust the Width +
m_OutputTexture.width = static_cast(std::floor(m_DecoderParams.width * ratioHeight)); + m_OutputTexture.width = m_OutputTexture.width & ~1; + m_OutputTexture.left = static_cast(std::floor( abs(m_DisplayWidth - m_OutputTexture.width) / 2 )); + m_OutputTexture.left = m_OutputTexture.left & ~1; + } else if(ratioWidth < ratioHeight) { + // Adjust the Height + m_OutputTexture.height = static_cast(std::floor(m_DecoderParams.height * ratioWidth)); + m_OutputTexture.height = m_OutputTexture.height & ~1; + m_OutputTexture.top = static_cast(std::floor( abs(m_DisplayHeight - m_OutputTexture.height) / 2 )); + m_OutputTexture.top = m_OutputTexture.top & ~1; + } + + hr = CreateDXGIFactory(__uuidof(IDXGIFactory5), (void**)m_Factory.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "CreateDXGIFactory() failed: %x", @@ -806,6 +1120,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // If getAdapterIndex return 0+, it means that we already identified which adapter best fit for Video enhancement, // so we don't have to estimate it more times to speed up the launch of the streaming. if(m_VideoEnhancement->getAdapterIndex() < 0){ + // This line is run only once during the application life and is necessary to display (or not) + // the Video enhancement checkbox if the GPU enables it int adapterIndex = getAdapterIndexByEnhancementCapabilities(); if(adapterIndex >= 0){ m_VideoEnhancement->setAdapterIndex(adapterIndex); @@ -851,9 +1167,27 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // Set VSR and HDR if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + // We draw on a bigger output, this will give more space to any vendor Scale Up solution to generate more + // details with less artifacts around contrasted borders. 
+ m_ScaleUp = 2; + if(m_DecoderParams.width == m_OutputTexture.width || m_DecoderParams.height == m_OutputTexture.height){ + // We don't scale up when the pixel ratio is 1:1 between the input frame and the output texture, + // it helps to keep perfect pixel matching from original + m_ScaleUp = 1; + } + if(m_DisplayWidth > 2560 || m_DisplayHeight > 1440){ + // For anything bigger than 1440p, we don't scale as it will require too many resources for low-end devices. + // We want to keep a ratio 1:1 pixel to avoid blur effect when the texture is scaled down at rendering. + m_ScaleUp = 1; + } // Enable VSR feature if available if(m_VideoEnhancement->isVSRcapable()){ - if(m_VideoEnhancement->isVendorAMD()){ + // Try Auto Stream Super Resolution provided by DirectX11+ and agnostic to any Vendor + if (m_AutoStreamSuperResolution){ + // The flag does exist, but not the method yet (by March 8th, 2024) + // We still can prepare the code once Microsoft enables it. + // m_VideoContext->VideoProcessorSetStreamSuperResolution(m_VideoProcessor.Get(), 0, true); + } else if(m_VideoEnhancement->isVendorAMD()){ enableAMDVideoSuperResolution(); } else if(m_VideoEnhancement->isVendorIntel()){ enableIntelVideoSuperResolution(); @@ -902,12 +1236,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // causes performance issues (buffer starvation) on AMD GPUs. swapChainDesc.BufferCount = 3 + 1 + 1; - // Use the current window size as the swapchain size - SDL_GetWindowSize(params->window, (int*)&swapChainDesc.Width, (int*)&swapChainDesc.Height); - - m_DisplayWidth = swapChainDesc.Width; - m_DisplayHeight = swapChainDesc.Height; - + swapChainDesc.Width = m_DisplayWidth; + swapChainDesc.Height = m_DisplayHeight; if (params->videoFormat & VIDEO_FORMAT_MASK_10BIT) { swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM; } @@ -951,7 +1281,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // DXVA2 may let us take over for FSE V-sync off cases. 
However, if we don't have DXGI_FEATURE_PRESENT_ALLOW_TEARING // then we should not attempt to do this unless there's no other option (HDR, DXVA2 failed in pass 1, etc). if (!m_AllowTearing && m_DecoderSelectionPass == 0 && !(params->videoFormat & VIDEO_FORMAT_MASK_10BIT) && - (SDL_GetWindowFlags(params->window) & SDL_WINDOW_FULLSCREEN_DESKTOP) == SDL_WINDOW_FULLSCREEN) { + (SDL_GetWindowFlags(params->window) & SDL_WINDOW_FULLSCREEN_DESKTOP) == SDL_WINDOW_FULLSCREEN) { SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION, "Defaulting to DXVA2 for FSE without DXGI_FEATURE_PRESENT_ALLOW_TEARING support"); return false; @@ -965,13 +1295,13 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // Always use windowed or borderless windowed mode.. SDL does mode-setting for us in // full-screen exclusive mode (SDL_WINDOW_FULLSCREEN), so this actually works out okay. - IDXGISwapChain1* swapChain; + ComPtr swapChain; hr = m_Factory->CreateSwapChainForHwnd(m_Device, info.info.win.window, &swapChainDesc, nullptr, nullptr, - &swapChain); + swapChain.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -980,8 +1310,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - hr = swapChain->QueryInterface(__uuidof(IDXGISwapChain4), (void**)&m_SwapChain); - swapChain->Release(); + hr = swapChain->QueryInterface(__uuidof(IDXGISwapChain4), (void**)m_SwapChain.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1034,8 +1363,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } } - // Create our video texture and SRVs - if (!setupVideoTexture()) { + // Create our video textures and SRVs + if (!setupVideoTexture() || !setupFrameTexture()) { return false; } @@ -1043,8 +1372,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) initializeVideoProcessor(); } - SAFE_COM_RELEASE(m_BackBufferResource); - return true; } @@ -1107,7 +1434,7 @@ void D3D11VARenderer::renderFrame(AVFrame* 
frame) "IDXGISwapChain::SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020) failed: %x", hr); } - if (m_VideoProcessor && m_VideoProcessorEnumerator) { + if (m_VideoProcessor) { m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); }; } @@ -1119,7 +1446,7 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) "IDXGISwapChain::SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709) failed: %x", hr); } - if (m_VideoProcessor && m_VideoProcessorEnumerator) { + if (m_VideoProcessor) { m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); } } @@ -1157,11 +1484,11 @@ void D3D11VARenderer::renderOverlay(Overlay::OverlayType type) return; } - ID3D11Texture2D* overlayTexture = m_OverlayTextures[type]; - ID3D11Buffer* overlayVertexBuffer = m_OverlayVertexBuffers[type]; - ID3D11ShaderResourceView* overlayTextureResourceView = m_OverlayTextureResourceViews[type]; + ComPtr overlayTexture = m_OverlayTextures[type].Get(); + ComPtr overlayVertexBuffer = m_OverlayVertexBuffers[type].Get(); + ComPtr overlayTextureResourceView = m_OverlayTextureResourceViews[type].Get(); - if (overlayTexture == nullptr) { + if (overlayTexture.Get() == nullptr) { SDL_AtomicUnlock(&m_OverlayLock); return; } @@ -1169,27 +1496,20 @@ void D3D11VARenderer::renderOverlay(Overlay::OverlayType type) // Reference these objects so they don't immediately go away if the // overlay update thread tries to release them. 
SDL_assert(overlayVertexBuffer != nullptr); - overlayTexture->AddRef(); - overlayVertexBuffer->AddRef(); - overlayTextureResourceView->AddRef(); SDL_AtomicUnlock(&m_OverlayLock); // Bind vertex buffer UINT stride = sizeof(VERTEX); UINT offset = 0; - m_DeviceContext->IASetVertexBuffers(0, 1, &overlayVertexBuffer, &stride, &offset); + m_DeviceContext->IASetVertexBuffers(0, 1, overlayVertexBuffer.GetAddressOf(), &stride, &offset); // Bind pixel shader and resources - m_DeviceContext->PSSetShader(m_OverlayPixelShader, nullptr, 0); - m_DeviceContext->PSSetShaderResources(0, 1, &overlayTextureResourceView); + m_DeviceContext->PSSetShader(m_OverlayPixelShader.Get(), nullptr, 0); + m_DeviceContext->PSSetShaderResources(0, 1, overlayTextureResourceView.GetAddressOf()); // Draw the overlay m_DeviceContext->DrawIndexed(6, 0, 0); - - overlayTextureResourceView->Release(); - overlayTexture->Release(); - overlayVertexBuffer->Release(); } void D3D11VARenderer::bindColorConversion(AVFrame* frame) @@ -1199,14 +1519,14 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) // We have purpose-built shaders for the common Rec 601 (SDR) and Rec 2020 (HDR) cases if (!fullRange && colorspace == COLORSPACE_REC_601) { - m_DeviceContext->PSSetShader(m_VideoBt601LimPixelShader, nullptr, 0); + m_DeviceContext->PSSetShader(m_VideoBt601LimPixelShader.Get(), nullptr, 0); } else if (!fullRange && colorspace == COLORSPACE_REC_2020) { - m_DeviceContext->PSSetShader(m_VideoBt2020LimPixelShader, nullptr, 0); + m_DeviceContext->PSSetShader(m_VideoBt2020LimPixelShader.Get(), nullptr, 0); } else { // We'll need to use the generic shader for this colorspace and color range combo - m_DeviceContext->PSSetShader(m_VideoGenericPixelShader, nullptr, 0); + m_DeviceContext->PSSetShader(m_VideoGenericPixelShader.Get(), nullptr, 0); // If nothing has changed since last frame, we're done if (colorspace == m_LastColorSpace && fullRange == m_LastFullRange) { @@ -1259,11 +1579,11 @@ void 
D3D11VARenderer::bindColorConversion(AVFrame* frame) D3D11_SUBRESOURCE_DATA constData = {}; constData.pSysMem = &constBuf; - ID3D11Buffer* constantBuffer; - HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, &constantBuffer); + ComPtr constantBuffer; + HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, constantBuffer.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->PSSetConstantBuffers(0, 1, &constantBuffer); - constantBuffer->Release(); + m_DeviceContext->PSSetConstantBuffers(0, 1, constantBuffer.GetAddressOf()); + // constantBuffer->Release(); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1306,33 +1626,41 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) switch (frameColorSpace) { case COLORSPACE_REC_2020: // This Stream Color Space accepts HDR mode from Server, but NVIDIA AI-HDR will be disabled (which is fine as we already have native HDR) - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, frameFullRange ? DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020 : DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); if(m_VideoEnhancement->isVendorNVIDIA()){ // [TODO] Remove this line if NVIDIA fix the issue of having VSR not working (add a gray filter) // while HDR is activated for Stream content (swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM;) enableNvidiaVideoSuperResolution(); // Turn it "false" if we prefer to not see the white border around elements when VSR is active. 
} + if(m_AmfInitialized){ + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, frameFullRange ? AMF_COLOR_RANGE_FULL : AMF_COLOR_RANGE_STUDIO); + m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); + } break; default: // This Stream Color Space is SDR, which enable the use of NVIDIA AI-HDR (Moonlight's HDR needs to be enabled) // I don't know why, it is gray when HDR is on on Moonlight while using DXGI_FORMAT_R10G10B10A2_UNORM for the SwapChain, // the fix is to force using DXGI_FORMAT_R8G8B8A8_UNORM which seems somehow not impacting the color rendering - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, frameFullRange ? 
DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709 : DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); if(m_VideoEnhancement->isVendorNVIDIA()){ // Always enable NVIDIA VSR for SDR Stream content enableNvidiaVideoSuperResolution(); } + if(m_AmfInitialized){ + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, false); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_UNDEFINED); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, AMF_COLOR_RANGE_UNDEFINED); + m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); + } } } void D3D11VARenderer::renderVideo(AVFrame* frame) { - // Bind video rendering vertex buffer - UINT stride = sizeof(VERTEX); - UINT offset = 0; - m_DeviceContext->IASetVertexBuffers(0, 1, &m_VideoVertexBuffer, &stride, &offset); - - // Copy this frame (minus alignment padding) into our video texture D3D11_BOX srcBox; srcBox.left = 0; srcBox.top = 0; @@ -1340,7 +1668,57 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) srcBox.bottom = m_DecoderParams.height; srcBox.front = 0; srcBox.back = 1; - m_DeviceContext->CopySubresourceRegion(m_VideoTexture, 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + + // Setup for AMD AMF + if(m_AmfInitialized){ + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_FrameTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + m_AmfContext->CreateSurfaceFromDX11Native(m_FrameTexture.Get(), &m_AmfInputSurface, nullptr); + + amf::AMFDataPtr m_AmfData(m_AmfInputSurface); + + // Denoisier => Reduce deblocking artifacts due to compressed streamed content + 
m_AmfDenoiser->SubmitInput(m_AmfData); + m_AmfDenoiser->QueryOutput(&m_AmfData); + m_AmfDenoiser->Flush(); + + // Format converter => To provide best color rendering + m_AmfFormatConverter->SubmitInput(m_AmfData); + m_AmfFormatConverter->QueryOutput(&m_AmfData); + m_AmfFormatConverter->Flush(); + + // Up Scaling => To a higher resolution than the application window to give more surface to the VSR to generate details and thus picture clarity + m_AmfUpScaler->SubmitInput(m_AmfData); + m_AmfUpScaler->QueryOutput(&m_AmfData); + m_AmfUpScaler->Flush(); + + // We don't need to scale down if the pixel ratio is already 1:1 + if(m_ScaleUp != 1){ + // Down Scaling => To avoid a blur effect if relying on VideoProcessor, this method provides clear pixel rendering + m_AmfDownScaler->SubmitInput(m_AmfData); + m_AmfDownScaler->QueryOutput(&m_AmfData); + m_AmfDownScaler->Flush(); + } + + amf::AMFSurfacePtr amfOutputSurface(m_AmfData); + m_DeviceContext->CopyResource(m_VideoTexture.Get(), (ID3D11Texture2D*)amfOutputSurface->GetPlaneAt(0)->GetNative()); + } else { + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + } + + } else { + + // Bind video rendering vertex buffer + UINT stride = sizeof(VERTEX); + UINT offset = 0; + m_DeviceContext->IASetVertexBuffers(0, 1, m_VideoVertexBuffer.GetAddressOf(), &stride, &offset); + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + + } // Draw the video if(m_VideoEnhancement->isVideoEnhancementEnabled()){ @@ -1372,19 +1750,23 @@ bool D3D11VARenderer::createVideoProcessor() HRESULT hr; D3D11_VIDEO_PROCESSOR_CONTENT_DESC content_desc; - m_VideoProcessorEnumerator = nullptr; - m_VideoProcessor = 
nullptr; + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } // Get video device hr = m_Device->QueryInterface(__uuidof(ID3D11VideoDevice), - (void**)&m_VideoDevice); + (void**)m_VideoDevice.GetAddressOf()); if (FAILED(hr)) { return false; } // Get video context hr = m_DeviceContext->QueryInterface(__uuidof(ID3D11VideoContext2), - (void**)&m_VideoContext); + (void**)m_VideoContext.GetAddressOf()); if (FAILED(hr)) { return false; } @@ -1399,7 +1781,7 @@ bool D3D11VARenderer::createVideoProcessor() content_desc.OutputHeight = m_DisplayHeight; content_desc.OutputFrameRate.Numerator = m_DecoderParams.frameRate; content_desc.OutputFrameRate.Denominator = 1; - content_desc.Usage = D3D11_VIDEO_USAGE_OPTIMAL_SPEED; + content_desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; hr = m_VideoDevice->CreateVideoProcessorEnumerator(&content_desc, &m_VideoProcessorEnumerator); if (FAILED(hr)) @@ -1410,6 +1792,11 @@ bool D3D11VARenderer::createVideoProcessor() if (FAILED(hr)) return false; + hr = m_VideoProcessorEnumerator->GetVideoProcessorCaps(&m_VideoProcessorCapabilities); + if (FAILED(hr)) { + return false; + } + return true; } @@ -1423,53 +1810,50 @@ bool D3D11VARenderer::createVideoProcessor() bool D3D11VARenderer::initializeVideoProcessor() { HRESULT hr; + D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputViewDesc; + D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputViewDesc; - m_VideoContext->VideoProcessorSetStreamAutoProcessingMode(m_VideoProcessor.Get(), 0, false); m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, 0); // Set Background color D3D11_VIDEO_COLOR bgColor; - bgColor.YCbCr = { 0.0625f, 0.5f, 0.5f, 1.0f }; // black color - m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor.Get(), true, &bgColor); + bgColor.RGBA = { 0, 0, 0, 1 }; // black color + 
m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor.Get(), false, &bgColor); - ZeroMemory(&m_OutputViewDesc, sizeof(m_OutputViewDesc)); - m_OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D; - m_OutputViewDesc.Texture2D.MipSlice = 0; + ZeroMemory(&outputViewDesc, sizeof(outputViewDesc)); + outputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D; + outputViewDesc.Texture2D.MipSlice = 0; hr = m_VideoDevice->CreateVideoProcessorOutputView( - m_BackBufferResource, + m_BackBufferResource.Get(), m_VideoProcessorEnumerator.Get(), - &m_OutputViewDesc, + &outputViewDesc, (ID3D11VideoProcessorOutputView**)&m_OutputView); - if (FAILED(hr)) { + if (FAILED(hr)) return false; - } - ZeroMemory(&m_InputViewDesc, sizeof(m_InputViewDesc)); - m_InputViewDesc.FourCC = 0; - m_InputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; - m_InputViewDesc.Texture2D.MipSlice = 0; - m_InputViewDesc.Texture2D.ArraySlice = 0; + ZeroMemory(&inputViewDesc, sizeof(inputViewDesc)); + inputViewDesc.FourCC = 0; + inputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; + inputViewDesc.Texture2D.MipSlice = 0; + inputViewDesc.Texture2D.ArraySlice = 0; hr = m_VideoDevice->CreateVideoProcessorInputView( - m_VideoTexture, m_VideoProcessorEnumerator.Get(), &m_InputViewDesc, (ID3D11VideoProcessorInputView**)&m_InputView); + m_VideoTexture.Get(), m_VideoProcessorEnumerator.Get(), &inputViewDesc, (ID3D11VideoProcessorInputView**)&m_InputView); if (FAILED(hr)) return false; RECT dstRect = { 0 }; dstRect.right = m_DisplayWidth; dstRect.bottom = m_DisplayHeight; + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + dstRect.right = m_OutputTexture.width; + dstRect.bottom = m_OutputTexture.height; - // Sscale the source to the destination surface while keeping the same ratio - float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); - float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); - - // [TODO] 
There is a behavior I don't understand (bug?) when the destination desRect is larger by one of its side than the source. - // If it is bigger, the window becomes black, but if it is smaller it is fine. - // Only one case is working when it is bigger is when the dstRest perfectly equal to the Display size. - // Investigation: If there anything to do with pixel alignment (c.f. dxva2.cpp FFALIGN), or screenSpaceToNormalizedDeviceCoords ? - // Fix: When bigger we strech the picture to the window, it will be deformed, but at least will not crash. - if(m_DisplayWidth < m_DecoderParams.width && m_DisplayHeight < m_DecoderParams.height){ + // Scale the source to the destination surface while keeping the same ratio + float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); + float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); + // When VSR for NVIDIA is in use in Window mode, it may bug (black screen) the rendering while stretching the window bigger if(ratioHeight < ratioWidth){ // Adjust the Width long width = static_cast(std::floor(m_DecoderParams.width * ratioHeight)); @@ -1482,8 +1866,8 @@ bool D3D11VARenderer::initializeVideoProcessor() dstRect.bottom = dstRect.top + height; } } - m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor.Get(), 0, true, &dstRect); + m_VideoContext->VideoProcessorSetStreamFrameFormat(m_VideoProcessor.Get(), 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE); ZeroMemory(&m_StreamData, sizeof(m_StreamData)); @@ -1506,23 +1890,27 @@ bool D3D11VARenderer::initializeVideoProcessor() m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); } - // The section is a customization per vendor to slightly enhance (non-AI methods) the frame appearance - int noiseReduction = 0; - int edgeEnhancement = 0; + // The section is a customization per vendor to slightly enhance (non-AI methods) the frame appearance.
+ // It does work in addition to AI-enhancement for better result. if(m_VideoEnhancement->isVendorAMD()){ - noiseReduction = 30; - edgeEnhancement = 30; + // AMD has its own filters } else if(m_VideoEnhancement->isVendorIntel()){ - noiseReduction = 30; - edgeEnhancement = 30; + // Reduce blocking artifacts + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) + // Sharpen sligthly the picture to enhance details + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 30); // (0 / 0 / 100) + } else if(m_VideoEnhancement->isVendorNVIDIA()){ - noiseReduction = 30; - edgeEnhancement = 30; + // Reduce blocking artifacts + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) + // Sharpen sligthly the picture to enhance details + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 30); // (0 / 0 / 100) + } - // Reduce artefacts (like pixelisation around text), does work in addition to AI-enhancement for better result - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, noiseReduction); // (0 / 0 / 100) - // Sharpen sligthly the picture to enhance details, does work in addition to AI-enhancement for better result - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, 
D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, edgeEnhancement); // (0 / 0 / 100) // Default on SDR, it will switch to HDR automatically at the 1st frame received if the Stream source has HDR active. m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); @@ -1543,21 +1931,6 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) return; } - SDL_AtomicLock(&m_OverlayLock); - ID3D11Texture2D* oldTexture = m_OverlayTextures[type]; - m_OverlayTextures[type] = nullptr; - - ID3D11Buffer* oldVertexBuffer = m_OverlayVertexBuffers[type]; - m_OverlayVertexBuffers[type] = nullptr; - - ID3D11ShaderResourceView* oldTextureResourceView = m_OverlayTextureResourceViews[type]; - m_OverlayTextureResourceViews[type] = nullptr; - SDL_AtomicUnlock(&m_OverlayLock); - - SAFE_COM_RELEASE(oldTextureResourceView); - SAFE_COM_RELEASE(oldTexture); - SAFE_COM_RELEASE(oldVertexBuffer); - // If the overlay is disabled, we're done if (!overlayEnabled) { SDL_FreeSurface(newSurface); @@ -1585,21 +1958,18 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) texData.pSysMem = newSurface->pixels; texData.SysMemPitch = newSurface->pitch; - ID3D11Texture2D* newTexture; - hr = m_Device->CreateTexture2D(&texDesc, &texData, &newTexture); + ComPtr newTexture; + hr = m_Device->CreateTexture2D(&texDesc, &texData, newTexture.GetAddressOf()); if (FAILED(hr)) { - SDL_FreeSurface(newSurface); SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateTexture2D() failed: %x", hr); return; } - ID3D11ShaderResourceView* newTextureResourceView = nullptr; - hr = m_Device->CreateShaderResourceView((ID3D11Resource*)newTexture, nullptr, &newTextureResourceView); + ComPtr newTextureResourceView; + hr = m_Device->CreateShaderResourceView((ID3D11Resource*)newTexture.Get(), nullptr, newTextureResourceView.GetAddressOf()); if (FAILED(hr)) { - SAFE_COM_RELEASE(newTexture); - SDL_FreeSurface(newSurface); 
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateShaderResourceView() failed: %x", hr); @@ -1648,11 +2018,9 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) D3D11_SUBRESOURCE_DATA vbData = {}; vbData.pSysMem = verts; - ID3D11Buffer* newVertexBuffer; - hr = m_Device->CreateBuffer(&vbDesc, &vbData, &newVertexBuffer); + ComPtr newVertexBuffer; + hr = m_Device->CreateBuffer(&vbDesc, &vbData, newVertexBuffer.GetAddressOf()); if (FAILED(hr)) { - SAFE_COM_RELEASE(newTextureResourceView); - SAFE_COM_RELEASE(newTexture); SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateBuffer() failed: %x", hr); @@ -1660,24 +2028,18 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) } SDL_AtomicLock(&m_OverlayLock); - m_OverlayVertexBuffers[type] = newVertexBuffer; - m_OverlayTextures[type] = newTexture; - m_OverlayTextureResourceViews[type] = newTextureResourceView; + m_OverlayVertexBuffers[type] = newVertexBuffer.Get(); + m_OverlayTextures[type] = newTexture.Get(); + m_OverlayTextureResourceViews[type] = newTextureResourceView.Get(); SDL_AtomicUnlock(&m_OverlayLock); } bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) { HRESULT hr; - ID3D11VideoDevice* videoDevice; - // Derive a ID3D11VideoDevice from our ID3D11Device. 
- hr = m_Device->QueryInterface(__uuidof(ID3D11VideoDevice), (void**)&videoDevice); - if (FAILED(hr)) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "ID3D11Device::QueryInterface(ID3D11VideoDevice) failed: %x", - hr); - return false; + if(m_VideoDevice == nullptr){ + createVideoProcessor(); } // Check if the format is supported by this decoder @@ -1685,88 +2047,75 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) switch (m_DecoderParams.videoFormat) { case VIDEO_FORMAT_H264: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_H264_VLD_NOFGT, DXGI_FORMAT_NV12, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_H264_VLD_NOFGT, DXGI_FORMAT_NV12, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support H.264 decoding"); - videoDevice->Release(); return false; } else if (!supported) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support H.264 decoding to NV12 format"); - videoDevice->Release(); return false; } break; case VIDEO_FORMAT_H265: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN, DXGI_FORMAT_NV12, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN, DXGI_FORMAT_NV12, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC decoding"); - videoDevice->Release(); return false; } else if (!supported) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC decoding to NV12 format"); - videoDevice->Release(); return false; } break; case VIDEO_FORMAT_H265_MAIN10: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10, DXGI_FORMAT_P010, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10, DXGI_FORMAT_P010, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC Main10 decoding"); - videoDevice->Release(); 
return false; } else if (!supported) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC Main10 decoding to P010 format"); - videoDevice->Release(); return false; } break; case VIDEO_FORMAT_AV1_MAIN8: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_NV12, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_NV12, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 decoding"); - videoDevice->Release(); return false; } else if (!supported) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 decoding to NV12 format"); - videoDevice->Release(); return false; } break; case VIDEO_FORMAT_AV1_MAIN10: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_P010, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_P010, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 Main10 decoding"); - videoDevice->Release(); return false; } else if (!supported) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 Main10 decoding to P010 format"); - videoDevice->Release(); return false; } break; default: SDL_assert(false); - videoDevice->Release(); return false; } - videoDevice->Release(); - DXGI_ADAPTER_DESC adapterDesc; hr = adapter->GetDesc(&adapterDesc); if (FAILED(hr)) { @@ -1843,11 +2192,10 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray vertexShaderBytecode = Path::readDataFile("d3d11_vertex.fxc"); - ID3D11VertexShader* vertexShader; - hr = m_Device->CreateVertexShader(vertexShaderBytecode.constData(), vertexShaderBytecode.length(), nullptr, &vertexShader); + ComPtr vertexShader; + hr = m_Device->CreateVertexShader(vertexShaderBytecode.constData(), vertexShaderBytecode.length(), nullptr, 
vertexShader.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->VSSetShader(vertexShader, nullptr, 0); - vertexShader->Release(); + m_DeviceContext->VSSetShader(vertexShader.Get(), nullptr, 0); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1861,11 +2209,10 @@ bool D3D11VARenderer::setupRenderingResources() { "POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 }, { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0 }, }; - ID3D11InputLayout* inputLayout; - hr = m_Device->CreateInputLayout(vertexDesc, ARRAYSIZE(vertexDesc), vertexShaderBytecode.constData(), vertexShaderBytecode.length(), &inputLayout); + ComPtr inputLayout; + hr = m_Device->CreateInputLayout(vertexDesc, ARRAYSIZE(vertexDesc), vertexShaderBytecode.constData(), vertexShaderBytecode.length(), inputLayout.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->IASetInputLayout(inputLayout); - inputLayout->Release(); + m_DeviceContext->IASetInputLayout(inputLayout.Get()); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1878,7 +2225,7 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray overlayPixelShaderBytecode = Path::readDataFile("d3d11_overlay_pixel.fxc"); - hr = m_Device->CreatePixelShader(overlayPixelShaderBytecode.constData(), overlayPixelShaderBytecode.length(), nullptr, &m_OverlayPixelShader); + hr = m_Device->CreatePixelShader(overlayPixelShaderBytecode.constData(), overlayPixelShaderBytecode.length(), nullptr, m_OverlayPixelShader.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreatePixelShader() failed: %x", @@ -1890,7 +2237,7 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray videoPixelShaderBytecode = Path::readDataFile("d3d11_genyuv_pixel.fxc"); - hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, &m_VideoGenericPixelShader); + hr = 
m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, m_VideoGenericPixelShader.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreatePixelShader() failed: %x", @@ -1902,7 +2249,7 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray videoPixelShaderBytecode = Path::readDataFile("d3d11_bt601lim_pixel.fxc"); - hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, &m_VideoBt601LimPixelShader); + hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, m_VideoBt601LimPixelShader.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreatePixelShader() failed: %x", @@ -1914,7 +2261,7 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray videoPixelShaderBytecode = Path::readDataFile("d3d11_bt2020lim_pixel.fxc"); - hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, &m_VideoBt2020LimPixelShader); + hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, m_VideoBt2020LimPixelShader.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreatePixelShader() failed: %x", @@ -1936,11 +2283,10 @@ bool D3D11VARenderer::setupRenderingResources() samplerDesc.MinLOD = 0.0f; samplerDesc.MaxLOD = D3D11_FLOAT32_MAX; - ID3D11SamplerState* sampler; - hr = m_Device->CreateSamplerState(&samplerDesc, &sampler); + ComPtr sampler; + hr = m_Device->CreateSamplerState(&samplerDesc, sampler.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->PSSetSamplers(0, 1, &sampler); - sampler->Release(); + m_DeviceContext->PSSetSamplers(0, 1, sampler.GetAddressOf()); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1952,7 +2298,7 @@ bool 
D3D11VARenderer::setupRenderingResources() // Create our render target view { - hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource), (void**)&m_BackBufferResource); + hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource), (void**)m_BackBufferResource.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "IDXGISwapChain::GetBuffer() failed: %x", @@ -1960,7 +2306,7 @@ bool D3D11VARenderer::setupRenderingResources() return false; } - hr = m_Device->CreateRenderTargetView(m_BackBufferResource, nullptr, &m_RenderTargetView); + hr = m_Device->CreateRenderTargetView(m_BackBufferResource.Get(), nullptr, &m_RenderTargetView); // m_BackBufferResource is still needed in createVideoProcessor(), therefore will be released later if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1985,11 +2331,10 @@ bool D3D11VARenderer::setupRenderingResources() indexBufferData.pSysMem = indexes; indexBufferData.SysMemPitch = sizeof(int); - ID3D11Buffer* indexBuffer; - hr = m_Device->CreateBuffer(&indexBufferDesc, &indexBufferData, &indexBuffer); + ComPtr indexBuffer; + hr = m_Device->CreateBuffer(&indexBufferDesc, &indexBufferData, indexBuffer.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->IASetIndexBuffer(indexBuffer, DXGI_FORMAT_R32_UINT, 0); - indexBuffer->Release(); + m_DeviceContext->IASetIndexBuffer(indexBuffer.Get(), DXGI_FORMAT_R32_UINT, 0); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -2034,7 +2379,7 @@ bool D3D11VARenderer::setupRenderingResources() D3D11_SUBRESOURCE_DATA vbData = {}; vbData.pSysMem = verts; - hr = m_Device->CreateBuffer(&vbDesc, &vbData, &m_VideoVertexBuffer); + hr = m_Device->CreateBuffer(&vbDesc, &vbData, m_VideoVertexBuffer.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateBuffer() failed: %x", @@ -2057,11 +2402,11 @@ bool D3D11VARenderer::setupRenderingResources() blendDesc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD; 
blendDesc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL; - ID3D11BlendState* blendState; - hr = m_Device->CreateBlendState(&blendDesc, &blendState); + ComPtr blendState; + hr = m_Device->CreateBlendState(&blendDesc, blendState.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->OMSetBlendState(blendState, nullptr, 0xffffffff); - blendState->Release(); + m_DeviceContext->OMSetBlendState(blendState.Get(), nullptr, 0xffffffff); + // blendState->Release(); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -2093,25 +2438,37 @@ bool D3D11VARenderer::setupVideoTexture() HRESULT hr; D3D11_TEXTURE2D_DESC texDesc = {}; + // Size of the output texture texDesc.Width = m_DecoderParams.width; texDesc.Height = m_DecoderParams.height; + if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_AmfInitialized){ + texDesc.Width = m_OutputTexture.width; + texDesc.Height = m_OutputTexture.height; + } texDesc.MipLevels = 1; texDesc.ArraySize = 1; - texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; + if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_AmfInitialized){ + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R10G10B10A2_UNORM : DXGI_FORMAT_R8G8B8A8_UNORM; + } else { + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; + } texDesc.SampleDesc.Quality = 0; texDesc.SampleDesc.Count = 1; texDesc.Usage = D3D11_USAGE_DEFAULT; texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; // The flag D3D11_BIND_RENDER_TARGET is needed to enable the use of GPU enhancement - if(m_DecoderParams.enableVideoEnhancement && m_VideoEnhancement->isEnhancementCapable()){ + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ texDesc.BindFlags |= D3D11_BIND_RENDER_TARGET; } texDesc.CPUAccessFlags = 0; texDesc.MiscFlags = 0; - hr = m_Device->CreateTexture2D(&texDesc, nullptr, &m_VideoTexture); + if(m_AmfInitialized){ + texDesc.MiscFlags |= D3D11_RESOURCE_MISC_SHARED; + } + + hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_VideoTexture.GetAddressOf()); if (FAILED(hr)) { - m_VideoTexture = nullptr; SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateTexture2D() failed: %x", hr); @@ -2124,7 +2481,7 @@ bool D3D11VARenderer::setupVideoTexture() srvDesc.Texture2D.MostDetailedMip = 0; srvDesc.Texture2D.MipLevels = 1; srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R16_UNORM : DXGI_FORMAT_R8_UNORM; - hr = m_Device->CreateShaderResourceView(m_VideoTexture, &srvDesc, &m_VideoTextureResourceViews[0]); + hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[0]); if (FAILED(hr)) { m_VideoTextureResourceViews[0] = nullptr; SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -2134,7 +2491,7 @@ bool D3D11VARenderer::setupVideoTexture() } srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_R16G16_UNORM : DXGI_FORMAT_R8G8_UNORM; - hr = m_Device->CreateShaderResourceView(m_VideoTexture, &srvDesc, &m_VideoTextureResourceViews[1]); + hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[1]); if (FAILED(hr)) { m_VideoTextureResourceViews[1] = nullptr; SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -2145,3 +2502,33 @@ bool D3D11VARenderer::setupVideoTexture() return true; } + +bool D3D11VARenderer::setupFrameTexture() +{ + // Texture description + D3D11_TEXTURE2D_DESC texDesc = {}; + // Same size as the input Frame + texDesc.Width = m_DecoderParams.width; + texDesc.Height = m_DecoderParams.height; + texDesc.MipLevels = 1; + texDesc.ArraySize = 1; + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; + texDesc.SampleDesc.Quality = 0; + texDesc.SampleDesc.Count = 1; + texDesc.Usage = D3D11_USAGE_DEFAULT; + texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; + // The flag D3D11_BIND_RENDER_TARGET is needed to enable the use of GPU enhancement + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + texDesc.BindFlags |= D3D11_BIND_RENDER_TARGET; + } + texDesc.CPUAccessFlags = 0; + texDesc.MiscFlags = 0; + + HRESULT hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_FrameTexture.GetAddressOf()); + if (FAILED(hr)) { + // Handle error + return false; + } + + return true; +} diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index ad5c24221..7c6bf6ad5 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -8,11 +8,14 @@ #include #include #include "streaming/video/videoenhancement.h" +#include "public/common/AMFFactory.h" extern "C" { #include } +using Microsoft::WRL::ComPtr; + class D3D11VARenderer : public IFFmpegRenderer { public: @@ -33,6 +36,7 @@ class D3D11VARenderer : public IFFmpegRenderer bool setupRenderingResources(); bool 
setupVideoTexture(); + bool setupFrameTexture(); void renderOverlay(Overlay::OverlayType type); void bindColorConversion(AVFrame* frame); void prepareVideoProcessorStream(AVFrame* frame); @@ -53,24 +57,25 @@ class D3D11VARenderer : public IFFmpegRenderer int m_AdapterIndex = 0; int m_OutputIndex = 0; - IDXGIFactory5* m_Factory; + ComPtr m_Factory; + // Cannt convert to ComPtr because of av_buffer_unref() ID3D11Device* m_Device; - IDXGISwapChain4* m_SwapChain; ID3D11DeviceContext* m_DeviceContext; + ComPtr m_SwapChain; ID3D11RenderTargetView* m_RenderTargetView; SDL_mutex* m_ContextLock; - ID3D11VideoDevice* m_VideoDevice; - ID3D11VideoContext2* m_VideoContext; - Microsoft::WRL::ComPtr m_VideoProcessor; - Microsoft::WRL::ComPtr m_VideoProcessorEnumerator; - D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC m_OutputViewDesc; - D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC m_InputViewDesc; + ComPtr m_VideoDevice; + ComPtr m_VideoContext; + ComPtr m_VideoProcessor; + ComPtr m_VideoProcessorEnumerator; + D3D11_VIDEO_PROCESSOR_CAPS m_VideoProcessorCapabilities; D3D11_VIDEO_PROCESSOR_STREAM m_StreamData; - Microsoft::WRL::ComPtr m_OutputView; - Microsoft::WRL::ComPtr m_InputView; - ID3D11Resource* m_BackBufferResource; + ComPtr m_OutputView; + ComPtr m_InputView; + ComPtr m_BackBufferResource; VideoEnhancement* m_VideoEnhancement; + bool m_AutoSTreamSuperResolution = false; // Variable unused, but keep it as reference for debugging purpose DXGI_COLOR_SPACE_TYPE m_ColorSpaces[26] = { @@ -102,8 +107,6 @@ class D3D11VARenderer : public IFFmpegRenderer DXGI_COLOR_SPACE_CUSTOM, // 25 }; - ID3D11ShaderResourceView* m_VideoTextureResourceView; - DECODER_PARAMETERS m_DecoderParams; int m_DisplayWidth; int m_DisplayHeight; @@ -113,20 +116,41 @@ class D3D11VARenderer : public IFFmpegRenderer bool m_AllowTearing; - ID3D11PixelShader* m_VideoGenericPixelShader; - ID3D11PixelShader* m_VideoBt601LimPixelShader; - ID3D11PixelShader* m_VideoBt2020LimPixelShader; - ID3D11Buffer* m_VideoVertexBuffer; + 
ComPtr m_VideoGenericPixelShader; + ComPtr m_VideoBt601LimPixelShader; + ComPtr m_VideoBt2020LimPixelShader; + ComPtr m_VideoVertexBuffer; - ID3D11Texture2D* m_VideoTexture; + ComPtr m_FrameTexture; + ComPtr m_VideoTexture; ID3D11ShaderResourceView* m_VideoTextureResourceViews[2]; + float m_ScaleUp = 1; + struct { + int width; + int height; + int left; + int top; + } m_OutputTexture; + SDL_SpinLock m_OverlayLock; - ID3D11Buffer* m_OverlayVertexBuffers[Overlay::OverlayMax]; - ID3D11Texture2D* m_OverlayTextures[Overlay::OverlayMax]; - ID3D11ShaderResourceView* m_OverlayTextureResourceViews[Overlay::OverlayMax]; - ID3D11PixelShader* m_OverlayPixelShader; + ComPtr m_OverlayVertexBuffers[Overlay::OverlayMax]; + ComPtr m_OverlayTextures[Overlay::OverlayMax]; + ComPtr m_OverlayTextureResourceViews[Overlay::OverlayMax]; + ComPtr m_OverlayPixelShader; AVBufferRef* m_HwDeviceContext; + + // AMD (AMF) + amf::AMFContextPtr m_AmfContext; + amf::AMFSurfacePtr m_AmfInputSurface; + amf::AMFComponentPtr m_AmfDenoiser; + amf::AMFComponentPtr m_AmfFormatConverter; + amf::AMFComponentPtr m_AmfUpScaler; + // amf::AMFComponentPtr does not work for m_AmfDownScaler, have to use raw pointer + amf::AMFComponent* m_AmfDownScaler; + + bool m_AmfInitialized = false; + }; diff --git a/third-party/AMF b/third-party/AMF new file mode 160000 index 000000000..85eea8d43 --- /dev/null +++ b/third-party/AMF @@ -0,0 +1 @@ +Subproject commit 85eea8d43511967dcf98f063d3d3efa573536ae3 From 63101e0fdaa264f57e7dffa9dda2d39077ad3cff Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Wed, 27 Mar 2024 17:06:33 +0100 Subject: [PATCH 31/53] Add AMD Video Enhancement feature Use of AMD AMF to support Video upscaling using FSR 1.1 and Denoiser to improve picture rendering quality on AMD 7000+ Series. 
- Import AMF SDK as a Git submodule into the folder "third-party/AMF" - Load from app.pro only AMF libraries used by Moonlight - In UI, add a comment that Video Enhancement still have issues for HDR rendering - Wherever possible, use Microsoft::WRL::ComPtr to let Windows release objects and clear the memory efficiently by itself, making the code cleaner and easier to maintain - Add DirectX object debug messages - Add AMF HDR settings for Input and Output - Add AMF Video enhancement, Upscaling and Denoiser - Round Output Width and Height values to even numbers to avoid the rendering to crash when manually resize - Add Full range Color Space for VideoProcessor Correct variable name --- .gitmodules | 3 + app/app.pro | 12 + app/gui/SettingsView.qml | 1 + .../video/ffmpeg-renderers/d3d11va.cpp | 973 ++++++++++++------ .../video/ffmpeg-renderers/d3d11va.h | 68 +- third-party/AMF | 1 + 6 files changed, 743 insertions(+), 315 deletions(-) create mode 160000 third-party/AMF diff --git a/.gitmodules b/.gitmodules index 0b50efae7..4e39dbf33 100644 --- a/.gitmodules +++ b/.gitmodules @@ -17,3 +17,6 @@ path = libs url = https://github.com/cgutman/moonlight-qt-prebuilts.git shallow = true +[submodule "third-party/AMF"] + path = third-party/AMF + url = https://github.com/GPUOpen-LibrariesAndSDKs/AMF.git diff --git a/app/app.pro b/app/app.pro index e46f8e683..76739197b 100644 --- a/app/app.pro +++ b/app/app.pro @@ -383,6 +383,18 @@ win32:!winrt { streaming/video/ffmpeg-renderers/d3d11va.h \ streaming/video/ffmpeg-renderers/pacer/dxvsyncsource.h } +win32:!winrt { + message(AMF enabled for AMD Drivers) + + SOURCES += \ + ../third-party/AMF/amf/public/common/AMFFactory.cpp \ + ../third-party/AMF/amf/public/common/AMFSTL.cpp \ + ../third-party/AMF/amf/public/common/Thread.cpp \ + ../third-party/AMF/amf/public/common/TraceAdapter.cpp \ + ../third-party/AMF/amf/public/common/Windows\ThreadWindows.cpp + + INCLUDEPATH += $$PWD/../third-party/AMF/amf +} macx { message(VideoToolbox renderer 
selected) diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index fb0a39127..ba4404703 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -834,6 +834,7 @@ Flickable { + qsTr("\nThis feature effectively upscales, reduces compression artifacts and enhances the clarity of streamed content.") + qsTr("\nNote:") + qsTr("\n - If available, ensure that appropriate settings (i.e. RTX Video enhancement) are enabled in your GPU driver configuration.") + + qsTr("\n - HDR rendering has divers issues depending on the GPU used, we are working on it but we advise to currently use Non-HDR.") + qsTr("\n - Be advised that using this feature on laptops running on battery power may lead to significant battery drain.") Component.onCompleted: { diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index f64b89ca2..e39f85d21 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -9,10 +9,19 @@ #include "streaming/session.h" #include "streaming/video/videoenhancement.h" +#include "public/common/AMFFactory.h" +#include "public/include/core/Platform.h" +#include "public/include/components/VideoConverter.h" +// Video upscaling & Sharpening +#include "public/include/components/HQScaler.h" +// Reducing blocking artifacts +#include "public/include/components/VQEnhancer.h" + #include #include #include #include +#include extern "C" { #include @@ -23,6 +32,8 @@ extern "C" { #include +using Microsoft::WRL::ComPtr; + #pragma comment(lib, "d3d11.lib") #pragma comment(lib, "dxgi.lib") @@ -88,31 +99,25 @@ static_assert(sizeof(CSC_CONST_BUF) % 16 == 0, "Constant buffer sizes must be a D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) : m_DecoderSelectionPass(decoderSelectionPass), - m_Factory(nullptr), - m_Device(nullptr), - m_SwapChain(nullptr), - m_DeviceContext(nullptr), - m_RenderTargetView(nullptr), - m_VideoDevice(nullptr), - 
m_VideoContext(nullptr), - m_VideoProcessor(nullptr), - m_VideoProcessorEnumerator(nullptr), - m_LastColorSpace(-1), - m_LastFullRange(false), - m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), - m_AllowTearing(false), - m_VideoGenericPixelShader(nullptr), - m_VideoBt601LimPixelShader(nullptr), - m_VideoBt2020LimPixelShader(nullptr), - m_VideoVertexBuffer(nullptr), - m_VideoTexture(nullptr), - m_OverlayLock(0), - m_OverlayPixelShader(nullptr), - m_HwDeviceContext(nullptr) + m_Device(nullptr), + m_DeviceContext(nullptr), + m_RenderTargetView(nullptr), + m_VideoProcessor(nullptr), + m_VideoProcessorEnumerator(nullptr), + m_LastColorSpace(-1), + m_LastFullRange(false), + m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), + m_AllowTearing(false), + m_OverlayLock(0), + m_HwDeviceContext(nullptr), + m_AmfContext(nullptr), + m_AmfInputSurface(nullptr), + m_AmfDenoiser(nullptr), + m_AmfFormatConverter(nullptr), + m_AmfUpScaler(nullptr), + m_AmfDownScaler(nullptr), + m_AmfInitialized(false) { - RtlZeroMemory(m_OverlayVertexBuffers, sizeof(m_OverlayVertexBuffers)); - RtlZeroMemory(m_OverlayTextures, sizeof(m_OverlayTextures)); - RtlZeroMemory(m_OverlayTextureResourceViews, sizeof(m_OverlayTextureResourceViews)); RtlZeroMemory(m_VideoTextureResourceViews, sizeof(m_VideoTextureResourceViews)); m_ContextLock = SDL_CreateMutex(); @@ -128,35 +133,40 @@ D3D11VARenderer::~D3D11VARenderer() SDL_DestroyMutex(m_ContextLock); - SAFE_COM_RELEASE(m_VideoVertexBuffer); - SAFE_COM_RELEASE(m_VideoBt2020LimPixelShader); - SAFE_COM_RELEASE(m_VideoBt601LimPixelShader); - SAFE_COM_RELEASE(m_VideoGenericPixelShader); - for (int i = 0; i < ARRAYSIZE(m_VideoTextureResourceViews); i++) { SAFE_COM_RELEASE(m_VideoTextureResourceViews[i]); } - SAFE_COM_RELEASE(m_VideoTexture); + SAFE_COM_RELEASE(m_RenderTargetView); - for (int i = 0; i < ARRAYSIZE(m_OverlayVertexBuffers); i++) { - SAFE_COM_RELEASE(m_OverlayVertexBuffers[i]); + // cleanup AMF instances + if(m_AmfDenoiser){ + // Denoiser + m_AmfDenoiser->Terminate(); + 
m_AmfDenoiser = nullptr; } - - for (int i = 0; i < ARRAYSIZE(m_OverlayTextureResourceViews); i++) { - SAFE_COM_RELEASE(m_OverlayTextureResourceViews[i]); + if(m_AmfFormatConverter){ + // Format converter + m_AmfFormatConverter->Terminate(); + m_AmfFormatConverter = nullptr; } - - for (int i = 0; i < ARRAYSIZE(m_OverlayTextures); i++) { - SAFE_COM_RELEASE(m_OverlayTextures[i]); + if(m_AmfUpScaler){ + // Up Scaler + m_AmfUpScaler->Terminate(); + m_AmfUpScaler = nullptr; + } + if(m_AmfDownScaler){ + // Down Scaler + m_AmfDownScaler->Terminate(); + m_AmfDownScaler = nullptr; + } + if(m_AmfContext){ + // Context + m_AmfContext->Terminate(); + m_AmfContext = nullptr; } - SAFE_COM_RELEASE(m_OverlayPixelShader); - - SAFE_COM_RELEASE(m_BackBufferResource); - - SAFE_COM_RELEASE(m_RenderTargetView); - SAFE_COM_RELEASE(m_SwapChain); + g_AMFFactory.Terminate(); // Force destruction of the swapchain immediately if (m_DeviceContext != nullptr) { @@ -164,6 +174,20 @@ D3D11VARenderer::~D3D11VARenderer() m_DeviceContext->Flush(); } + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } + +#ifdef QT_DEBUG + ComPtr debugDevice; + if(FAILED(m_Device->QueryInterface(__uuidof(ID3D11Debug), reinterpret_cast(debugDevice.GetAddressOf())))) { + debugDevice = nullptr; + } +#endif + if (m_HwDeviceContext != nullptr) { // This will release m_Device and m_DeviceContext too av_buffer_unref(&m_HwDeviceContext); @@ -171,11 +195,20 @@ D3D11VARenderer::~D3D11VARenderer() else { SAFE_COM_RELEASE(m_Device); SAFE_COM_RELEASE(m_DeviceContext); - SAFE_COM_RELEASE(m_VideoDevice); - SAFE_COM_RELEASE(m_VideoContext); } - SAFE_COM_RELEASE(m_Factory); +// Uncomment the lines in the QT_DEBUG section if you need to debug DirectX objects +#ifdef QT_DEBUG + // if(debugDevice) { + // debugDevice->ReportLiveDeviceObjects(D3D11_RLDO_IGNORE_INTERNAL); + // } + // CComPtr pDebugDevice; + // if (SUCCEEDED(DXGIGetDebugInterface1(0, 
IID_PPV_ARGS(&pDebugDevice)))) + // { + // pDebugDevice->ReportLiveObjects(DXGI_DEBUG_DX, DXGI_DEBUG_RLO_FLAGS(DXGI_DEBUG_RLO_DETAIL | DXGI_DEBUG_RLO_IGNORE_INTERNAL)); + // } +#endif + } /** @@ -226,24 +259,57 @@ void D3D11VARenderer::setHdrMode(bool enabled){ &streamHDRMetaData ); + // Set HDR Input for AMF Converter + if(m_AmfInitialized){ + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); + + // Values taken from AMF Sample: + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/5b32766b801434be61350c292127a9ac022b1268/amf/public/samples/CPPSamples/common/SwapChainDXGI.cpp#L740 + // We can initialize with Studio range first, it will be corrected to Full range if needed once the first frame is received. + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, AMF_COLOR_RANGE_STUDIO); + + AMFHDRMetadata amfHDRMetadata; + amfHDRMetadata.redPrimary[0] = amf_uint16(streamHDRMetaData.RedPrimary[0]); + amfHDRMetadata.redPrimary[1] = amf_uint16(streamHDRMetaData.RedPrimary[1]); + amfHDRMetadata.greenPrimary[0] = amf_uint16(streamHDRMetaData.GreenPrimary[0]); + amfHDRMetadata.greenPrimary[1] = amf_uint16(streamHDRMetaData.GreenPrimary[1]); + amfHDRMetadata.bluePrimary[0] = amf_uint16(streamHDRMetaData.BluePrimary[0]); + amfHDRMetadata.bluePrimary[1] = amf_uint16(streamHDRMetaData.BluePrimary[1]); + amfHDRMetadata.whitePoint[0] = amf_uint16(streamHDRMetaData.WhitePoint[0]); + amfHDRMetadata.whitePoint[1] = amf_uint16(streamHDRMetaData.WhitePoint[1]); + amfHDRMetadata.maxMasteringLuminance = amf_uint32(streamHDRMetaData.MaxMasteringLuminance); + amfHDRMetadata.minMasteringLuminance = amf_uint32(streamHDRMetaData.MinMasteringLuminance); + amfHDRMetadata.maxContentLightLevel = 0; 
+ amfHDRMetadata.maxFrameAverageLightLevel = 0; + + amf::AMFBufferPtr pHDRMetaDataBuffer; + m_AmfContext->AllocBuffer(amf::AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &pHDRMetaDataBuffer); + AMFHDRMetadata* pData = (AMFHDRMetadata*)pHDRMetaDataBuffer->GetNative(); + memcpy(pData, &amfHDRMetadata, sizeof(AMFHDRMetadata)); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_HDR_METADATA, pData); + + m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); + } + streamSet = true; } SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Set stream HDR mode: %s", streamSet ? "enabled" : "disabled"); - return; // Prepare HDR Meta Data to match the monitor HDR specifications // Retreive the monitor HDR metadata where the application is displayed int appAdapterIndex = 0; int appOutputIndex = 0; bool displaySet = false; if (SDL_DXGIGetOutputInfo(SDL_GetWindowDisplayIndex(m_DecoderParams.window), &appAdapterIndex, &appOutputIndex)){ - IDXGIAdapter1* adapter = nullptr; - IDXGIOutput* output = nullptr; + ComPtr adapter; + ComPtr output; UINT outputIndex = appOutputIndex; if(SUCCEEDED(m_Factory->EnumAdapters1(appAdapterIndex, &adapter))){ if(SUCCEEDED(adapter->EnumOutputs(outputIndex, &output))){ - IDXGIOutput6* output6 = nullptr; + ComPtr output6; if (SUCCEEDED(output->QueryInterface(__uuidof(IDXGIOutput6), (void**)&output6))) { DXGI_OUTPUT_DESC1 desc1; if (output6) { @@ -276,14 +342,44 @@ void D3D11VARenderer::setHdrMode(bool enabled){ &outputHDRMetaData ); + // Set HDR Input for AMF Converter + if(m_AmfInitialized){ + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); + + // Values taken from AMF Sample: + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/5b32766b801434be61350c292127a9ac022b1268/amf/public/samples/CPPSamples/common/SwapChainDXGI.cpp#L732 + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); + 
m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_COLOR_RANGE, AMF_COLOR_RANGE_FULL); + + AMFHDRMetadata amfHDRMetadata; + amfHDRMetadata.redPrimary[0] = amf_uint16(outputHDRMetaData.RedPrimary[0]); + amfHDRMetadata.redPrimary[1] = amf_uint16(outputHDRMetaData.RedPrimary[1]); + amfHDRMetadata.greenPrimary[0] = amf_uint16(outputHDRMetaData.GreenPrimary[0]); + amfHDRMetadata.greenPrimary[1] = amf_uint16(outputHDRMetaData.GreenPrimary[1]); + amfHDRMetadata.bluePrimary[0] = amf_uint16(outputHDRMetaData.BluePrimary[0]); + amfHDRMetadata.bluePrimary[1] = amf_uint16(outputHDRMetaData.BluePrimary[1]); + amfHDRMetadata.whitePoint[0] = amf_uint16(outputHDRMetaData.WhitePoint[0]); + amfHDRMetadata.whitePoint[1] = amf_uint16(outputHDRMetaData.WhitePoint[1]); + amfHDRMetadata.maxMasteringLuminance = amf_uint32(outputHDRMetaData.MaxMasteringLuminance); + amfHDRMetadata.minMasteringLuminance = amf_uint32(outputHDRMetaData.MinMasteringLuminance); + amfHDRMetadata.maxContentLightLevel = 0; + amfHDRMetadata.maxFrameAverageLightLevel = 0; + + amf::AMFBufferPtr pHDRMetaDataBuffer; + m_AmfContext->AllocBuffer(amf::AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &pHDRMetaDataBuffer); + AMFHDRMetadata* pData = (AMFHDRMetadata*)pHDRMetaDataBuffer->GetNative(); + memcpy(pData, &amfHDRMetadata, sizeof(AMFHDRMetadata)); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_HDR_METADATA, pData); + + m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); + } + displaySet = true; } } - SAFE_COM_RELEASE(output6); } - SAFE_COM_RELEASE(output); } - SAFE_COM_RELEASE(adapter); } SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Set display HDR mode: %s", displaySet ? 
"enabled" : "disabled"); @@ -293,14 +389,21 @@ void D3D11VARenderer::setHdrMode(bool enabled){ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapterNotFound) { bool success = false; - IDXGIAdapter1* adapter = nullptr; + ComPtr adapter; DXGI_ADAPTER_DESC1 adapterDesc; HRESULT hr; +#ifdef QT_DEBUG + D3D_FEATURE_LEVEL featureLevel; + const D3D_FEATURE_LEVEL featureLevels[] = { + D3D_FEATURE_LEVEL_11_1, + }; +#endif + SDL_assert(m_Device == nullptr); SDL_assert(m_DeviceContext == nullptr); - hr = m_Factory->EnumAdapters1(adapterIndex, &adapter); + hr = m_Factory->EnumAdapters1(adapterIndex, adapter.GetAddressOf()); if (hr == DXGI_ERROR_NOT_FOUND) { // Expected at the end of enumeration goto Exit; @@ -332,20 +435,34 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter adapterDesc.VendorId, adapterDesc.DeviceId); - hr = D3D11CreateDevice(adapter, +#ifdef QT_DEBUG + // Generate more information about DirectX11 objects for debugging. + // https://seanmiddleditch.github.io/direct3d-11-debug-api-tricks/ + // Notes: + // * ID3D11Device Refcount: 2 => This is a normal behavior as debugDevice still need m_Device to work + // * For any other object, Refcount: 0, We can ignore IntRef value + hr = D3D11CreateDevice(adapter.Get(), D3D_DRIVER_TYPE_UNKNOWN, nullptr, - D3D11_CREATE_DEVICE_VIDEO_SUPPORT - #ifdef QT_DEBUG - | D3D11_CREATE_DEVICE_DEBUG - #endif - , + D3D11_CREATE_DEVICE_VIDEO_SUPPORT | D3D11_CREATE_DEVICE_DEBUG, + featureLevels, + ARRAYSIZE(featureLevels), + D3D11_SDK_VERSION, + &m_Device, + &featureLevel, + &m_DeviceContext); +#else + hr = D3D11CreateDevice(adapter.Get(), + D3D_DRIVER_TYPE_UNKNOWN, + nullptr, + D3D11_CREATE_DEVICE_VIDEO_SUPPORT, nullptr, 0, D3D11_SDK_VERSION, &m_Device, nullptr, &m_DeviceContext); +#endif if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "D3D11CreateDevice() failed: %x", @@ -353,17 +470,21 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* 
adapter goto Exit; } - if(m_VideoEnhancement->isVideoEnhancementEnabled()){ - createVideoProcessor(); + if(m_VideoEnhancement->isVideoEnhancementEnabled() && !createVideoProcessor()){ + // Disable enhancement if the Video Processor creation failed + m_VideoEnhancement->enableVideoEnhancement(false); + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "VideoProcessor failed to be created"); } - if (!checkDecoderSupport(adapter)) { + if (!checkDecoderSupport(adapter.Get())) { SAFE_COM_RELEASE(m_DeviceContext); SAFE_COM_RELEASE(m_Device); - SAFE_COM_RELEASE(m_VideoContext); - SAFE_COM_RELEASE(m_VideoDevice); - m_VideoProcessorEnumerator = nullptr; - m_VideoProcessor = nullptr; + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } goto Exit; } @@ -374,7 +495,6 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter if (adapterNotFound != nullptr) { *adapterNotFound = (adapter == nullptr); } - SAFE_COM_RELEASE(adapter); return success; } @@ -388,13 +508,13 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter */ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() { - IDXGIAdapter1* adapter = nullptr; + ComPtr adapter; DXGI_ADAPTER_DESC1 adapterDesc; int highestScore = -1; int adapterIndex = -1; int index = 0; - while(m_Factory->EnumAdapters1(index, &adapter) != DXGI_ERROR_NOT_FOUND) + while(m_Factory->EnumAdapters1(index, adapter.GetAddressOf()) != DXGI_ERROR_NOT_FOUND) { if (SUCCEEDED(adapter->GetDesc1(&adapterDesc))) { @@ -406,13 +526,15 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() SAFE_COM_RELEASE(m_DeviceContext); SAFE_COM_RELEASE(m_Device); - SAFE_COM_RELEASE(m_VideoContext); - SAFE_COM_RELEASE(m_VideoDevice); - m_VideoProcessorEnumerator = nullptr; - m_VideoProcessor = nullptr; + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); 
+ } if (SUCCEEDED(D3D11CreateDevice( - adapter, + adapter.Get(), D3D_DRIVER_TYPE_UNKNOWN, nullptr, D3D11_CREATE_DEVICE_VIDEO_SUPPORT, @@ -424,7 +546,7 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() &m_DeviceContext)) && createVideoProcessor()){ - // VSR has the priority over HDR in term of capability we want to use. + // VSR has the priority over SDR-to-HDR in term of capability we want to use. // The priority value may change over the time, // below statement has been established based on drivers' capabilities status by February 29th 2024. @@ -460,19 +582,21 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() } // Set Video enhancement information - if(adapterIndex >= 0 && m_Factory->EnumAdapters1(adapterIndex, &adapter) != DXGI_ERROR_NOT_FOUND){ + if(m_Factory->EnumAdapters1(adapterIndex, adapter.GetAddressOf()) != DXGI_ERROR_NOT_FOUND){ if (SUCCEEDED(adapter->GetDesc1(&adapterDesc))) { SAFE_COM_RELEASE(m_DeviceContext); SAFE_COM_RELEASE(m_Device); - SAFE_COM_RELEASE(m_VideoContext); - SAFE_COM_RELEASE(m_VideoDevice); - m_VideoProcessorEnumerator = nullptr; - m_VideoProcessor = nullptr; + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } if (SUCCEEDED(D3D11CreateDevice( - adapter, + adapter.Get(), D3D_DRIVER_TYPE_UNKNOWN, nullptr, D3D11_CREATE_DEVICE_VIDEO_SUPPORT, @@ -486,33 +610,44 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() m_VideoEnhancement->setVendorID(adapterDesc.VendorId); - // Convert wchar[128] to string - std::wstring GPUname(adapterDesc.Description); - qInfo() << "GPU used for Video Enhancmeent: " << GPUname; - - if(m_VideoEnhancement->isVendorAMD()){ - m_VideoEnhancement->setVSRcapable(enableAMDVideoSuperResolution()); - m_VideoEnhancement->setHDRcapable(enableAMDHDR()); - } else if(m_VideoEnhancement->isVendorIntel()){ - m_VideoEnhancement->setVSRcapable(enableIntelVideoSuperResolution()); - 
m_VideoEnhancement->setHDRcapable(enableIntelHDR()); - } else if(m_VideoEnhancement->isVendorNVIDIA()){ - m_VideoEnhancement->setVSRcapable(enableNvidiaVideoSuperResolution()); - m_VideoEnhancement->setHDRcapable(enableNvidiaHDR()); - } + if(adapterIndex >= 0){ + // Convert wchar[128] to string + std::wstring GPUname(adapterDesc.Description); + qInfo() << "GPU used for Video Enhancement: " << GPUname; + + // Test, but do not active yet to ensure it will be reinitialize when needed + if(m_VideoEnhancement->isVendorAMD()){ + m_VideoEnhancement->setVSRcapable(enableAMDVideoSuperResolution(false)); + m_VideoEnhancement->setHDRcapable(enableAMDHDR(false)); + } else if(m_VideoEnhancement->isVendorIntel()){ + m_VideoEnhancement->setVSRcapable(enableIntelVideoSuperResolution(false)); + m_VideoEnhancement->setHDRcapable(enableIntelHDR(false)); + } else if(m_VideoEnhancement->isVendorNVIDIA()){ + m_VideoEnhancement->setVSRcapable(enableNvidiaVideoSuperResolution(false)); + m_VideoEnhancement->setHDRcapable(enableNvidiaHDR(false)); + } - // Enable the visibility of Video enhancement feature in the settings of the User interface - m_VideoEnhancement->enableUIvisible(); + // Enable the visibility of Video enhancement feature in the settings of the User interface + m_VideoEnhancement->enableUIvisible(); + } else { + // Try Auto Stream Super Resolution provided by DirectX11+ and agnostic to any Vendor + if (m_VideoProcessorCapabilities.AutoStreamCaps & D3D11_VIDEO_PROCESSOR_AUTO_STREAM_CAPS_SUPER_RESOLUTION){ + m_AutoStreamSuperResolution = true; + m_VideoEnhancement->setVSRcapable(true); + } + } } } } SAFE_COM_RELEASE(m_DeviceContext); SAFE_COM_RELEASE(m_Device); - SAFE_COM_RELEASE(m_VideoContext); - SAFE_COM_RELEASE(m_VideoDevice); - m_VideoProcessorEnumerator = nullptr; - m_VideoProcessor = nullptr; + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } return adapterIndex; } @@ -527,16 +662,159 @@ 
int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() * \return bool Return true if the capability is available */ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo){ - // The feature is available since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 check how to implement it + // The feature is available since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md // https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 - // [TODO] Implement AMD Video Scaler - // Documentation and DX11 code sample - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_VQ_Enhancer_API.md - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/public/samples/CPPSamples/SimpleEncoder/SimpleEncoder.cpp + AMF_RESULT res; + amf::AMFCapsPtr amfCaps; + amf::AMFIOCapsPtr pInputCaps; + + // We skip if already initialized + if(m_AmfInitialized && activate) + return true; + + amf::AMF_SURFACE_FORMAT inputSurfaceFormat; + amf::AMF_SURFACE_FORMAT outputSurfaceFormat; + AMF_VIDEO_CONVERTER_COLOR_PROFILE_ENUM outputColorSpace; + AMFColor backgroundColor = AMFConstructColor(0, 0, 0, 255); + + // AMF Context initialization + res = g_AMFFactory.Init(); + if (res != AMF_OK) goto Error; + res = g_AMFFactory.GetFactory()->CreateContext(&m_AmfContext); + if (res != AMF_OK) goto Error; + res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVQEnhancer, &m_AmfDenoiser); + if (res != AMF_OK) goto Error; + res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVideoConverter, &m_AmfFormatConverter); + if (res != AMF_OK) goto Error; + res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFHQScaler, &m_AmfUpScaler); + if (res != AMF_OK) goto Error; + 
res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFHQScaler, &m_AmfDownScaler); + if (res != AMF_OK) goto Error; + + res = m_AmfContext->InitDX11(m_Device); + if (res != AMF_OK) goto Error; + + // AMFHQScaler is the newest feature available (v1.4.33), so at least this one needs to be accessible + m_AmfUpScaler->GetCaps(&amfCaps); + if (amfCaps != nullptr && amfCaps->GetAccelerationType() == amf::AMF_ACCEL_NOT_SUPPORTED) { + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "The hardware does not support needed AMD AMF capabilities."); + goto Error; + } + + // Format initialization + inputSurfaceFormat = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? amf::AMF_SURFACE_P010 : amf::AMF_SURFACE_NV12; + outputSurfaceFormat = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? amf::AMF_SURFACE_R10G10B10A2 : amf::AMF_SURFACE_RGBA; + outputColorSpace = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) + ? AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020 + : AMF_VIDEO_CONVERTER_COLOR_PROFILE_709; + + // Input Surface initialization + res = m_AmfContext->AllocSurface(amf::AMF_MEMORY_DX11, + inputSurfaceFormat, + m_DecoderParams.width, + m_DecoderParams.height, + &m_AmfInputSurface); + if (res != AMF_OK) goto Error; + + // Denoiser initialization (Reduce blocking artifacts) + m_AmfDenoiser->SetProperty(AMF_VIDEO_ENHANCER_OUTPUT_SIZE, ::AMFConstructSize(m_DecoderParams.width, m_DecoderParams.height)); + m_AmfDenoiser->SetProperty(AMF_VIDEO_ENHANCER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); + m_AmfDenoiser->SetProperty(AMF_VE_FCR_ATTENUATION, 0.10); + m_AmfDenoiser->SetProperty(AMF_VE_FCR_RADIUS, 1); + res = m_AmfDenoiser->Init(inputSurfaceFormat, + m_DecoderParams.width, + m_DecoderParams.height); + if (res != AMF_OK) goto Error; + + // Convert to full range picture to reduce the "chroma subsampling blur" effect, and enable the use of FSR 1.1 (otherwise can only use FSR 1.0) + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_MEMORY_TYPE, 
amf::AMF_MEMORY_DX11); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_FORMAT, outputSurfaceFormat); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_COLOR_PROFILE, outputColorSpace); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_FILL, true); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_FILL_COLOR, backgroundColor); + res = m_AmfFormatConverter->Init(inputSurfaceFormat, + m_DecoderParams.width, + m_DecoderParams.height); + if (res != AMF_OK) goto Error; + + // Upscale initialization + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_OUTPUT_SIZE, ::AMFConstructSize(m_OutputTexture.width * m_ScaleUp, m_OutputTexture.height * m_ScaleUp)); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); + // VideoSR1.1 only supports upscaling ratio from 1.1x to 2.0x + if( + m_OutputTexture.width * m_ScaleUp / m_DecoderParams.width >= 1.1 + && m_OutputTexture.width * m_ScaleUp / m_DecoderParams.width <= 2 + && m_OutputTexture.height * m_ScaleUp / m_DecoderParams.height >= 1.1 + && m_OutputTexture.height * m_ScaleUp / m_DecoderParams.height <= 2 + ){ + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_1); + } else { + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_0); + } + + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_KEEP_ASPECT_RATIO, true); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FILL, true); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FILL_COLOR, backgroundColor); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FROM_SRGB, true); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, m_ScaleUp == 1 ? 
2.00 : 0.50); // We only apply sharpening when the picture is scaled + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FRAME_RATE, m_DecoderParams.frameRate); + // Initialize with the size of the texture that will be input + res = m_AmfUpScaler->Init(outputSurfaceFormat, + m_DecoderParams.width, + m_DecoderParams.height); + if (res != AMF_OK) goto Error; + + // Frame Generation + // Cannot use, not available for DirectX11 + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_FRC_API.md#21-component-initialization + + // Downscale (to the app window size) initialization + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_OUTPUT_SIZE, ::AMFConstructSize(m_OutputTexture.width, m_OutputTexture.height)); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_0); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_KEEP_ASPECT_RATIO, true); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FILL, true); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FILL_COLOR, backgroundColor); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FROM_SRGB, true); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, 2.00); + m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FRAME_RATE, m_DecoderParams.frameRate); + res = m_AmfDownScaler->Init(outputSurfaceFormat, + m_OutputTexture.width * m_ScaleUp, + m_OutputTexture.height * m_ScaleUp); + if (res != AMF_OK) goto Error; + + if(!activate){ + // Denoiser + m_AmfDenoiser->Terminate(); + m_AmfDenoiser = nullptr; + // Format converter + m_AmfFormatConverter->Terminate(); + m_AmfFormatConverter = nullptr; + // Up Scaler + m_AmfUpScaler->Terminate(); + m_AmfUpScaler = nullptr; + // Down Scaler + m_AmfDownScaler->Terminate(); + m_AmfDownScaler = nullptr; + // Context + m_AmfContext->Terminate(); + m_AmfContext = nullptr; + // Factory + g_AMFFactory.Terminate(); + + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution 
disabled"); + } else { + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution enabled"); + } + + m_AmfInitialized = activate; + return true; - if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution capability is not yet supported by your client's GPU."); +Error: + if(logInfo) SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "AMD Video Super Resolution failed."); + m_AmfInitialized = false; return false; } @@ -771,6 +1049,10 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) m_DecoderParams = *params; + // Use only even number to avoid a crash at texture creation + m_DecoderParams.width = m_DecoderParams.width & ~1; + m_DecoderParams.height = m_DecoderParams.height & ~1; + if (qgetenv("D3D11VA_ENABLED") == "0") { SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "D3D11VA is disabled by environment variable"); @@ -795,7 +1077,39 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - hr = CreateDXGIFactory(__uuidof(IDXGIFactory5), (void**)&m_Factory); + // Use the current window size as the swapchain size + SDL_GetWindowSize(m_DecoderParams.window, (int*)&m_DisplayWidth, (int*)&m_DisplayHeight); + + // Round down to even number to avoid a crash at texture creation + m_DisplayWidth = m_DisplayWidth & ~1; + m_DisplayHeight = m_DisplayHeight & ~1; + + // As m_Display corresponds to the application window, which may not have the same ratio as the Frame, + // we calculate the size of the final texture to fit in the window without distortion + m_OutputTexture.width = m_DisplayWidth; + m_OutputTexture.height = m_DisplayHeight; + m_OutputTexture.left = 0; + m_OutputTexture.top = 0; + + // Scale the source to the destination surface while keeping the same ratio + float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); + float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); + + if(ratioHeight < ratioWidth){ + // Adjust the Width + 
m_OutputTexture.width = static_cast(std::floor(m_DecoderParams.width * ratioHeight)); + m_OutputTexture.width = m_OutputTexture.width & ~1; + m_OutputTexture.left = static_cast(std::floor( abs(m_DisplayWidth - m_OutputTexture.width) / 2 )); + m_OutputTexture.left = m_OutputTexture.left & ~1; + } else if(ratioWidth < ratioHeight) { + // Adjust the Height + m_OutputTexture.height = static_cast(std::floor(m_DecoderParams.height * ratioWidth)); + m_OutputTexture.height = m_OutputTexture.height & ~1; + m_OutputTexture.top = static_cast(std::floor( abs(m_DisplayHeight - m_OutputTexture.height) / 2 )); + m_OutputTexture.top = m_OutputTexture.top & ~1; + } + + hr = CreateDXGIFactory(__uuidof(IDXGIFactory5), (void**)m_Factory.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "CreateDXGIFactory() failed: %x", @@ -806,6 +1120,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // If getAdapterIndex return 0+, it means that we already identified which adapter best fit for Video enhancement, // so we don't have to estimate it more times to speed up the launch of the streaming. if(m_VideoEnhancement->getAdapterIndex() < 0){ + // This line is run only once during the application life and is necessary to display (or not) + // the Video enhancement checkbox if the GPU enables it int adapterIndex = getAdapterIndexByEnhancementCapabilities(); if(adapterIndex >= 0){ m_VideoEnhancement->setAdapterIndex(adapterIndex); @@ -851,9 +1167,27 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // Set VSR and HDR if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + // We draw on a bigger output, this will give more space to any vendor Scale Up solution to generate more + // details with less artifacts around contrasted borders. 
+ m_ScaleUp = 2; + if(m_DecoderParams.width == m_OutputTexture.width || m_DecoderParams.height == m_OutputTexture.height){ + // We don't scale up when the pixel ratio is 1:1 between the input frame and the output texture, + // it helps to keep perfect pixel matching from original + m_ScaleUp = 1; + } + if(m_DisplayWidth > 2560 || m_DisplayHeight > 1440){ + // For anything bigger than 1440p, we don't scale as it will require too much resources for low-end devices. + // We want to keep a ratio 1:1 pixel to avoid blur effect when the texture is scaled down at rendering. + m_ScaleUp = 1; + } // Enable VSR feature if available if(m_VideoEnhancement->isVSRcapable()){ - if(m_VideoEnhancement->isVendorAMD()){ + // Try Auto Stream Super Resolution provided by DirectX11+ and agnostic to any Vendor + if (m_AutoStreamSuperResolution){ + // The flag does exist, but not the method yet (by March 8th, 2024) + // We still can prepare the code once Microsoft enables it. + // m_VideoContext->VideoProcessorSetStreamSuperResolution(m_VideoProcessor.Get(), 0, true); + } else if(m_VideoEnhancement->isVendorAMD()){ enableAMDVideoSuperResolution(); } else if(m_VideoEnhancement->isVendorIntel()){ enableIntelVideoSuperResolution(); @@ -902,12 +1236,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // causes performance issues (buffer starvation) on AMD GPUs. swapChainDesc.BufferCount = 3 + 1 + 1; - // Use the current window size as the swapchain size - SDL_GetWindowSize(params->window, (int*)&swapChainDesc.Width, (int*)&swapChainDesc.Height); - - m_DisplayWidth = swapChainDesc.Width; - m_DisplayHeight = swapChainDesc.Height; - + swapChainDesc.Width = m_DisplayWidth; + swapChainDesc.Height = m_DisplayHeight; if (params->videoFormat & VIDEO_FORMAT_MASK_10BIT) { swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM; } @@ -951,7 +1281,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // DXVA2 may let us take over for FSE V-sync off cases. 
However, if we don't have DXGI_FEATURE_PRESENT_ALLOW_TEARING // then we should not attempt to do this unless there's no other option (HDR, DXVA2 failed in pass 1, etc). if (!m_AllowTearing && m_DecoderSelectionPass == 0 && !(params->videoFormat & VIDEO_FORMAT_MASK_10BIT) && - (SDL_GetWindowFlags(params->window) & SDL_WINDOW_FULLSCREEN_DESKTOP) == SDL_WINDOW_FULLSCREEN) { + (SDL_GetWindowFlags(params->window) & SDL_WINDOW_FULLSCREEN_DESKTOP) == SDL_WINDOW_FULLSCREEN) { SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION, "Defaulting to DXVA2 for FSE without DXGI_FEATURE_PRESENT_ALLOW_TEARING support"); return false; @@ -965,13 +1295,13 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // Always use windowed or borderless windowed mode.. SDL does mode-setting for us in // full-screen exclusive mode (SDL_WINDOW_FULLSCREEN), so this actually works out okay. - IDXGISwapChain1* swapChain; + ComPtr swapChain; hr = m_Factory->CreateSwapChainForHwnd(m_Device, info.info.win.window, &swapChainDesc, nullptr, nullptr, - &swapChain); + swapChain.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -980,8 +1310,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - hr = swapChain->QueryInterface(__uuidof(IDXGISwapChain4), (void**)&m_SwapChain); - swapChain->Release(); + hr = swapChain->QueryInterface(__uuidof(IDXGISwapChain4), (void**)m_SwapChain.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1034,8 +1363,8 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } } - // Create our video texture and SRVs - if (!setupVideoTexture()) { + // Create our video textures and SRVs + if (!setupVideoTexture() || !setupFrameTexture()) { return false; } @@ -1043,8 +1372,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) initializeVideoProcessor(); } - SAFE_COM_RELEASE(m_BackBufferResource); - return true; } @@ -1107,7 +1434,7 @@ void D3D11VARenderer::renderFrame(AVFrame* 
frame) "IDXGISwapChain::SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020) failed: %x", hr); } - if (m_VideoProcessor && m_VideoProcessorEnumerator) { + if (m_VideoProcessor) { m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); }; } @@ -1119,7 +1446,7 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) "IDXGISwapChain::SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709) failed: %x", hr); } - if (m_VideoProcessor && m_VideoProcessorEnumerator) { + if (m_VideoProcessor) { m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); } } @@ -1157,11 +1484,11 @@ void D3D11VARenderer::renderOverlay(Overlay::OverlayType type) return; } - ID3D11Texture2D* overlayTexture = m_OverlayTextures[type]; - ID3D11Buffer* overlayVertexBuffer = m_OverlayVertexBuffers[type]; - ID3D11ShaderResourceView* overlayTextureResourceView = m_OverlayTextureResourceViews[type]; + ComPtr overlayTexture = m_OverlayTextures[type].Get(); + ComPtr overlayVertexBuffer = m_OverlayVertexBuffers[type].Get(); + ComPtr overlayTextureResourceView = m_OverlayTextureResourceViews[type].Get(); - if (overlayTexture == nullptr) { + if (overlayTexture.Get() == nullptr) { SDL_AtomicUnlock(&m_OverlayLock); return; } @@ -1169,27 +1496,20 @@ void D3D11VARenderer::renderOverlay(Overlay::OverlayType type) // Reference these objects so they don't immediately go away if the // overlay update thread tries to release them. 
SDL_assert(overlayVertexBuffer != nullptr); - overlayTexture->AddRef(); - overlayVertexBuffer->AddRef(); - overlayTextureResourceView->AddRef(); SDL_AtomicUnlock(&m_OverlayLock); // Bind vertex buffer UINT stride = sizeof(VERTEX); UINT offset = 0; - m_DeviceContext->IASetVertexBuffers(0, 1, &overlayVertexBuffer, &stride, &offset); + m_DeviceContext->IASetVertexBuffers(0, 1, overlayVertexBuffer.GetAddressOf(), &stride, &offset); // Bind pixel shader and resources - m_DeviceContext->PSSetShader(m_OverlayPixelShader, nullptr, 0); - m_DeviceContext->PSSetShaderResources(0, 1, &overlayTextureResourceView); + m_DeviceContext->PSSetShader(m_OverlayPixelShader.Get(), nullptr, 0); + m_DeviceContext->PSSetShaderResources(0, 1, overlayTextureResourceView.GetAddressOf()); // Draw the overlay m_DeviceContext->DrawIndexed(6, 0, 0); - - overlayTextureResourceView->Release(); - overlayTexture->Release(); - overlayVertexBuffer->Release(); } void D3D11VARenderer::bindColorConversion(AVFrame* frame) @@ -1199,14 +1519,14 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) // We have purpose-built shaders for the common Rec 601 (SDR) and Rec 2020 (HDR) cases if (!fullRange && colorspace == COLORSPACE_REC_601) { - m_DeviceContext->PSSetShader(m_VideoBt601LimPixelShader, nullptr, 0); + m_DeviceContext->PSSetShader(m_VideoBt601LimPixelShader.Get(), nullptr, 0); } else if (!fullRange && colorspace == COLORSPACE_REC_2020) { - m_DeviceContext->PSSetShader(m_VideoBt2020LimPixelShader, nullptr, 0); + m_DeviceContext->PSSetShader(m_VideoBt2020LimPixelShader.Get(), nullptr, 0); } else { // We'll need to use the generic shader for this colorspace and color range combo - m_DeviceContext->PSSetShader(m_VideoGenericPixelShader, nullptr, 0); + m_DeviceContext->PSSetShader(m_VideoGenericPixelShader.Get(), nullptr, 0); // If nothing has changed since last frame, we're done if (colorspace == m_LastColorSpace && fullRange == m_LastFullRange) { @@ -1259,11 +1579,11 @@ void 
D3D11VARenderer::bindColorConversion(AVFrame* frame) D3D11_SUBRESOURCE_DATA constData = {}; constData.pSysMem = &constBuf; - ID3D11Buffer* constantBuffer; - HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, &constantBuffer); + ComPtr constantBuffer; + HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, constantBuffer.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->PSSetConstantBuffers(0, 1, &constantBuffer); - constantBuffer->Release(); + m_DeviceContext->PSSetConstantBuffers(0, 1, constantBuffer.GetAddressOf()); + // constantBuffer->Release(); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1306,33 +1626,41 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) switch (frameColorSpace) { case COLORSPACE_REC_2020: // This Stream Color Space accepts HDR mode from Server, but NVIDIA AI-HDR will be disabled (which is fine as we already have native HDR) - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, frameFullRange ? DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020 : DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); if(m_VideoEnhancement->isVendorNVIDIA()){ // [TODO] Remove this line if NVIDIA fix the issue of having VSR not working (add a gray filter) // while HDR is activated for Stream content (swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM;) enableNvidiaVideoSuperResolution(); // Turn it "false" if we prefer to not see the white border around elements when VSR is active. 
} + if(m_AmfInitialized){ + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, frameFullRange ? AMF_COLOR_RANGE_FULL : AMF_COLOR_RANGE_STUDIO); + m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); + } break; default: // This Stream Color Space is SDR, which enable the use of NVIDIA AI-HDR (Moonlight's HDR needs to be enabled) // I don't know why, it is gray when HDR is on on Moonlight while using DXGI_FORMAT_R10G10B10A2_UNORM for the SwapChain, // the fix is to force using DXGI_FORMAT_R8G8B8A8_UNORM which seems somehow not impacting the color rendering - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, frameFullRange ? 
DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709 : DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); if(m_VideoEnhancement->isVendorNVIDIA()){ // Always enable NVIDIA VSR for SDR Stream content enableNvidiaVideoSuperResolution(); } + if(m_AmfInitialized){ + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, false); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_UNDEFINED); + m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, AMF_COLOR_RANGE_UNDEFINED); + m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); + } } } void D3D11VARenderer::renderVideo(AVFrame* frame) { - // Bind video rendering vertex buffer - UINT stride = sizeof(VERTEX); - UINT offset = 0; - m_DeviceContext->IASetVertexBuffers(0, 1, &m_VideoVertexBuffer, &stride, &offset); - - // Copy this frame (minus alignment padding) into our video texture D3D11_BOX srcBox; srcBox.left = 0; srcBox.top = 0; @@ -1340,7 +1668,57 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) srcBox.bottom = m_DecoderParams.height; srcBox.front = 0; srcBox.back = 1; - m_DeviceContext->CopySubresourceRegion(m_VideoTexture, 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + + // Setup for AMD AMF + if(m_AmfInitialized){ + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_FrameTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + m_AmfContext->CreateSurfaceFromDX11Native(m_FrameTexture.Get(), &m_AmfInputSurface, nullptr); + + amf::AMFDataPtr m_AmfData(m_AmfInputSurface); + + // Denoisier => Reduce deblocking artifacts due to compressed streamed content + 
m_AmfDenoiser->SubmitInput(m_AmfData); + m_AmfDenoiser->QueryOutput(&m_AmfData); + m_AmfDenoiser->Flush(); + + // Format converter => To provide best color rendering + m_AmfFormatConverter->SubmitInput(m_AmfData); + m_AmfFormatConverter->QueryOutput(&m_AmfData); + m_AmfFormatConverter->Flush(); + + // Up Scaling => To a higher resolution than the application window to give more surface to the VSR to generate details and thus picture clarity + m_AmfUpScaler->SubmitInput(m_AmfData); + m_AmfUpScaler->QueryOutput(&m_AmfData); + m_AmfUpScaler->Flush(); + + // We don't need to scale down if the pixel ratio is already 1:1 + if(m_ScaleUp != 1){ + // Down Scaling => To avoid a blur effect if relying on VideoProcessor, this method provides clear pixel rendering + m_AmfDownScaler->SubmitInput(m_AmfData); + m_AmfDownScaler->QueryOutput(&m_AmfData); + m_AmfDownScaler->Flush(); + } + + amf::AMFSurfacePtr amfOutputSurface(m_AmfData); + m_DeviceContext->CopyResource(m_VideoTexture.Get(), (ID3D11Texture2D*)amfOutputSurface->GetPlaneAt(0)->GetNative()); + } else { + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + } + + } else { + + // Bind video rendering vertex buffer + UINT stride = sizeof(VERTEX); + UINT offset = 0; + m_DeviceContext->IASetVertexBuffers(0, 1, m_VideoVertexBuffer.GetAddressOf(), &stride, &offset); + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + + } // Draw the video if(m_VideoEnhancement->isVideoEnhancementEnabled()){ @@ -1372,19 +1750,23 @@ bool D3D11VARenderer::createVideoProcessor() HRESULT hr; D3D11_VIDEO_PROCESSOR_CONTENT_DESC content_desc; - m_VideoProcessorEnumerator = nullptr; - m_VideoProcessor = 
nullptr; + if(m_VideoProcessorEnumerator){ + m_VideoProcessorEnumerator.Reset(); + } + if(m_VideoProcessor){ + m_VideoProcessor.Reset(); + } // Get video device hr = m_Device->QueryInterface(__uuidof(ID3D11VideoDevice), - (void**)&m_VideoDevice); + (void**)m_VideoDevice.GetAddressOf()); if (FAILED(hr)) { return false; } // Get video context hr = m_DeviceContext->QueryInterface(__uuidof(ID3D11VideoContext2), - (void**)&m_VideoContext); + (void**)m_VideoContext.GetAddressOf()); if (FAILED(hr)) { return false; } @@ -1399,7 +1781,7 @@ bool D3D11VARenderer::createVideoProcessor() content_desc.OutputHeight = m_DisplayHeight; content_desc.OutputFrameRate.Numerator = m_DecoderParams.frameRate; content_desc.OutputFrameRate.Denominator = 1; - content_desc.Usage = D3D11_VIDEO_USAGE_OPTIMAL_SPEED; + content_desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; hr = m_VideoDevice->CreateVideoProcessorEnumerator(&content_desc, &m_VideoProcessorEnumerator); if (FAILED(hr)) @@ -1410,6 +1792,11 @@ bool D3D11VARenderer::createVideoProcessor() if (FAILED(hr)) return false; + hr = m_VideoProcessorEnumerator->GetVideoProcessorCaps(&m_VideoProcessorCapabilities); + if (FAILED(hr)) { + return false; + } + return true; } @@ -1423,53 +1810,50 @@ bool D3D11VARenderer::createVideoProcessor() bool D3D11VARenderer::initializeVideoProcessor() { HRESULT hr; + D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputViewDesc; + D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputViewDesc; - m_VideoContext->VideoProcessorSetStreamAutoProcessingMode(m_VideoProcessor.Get(), 0, false); m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, 0); // Set Background color D3D11_VIDEO_COLOR bgColor; - bgColor.YCbCr = { 0.0625f, 0.5f, 0.5f, 1.0f }; // black color - m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor.Get(), true, &bgColor); + bgColor.RGBA = { 0, 0, 0, 1 }; // black color + 
m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor.Get(), false, &bgColor); - ZeroMemory(&m_OutputViewDesc, sizeof(m_OutputViewDesc)); - m_OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D; - m_OutputViewDesc.Texture2D.MipSlice = 0; + ZeroMemory(&outputViewDesc, sizeof(outputViewDesc)); + outputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D; + outputViewDesc.Texture2D.MipSlice = 0; hr = m_VideoDevice->CreateVideoProcessorOutputView( - m_BackBufferResource, + m_BackBufferResource.Get(), m_VideoProcessorEnumerator.Get(), - &m_OutputViewDesc, + &outputViewDesc, (ID3D11VideoProcessorOutputView**)&m_OutputView); - if (FAILED(hr)) { + if (FAILED(hr)) return false; - } - ZeroMemory(&m_InputViewDesc, sizeof(m_InputViewDesc)); - m_InputViewDesc.FourCC = 0; - m_InputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; - m_InputViewDesc.Texture2D.MipSlice = 0; - m_InputViewDesc.Texture2D.ArraySlice = 0; + ZeroMemory(&inputViewDesc, sizeof(inputViewDesc)); + inputViewDesc.FourCC = 0; + inputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; + inputViewDesc.Texture2D.MipSlice = 0; + inputViewDesc.Texture2D.ArraySlice = 0; hr = m_VideoDevice->CreateVideoProcessorInputView( - m_VideoTexture, m_VideoProcessorEnumerator.Get(), &m_InputViewDesc, (ID3D11VideoProcessorInputView**)&m_InputView); + m_VideoTexture.Get(), m_VideoProcessorEnumerator.Get(), &inputViewDesc, (ID3D11VideoProcessorInputView**)&m_InputView); if (FAILED(hr)) return false; RECT dstRect = { 0 }; dstRect.right = m_DisplayWidth; dstRect.bottom = m_DisplayHeight; + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + dstRect.right = m_OutputTexture.width; + dstRect.bottom = m_OutputTexture.height; - // Sscale the source to the destination surface while keeping the same ratio - float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); - float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); - - // [TODO] 
There is a behavior I don't understand (bug?) when the destination desRect is larger by one of its side than the source. - If it is bigger, the window becomes black, but if it is smaller it is fine. - Only one case is working when it is bigger is when the dstRest perfectly equal to the Display size. - Investigation: If there anything to do with pixel alignment (c.f. dxva2.cpp FFALIGN), or screenSpaceToNormalizedDeviceCoords ? - Fix: When bigger we strech the picture to the window, it will be deformed, but at least will not crash. - if(m_DisplayWidth < m_DecoderParams.width && m_DisplayHeight < m_DecoderParams.height){ + // Scale the source to the destination surface while keeping the same ratio + float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); + float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); + // When VSR for NVIDIA is in use in Window mode, it may bug (black screen) the rendering while stretching bigger the window if(ratioHeight < ratioWidth){ // Adjust the Width long width = static_cast(std::floor(m_DecoderParams.width * ratioHeight)); @@ -1482,8 +1866,8 @@ bool D3D11VARenderer::initializeVideoProcessor() dstRect.bottom = dstRect.top + height; } } - m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor.Get(), 0, true, &dstRect); + m_VideoContext->VideoProcessorSetStreamFrameFormat(m_VideoProcessor.Get(), 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE); ZeroMemory(&m_StreamData, sizeof(m_StreamData)); @@ -1506,23 +1890,27 @@ bool D3D11VARenderer::initializeVideoProcessor() m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); } - // The section is a customization per vendor to slightly enhance (non-AI methods) the frame appearance - int noiseReduction = 0; - int edgeEnhancement = 0; + // The section is a customization per vendor to slightly enhance (non-AI methods) the frame appearance. 
+ // It does work in addition to AI-enhancement for better result. if(m_VideoEnhancement->isVendorAMD()){ - noiseReduction = 30; - edgeEnhancement = 30; + // AMD has its own filters } else if(m_VideoEnhancement->isVendorIntel()){ - noiseReduction = 30; - edgeEnhancement = 30; + // Reduce blocking artifacts + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) + // Sharpen sligthly the picture to enhance details + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 30); // (0 / 0 / 100) + } else if(m_VideoEnhancement->isVendorNVIDIA()){ - noiseReduction = 30; - edgeEnhancement = 30; + // Reduce blocking artifacts + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) + // Sharpen sligthly the picture to enhance details + if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 30); // (0 / 0 / 100) + } - // Reduce artefacts (like pixelisation around text), does work in addition to AI-enhancement for better result - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, noiseReduction); // (0 / 0 / 100) - // Sharpen sligthly the picture to enhance details, does work in addition to AI-enhancement for better result - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, 
D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, edgeEnhancement); // (0 / 0 / 100) // Default on SDR, it will switch to HDR automatically at the 1st frame received if the Stream source has HDR active. m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); @@ -1543,21 +1931,6 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) return; } - SDL_AtomicLock(&m_OverlayLock); - ID3D11Texture2D* oldTexture = m_OverlayTextures[type]; - m_OverlayTextures[type] = nullptr; - - ID3D11Buffer* oldVertexBuffer = m_OverlayVertexBuffers[type]; - m_OverlayVertexBuffers[type] = nullptr; - - ID3D11ShaderResourceView* oldTextureResourceView = m_OverlayTextureResourceViews[type]; - m_OverlayTextureResourceViews[type] = nullptr; - SDL_AtomicUnlock(&m_OverlayLock); - - SAFE_COM_RELEASE(oldTextureResourceView); - SAFE_COM_RELEASE(oldTexture); - SAFE_COM_RELEASE(oldVertexBuffer); - // If the overlay is disabled, we're done if (!overlayEnabled) { SDL_FreeSurface(newSurface); @@ -1585,21 +1958,18 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) texData.pSysMem = newSurface->pixels; texData.SysMemPitch = newSurface->pitch; - ID3D11Texture2D* newTexture; - hr = m_Device->CreateTexture2D(&texDesc, &texData, &newTexture); + ComPtr newTexture; + hr = m_Device->CreateTexture2D(&texDesc, &texData, newTexture.GetAddressOf()); if (FAILED(hr)) { - SDL_FreeSurface(newSurface); SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateTexture2D() failed: %x", hr); return; } - ID3D11ShaderResourceView* newTextureResourceView = nullptr; - hr = m_Device->CreateShaderResourceView((ID3D11Resource*)newTexture, nullptr, &newTextureResourceView); + ComPtr newTextureResourceView; + hr = m_Device->CreateShaderResourceView((ID3D11Resource*)newTexture.Get(), nullptr, newTextureResourceView.GetAddressOf()); if (FAILED(hr)) { - SAFE_COM_RELEASE(newTexture); - SDL_FreeSurface(newSurface); 
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateShaderResourceView() failed: %x", hr); @@ -1648,11 +2018,9 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) D3D11_SUBRESOURCE_DATA vbData = {}; vbData.pSysMem = verts; - ID3D11Buffer* newVertexBuffer; - hr = m_Device->CreateBuffer(&vbDesc, &vbData, &newVertexBuffer); + ComPtr newVertexBuffer; + hr = m_Device->CreateBuffer(&vbDesc, &vbData, newVertexBuffer.GetAddressOf()); if (FAILED(hr)) { - SAFE_COM_RELEASE(newTextureResourceView); - SAFE_COM_RELEASE(newTexture); SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateBuffer() failed: %x", hr); @@ -1660,24 +2028,18 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) } SDL_AtomicLock(&m_OverlayLock); - m_OverlayVertexBuffers[type] = newVertexBuffer; - m_OverlayTextures[type] = newTexture; - m_OverlayTextureResourceViews[type] = newTextureResourceView; + m_OverlayVertexBuffers[type] = newVertexBuffer.Get(); + m_OverlayTextures[type] = newTexture.Get(); + m_OverlayTextureResourceViews[type] = newTextureResourceView.Get(); SDL_AtomicUnlock(&m_OverlayLock); } bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) { HRESULT hr; - ID3D11VideoDevice* videoDevice; - // Derive a ID3D11VideoDevice from our ID3D11Device. 
- hr = m_Device->QueryInterface(__uuidof(ID3D11VideoDevice), (void**)&videoDevice); - if (FAILED(hr)) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "ID3D11Device::QueryInterface(ID3D11VideoDevice) failed: %x", - hr); - return false; + if(m_VideoDevice == nullptr){ + createVideoProcessor(); } // Check if the format is supported by this decoder @@ -1685,88 +2047,75 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) switch (m_DecoderParams.videoFormat) { case VIDEO_FORMAT_H264: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_H264_VLD_NOFGT, DXGI_FORMAT_NV12, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_H264_VLD_NOFGT, DXGI_FORMAT_NV12, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support H.264 decoding"); - videoDevice->Release(); return false; } else if (!supported) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support H.264 decoding to NV12 format"); - videoDevice->Release(); return false; } break; case VIDEO_FORMAT_H265: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN, DXGI_FORMAT_NV12, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN, DXGI_FORMAT_NV12, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC decoding"); - videoDevice->Release(); return false; } else if (!supported) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC decoding to NV12 format"); - videoDevice->Release(); return false; } break; case VIDEO_FORMAT_H265_MAIN10: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10, DXGI_FORMAT_P010, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10, DXGI_FORMAT_P010, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC Main10 decoding"); - videoDevice->Release(); 
return false; } else if (!supported) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC Main10 decoding to P010 format"); - videoDevice->Release(); return false; } break; case VIDEO_FORMAT_AV1_MAIN8: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_NV12, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_NV12, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 decoding"); - videoDevice->Release(); return false; } else if (!supported) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 decoding to NV12 format"); - videoDevice->Release(); return false; } break; case VIDEO_FORMAT_AV1_MAIN10: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_P010, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_AV1_VLD_PROFILE0, DXGI_FORMAT_P010, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 Main10 decoding"); - videoDevice->Release(); return false; } else if (!supported) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support AV1 Main10 decoding to P010 format"); - videoDevice->Release(); return false; } break; default: SDL_assert(false); - videoDevice->Release(); return false; } - videoDevice->Release(); - DXGI_ADAPTER_DESC adapterDesc; hr = adapter->GetDesc(&adapterDesc); if (FAILED(hr)) { @@ -1843,11 +2192,10 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray vertexShaderBytecode = Path::readDataFile("d3d11_vertex.fxc"); - ID3D11VertexShader* vertexShader; - hr = m_Device->CreateVertexShader(vertexShaderBytecode.constData(), vertexShaderBytecode.length(), nullptr, &vertexShader); + ComPtr vertexShader; + hr = m_Device->CreateVertexShader(vertexShaderBytecode.constData(), vertexShaderBytecode.length(), nullptr, 
vertexShader.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->VSSetShader(vertexShader, nullptr, 0); - vertexShader->Release(); + m_DeviceContext->VSSetShader(vertexShader.Get(), nullptr, 0); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1861,11 +2209,10 @@ bool D3D11VARenderer::setupRenderingResources() { "POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 }, { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0 }, }; - ID3D11InputLayout* inputLayout; - hr = m_Device->CreateInputLayout(vertexDesc, ARRAYSIZE(vertexDesc), vertexShaderBytecode.constData(), vertexShaderBytecode.length(), &inputLayout); + ComPtr inputLayout; + hr = m_Device->CreateInputLayout(vertexDesc, ARRAYSIZE(vertexDesc), vertexShaderBytecode.constData(), vertexShaderBytecode.length(), inputLayout.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->IASetInputLayout(inputLayout); - inputLayout->Release(); + m_DeviceContext->IASetInputLayout(inputLayout.Get()); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1878,7 +2225,7 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray overlayPixelShaderBytecode = Path::readDataFile("d3d11_overlay_pixel.fxc"); - hr = m_Device->CreatePixelShader(overlayPixelShaderBytecode.constData(), overlayPixelShaderBytecode.length(), nullptr, &m_OverlayPixelShader); + hr = m_Device->CreatePixelShader(overlayPixelShaderBytecode.constData(), overlayPixelShaderBytecode.length(), nullptr, m_OverlayPixelShader.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreatePixelShader() failed: %x", @@ -1890,7 +2237,7 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray videoPixelShaderBytecode = Path::readDataFile("d3d11_genyuv_pixel.fxc"); - hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, &m_VideoGenericPixelShader); + hr = 
m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, m_VideoGenericPixelShader.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreatePixelShader() failed: %x", @@ -1902,7 +2249,7 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray videoPixelShaderBytecode = Path::readDataFile("d3d11_bt601lim_pixel.fxc"); - hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, &m_VideoBt601LimPixelShader); + hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, m_VideoBt601LimPixelShader.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreatePixelShader() failed: %x", @@ -1914,7 +2261,7 @@ bool D3D11VARenderer::setupRenderingResources() { QByteArray videoPixelShaderBytecode = Path::readDataFile("d3d11_bt2020lim_pixel.fxc"); - hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, &m_VideoBt2020LimPixelShader); + hr = m_Device->CreatePixelShader(videoPixelShaderBytecode.constData(), videoPixelShaderBytecode.length(), nullptr, m_VideoBt2020LimPixelShader.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreatePixelShader() failed: %x", @@ -1936,11 +2283,10 @@ bool D3D11VARenderer::setupRenderingResources() samplerDesc.MinLOD = 0.0f; samplerDesc.MaxLOD = D3D11_FLOAT32_MAX; - ID3D11SamplerState* sampler; - hr = m_Device->CreateSamplerState(&samplerDesc, &sampler); + ComPtr sampler; + hr = m_Device->CreateSamplerState(&samplerDesc, sampler.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->PSSetSamplers(0, 1, &sampler); - sampler->Release(); + m_DeviceContext->PSSetSamplers(0, 1, sampler.GetAddressOf()); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1952,7 +2298,7 @@ bool 
D3D11VARenderer::setupRenderingResources() // Create our render target view { - hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource), (void**)&m_BackBufferResource); + hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Resource), (void**)m_BackBufferResource.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "IDXGISwapChain::GetBuffer() failed: %x", @@ -1960,7 +2306,7 @@ bool D3D11VARenderer::setupRenderingResources() return false; } - hr = m_Device->CreateRenderTargetView(m_BackBufferResource, nullptr, &m_RenderTargetView); + hr = m_Device->CreateRenderTargetView(m_BackBufferResource.Get(), nullptr, &m_RenderTargetView); // m_BackBufferResource is still needed in createVideoProcessor(), therefore will be released later if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1985,11 +2331,10 @@ bool D3D11VARenderer::setupRenderingResources() indexBufferData.pSysMem = indexes; indexBufferData.SysMemPitch = sizeof(int); - ID3D11Buffer* indexBuffer; - hr = m_Device->CreateBuffer(&indexBufferDesc, &indexBufferData, &indexBuffer); + ComPtr indexBuffer; + hr = m_Device->CreateBuffer(&indexBufferDesc, &indexBufferData, indexBuffer.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->IASetIndexBuffer(indexBuffer, DXGI_FORMAT_R32_UINT, 0); - indexBuffer->Release(); + m_DeviceContext->IASetIndexBuffer(indexBuffer.Get(), DXGI_FORMAT_R32_UINT, 0); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -2034,7 +2379,7 @@ bool D3D11VARenderer::setupRenderingResources() D3D11_SUBRESOURCE_DATA vbData = {}; vbData.pSysMem = verts; - hr = m_Device->CreateBuffer(&vbDesc, &vbData, &m_VideoVertexBuffer); + hr = m_Device->CreateBuffer(&vbDesc, &vbData, m_VideoVertexBuffer.GetAddressOf()); if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateBuffer() failed: %x", @@ -2057,11 +2402,11 @@ bool D3D11VARenderer::setupRenderingResources() blendDesc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD; 
blendDesc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL; - ID3D11BlendState* blendState; - hr = m_Device->CreateBlendState(&blendDesc, &blendState); + ComPtr blendState; + hr = m_Device->CreateBlendState(&blendDesc, blendState.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->OMSetBlendState(blendState, nullptr, 0xffffffff); - blendState->Release(); + m_DeviceContext->OMSetBlendState(blendState.Get(), nullptr, 0xffffffff); + // blendState->Release(); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -2093,25 +2438,37 @@ bool D3D11VARenderer::setupVideoTexture() HRESULT hr; D3D11_TEXTURE2D_DESC texDesc = {}; + // Size of the output texture texDesc.Width = m_DecoderParams.width; texDesc.Height = m_DecoderParams.height; + if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_AmfInitialized){ + texDesc.Width = m_OutputTexture.width; + texDesc.Height = m_OutputTexture.height; + } texDesc.MipLevels = 1; texDesc.ArraySize = 1; - texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; + if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_AmfInitialized){ + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R10G10B10A2_UNORM : DXGI_FORMAT_R8G8B8A8_UNORM; + } else { + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; + } texDesc.SampleDesc.Quality = 0; texDesc.SampleDesc.Count = 1; texDesc.Usage = D3D11_USAGE_DEFAULT; texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; // The flag D3D11_BIND_RENDER_TARGET is needed to enable the use of GPU enhancement - if(m_DecoderParams.enableVideoEnhancement && m_VideoEnhancement->isEnhancementCapable()){ + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ texDesc.BindFlags |= D3D11_BIND_RENDER_TARGET; } texDesc.CPUAccessFlags = 0; texDesc.MiscFlags = 0; - hr = m_Device->CreateTexture2D(&texDesc, nullptr, &m_VideoTexture); + if(m_AmfInitialized){ + texDesc.MiscFlags |= D3D11_RESOURCE_MISC_SHARED; + } + + hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_VideoTexture.GetAddressOf()); if (FAILED(hr)) { - m_VideoTexture = nullptr; SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateTexture2D() failed: %x", hr); @@ -2124,7 +2481,7 @@ bool D3D11VARenderer::setupVideoTexture() srvDesc.Texture2D.MostDetailedMip = 0; srvDesc.Texture2D.MipLevels = 1; srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R16_UNORM : DXGI_FORMAT_R8_UNORM; - hr = m_Device->CreateShaderResourceView(m_VideoTexture, &srvDesc, &m_VideoTextureResourceViews[0]); + hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[0]); if (FAILED(hr)) { m_VideoTextureResourceViews[0] = nullptr; SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -2134,7 +2491,7 @@ bool D3D11VARenderer::setupVideoTexture() } srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_R16G16_UNORM : DXGI_FORMAT_R8G8_UNORM; - hr = m_Device->CreateShaderResourceView(m_VideoTexture, &srvDesc, &m_VideoTextureResourceViews[1]); + hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[1]); if (FAILED(hr)) { m_VideoTextureResourceViews[1] = nullptr; SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -2145,3 +2502,33 @@ bool D3D11VARenderer::setupVideoTexture() return true; } + +bool D3D11VARenderer::setupFrameTexture() +{ + // Texture description + D3D11_TEXTURE2D_DESC texDesc = {}; + // Same size as the input Frame + texDesc.Width = m_DecoderParams.width; + texDesc.Height = m_DecoderParams.height; + texDesc.MipLevels = 1; + texDesc.ArraySize = 1; + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; + texDesc.SampleDesc.Quality = 0; + texDesc.SampleDesc.Count = 1; + texDesc.Usage = D3D11_USAGE_DEFAULT; + texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; + // The flag D3D11_BIND_RENDER_TARGET is needed to enable the use of GPU enhancement + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + texDesc.BindFlags |= D3D11_BIND_RENDER_TARGET; + } + texDesc.CPUAccessFlags = 0; + texDesc.MiscFlags = 0; + + HRESULT hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_FrameTexture.GetAddressOf()); + if (FAILED(hr)) { + // Handle error + return false; + } + + return true; +} diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index ad5c24221..45bb9c10d 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -8,11 +8,14 @@ #include #include #include "streaming/video/videoenhancement.h" +#include "public/common/AMFFactory.h" extern "C" { #include } +using Microsoft::WRL::ComPtr; + class D3D11VARenderer : public IFFmpegRenderer { public: @@ -33,6 +36,7 @@ class D3D11VARenderer : public IFFmpegRenderer bool setupRenderingResources(); bool 
setupVideoTexture(); + bool setupFrameTexture(); void renderOverlay(Overlay::OverlayType type); void bindColorConversion(AVFrame* frame); void prepareVideoProcessorStream(AVFrame* frame); @@ -53,24 +57,25 @@ class D3D11VARenderer : public IFFmpegRenderer int m_AdapterIndex = 0; int m_OutputIndex = 0; - IDXGIFactory5* m_Factory; + ComPtr m_Factory; + // Cannot convert to ComPtr because of av_buffer_unref() ID3D11Device* m_Device; - IDXGISwapChain4* m_SwapChain; ID3D11DeviceContext* m_DeviceContext; + ComPtr m_SwapChain; ID3D11RenderTargetView* m_RenderTargetView; SDL_mutex* m_ContextLock; - ID3D11VideoDevice* m_VideoDevice; - ID3D11VideoContext2* m_VideoContext; - Microsoft::WRL::ComPtr m_VideoProcessor; - Microsoft::WRL::ComPtr m_VideoProcessorEnumerator; - D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC m_OutputViewDesc; - D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC m_InputViewDesc; + ComPtr m_VideoDevice; + ComPtr m_VideoContext; + ComPtr m_VideoProcessor; + ComPtr m_VideoProcessorEnumerator; + D3D11_VIDEO_PROCESSOR_CAPS m_VideoProcessorCapabilities; D3D11_VIDEO_PROCESSOR_STREAM m_StreamData; - Microsoft::WRL::ComPtr m_OutputView; - Microsoft::WRL::ComPtr m_InputView; - ID3D11Resource* m_BackBufferResource; + ComPtr m_OutputView; + ComPtr m_InputView; + ComPtr m_BackBufferResource; VideoEnhancement* m_VideoEnhancement; + bool m_AutoStreamSuperResolution = false; // Variable unused, but keep it as reference for debugging purpose DXGI_COLOR_SPACE_TYPE m_ColorSpaces[26] = { @@ -102,8 +107,6 @@ class D3D11VARenderer : public IFFmpegRenderer DXGI_COLOR_SPACE_CUSTOM, // 25 }; - ID3D11ShaderResourceView* m_VideoTextureResourceView; - DECODER_PARAMETERS m_DecoderParams; int m_DisplayWidth; int m_DisplayHeight; @@ -113,20 +116,41 @@ class D3D11VARenderer : public IFFmpegRenderer bool m_AllowTearing; - ID3D11PixelShader* m_VideoGenericPixelShader; - ID3D11PixelShader* m_VideoBt601LimPixelShader; - ID3D11PixelShader* m_VideoBt2020LimPixelShader; - ID3D11Buffer* m_VideoVertexBuffer; + 
ComPtr m_VideoGenericPixelShader; + ComPtr m_VideoBt601LimPixelShader; + ComPtr m_VideoBt2020LimPixelShader; + ComPtr m_VideoVertexBuffer; - ID3D11Texture2D* m_VideoTexture; + ComPtr m_FrameTexture; + ComPtr m_VideoTexture; ID3D11ShaderResourceView* m_VideoTextureResourceViews[2]; + float m_ScaleUp = 1; + struct { + int width; + int height; + int left; + int top; + } m_OutputTexture; + SDL_SpinLock m_OverlayLock; - ID3D11Buffer* m_OverlayVertexBuffers[Overlay::OverlayMax]; - ID3D11Texture2D* m_OverlayTextures[Overlay::OverlayMax]; - ID3D11ShaderResourceView* m_OverlayTextureResourceViews[Overlay::OverlayMax]; - ID3D11PixelShader* m_OverlayPixelShader; + ComPtr m_OverlayVertexBuffers[Overlay::OverlayMax]; + ComPtr m_OverlayTextures[Overlay::OverlayMax]; + ComPtr m_OverlayTextureResourceViews[Overlay::OverlayMax]; + ComPtr m_OverlayPixelShader; AVBufferRef* m_HwDeviceContext; + + // AMD (AMF) + amf::AMFContextPtr m_AmfContext; + amf::AMFSurfacePtr m_AmfInputSurface; + amf::AMFComponentPtr m_AmfDenoiser; + amf::AMFComponentPtr m_AmfFormatConverter; + amf::AMFComponentPtr m_AmfUpScaler; + // amf::AMFComponentPtr does not work for m_AmfDownScaler, have to use raw pointer + amf::AMFComponent* m_AmfDownScaler; + + bool m_AmfInitialized = false; + }; diff --git a/third-party/AMF b/third-party/AMF new file mode 160000 index 000000000..85eea8d43 --- /dev/null +++ b/third-party/AMF @@ -0,0 +1 @@ +Subproject commit 85eea8d43511967dcf98f063d3d3efa573536ae3 From 4c965dbd5379657df47b62871ebac82bd1cf1c62 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 29 Mar 2024 22:57:50 +0100 Subject: [PATCH 32/53] Color rendering fix for NVIDIA and Intel when using VideoProcessor - Update the pipeline for Video Enhancement by adding Shaders (the same as standard rendering without Enhancement) after processing the video via VideoProcessor. - This update also fix the issue of crashing the rendering while resizing bigger the window in Video Enhancement mode. 
--- .../video/ffmpeg-renderers/d3d11va.cpp | 467 ++++++++++-------- .../video/ffmpeg-renderers/d3d11va.h | 38 +- 2 files changed, 259 insertions(+), 246 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index e39f85d21..5833e3195 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -391,6 +391,7 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter bool success = false; ComPtr adapter; DXGI_ADAPTER_DESC1 adapterDesc; + ComPtr pMultithread; HRESULT hr; #ifdef QT_DEBUG @@ -470,6 +471,12 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter goto Exit; } + // Avoid the application to crash in case of multithread conflict on the same resource + if(SUCCEEDED(m_Device->QueryInterface(__uuidof(ID3D11Multithread), (void**)&pMultithread))) + { + pMultithread->SetMultithreadProtected(true); + } + if(m_VideoEnhancement->isVideoEnhancementEnabled() && !createVideoProcessor()){ // Disable enhancement if the Video Processor creation failed m_VideoEnhancement->enableVideoEnhancement(false); @@ -735,8 +742,8 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_FILL, true); m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_FILL_COLOR, backgroundColor); res = m_AmfFormatConverter->Init(inputSurfaceFormat, - m_DecoderParams.width, - m_DecoderParams.height); + m_DecoderParams.width, + m_DecoderParams.height); if (res != AMF_OK) goto Error; // Upscale initialization @@ -744,7 +751,7 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); // VideoSR1.1 only supports upscaling ratio from 1.1x to 2.0x if( - m_OutputTexture.width * m_ScaleUp / m_DecoderParams.width >= 1.1 + m_OutputTexture.width * 
m_ScaleUp / m_DecoderParams.width >= 1.1 && m_OutputTexture.width * m_ScaleUp / m_DecoderParams.width <= 2 && m_OutputTexture.height * m_ScaleUp / m_DecoderParams.height >= 1.1 && m_OutputTexture.height * m_ScaleUp / m_DecoderParams.height <= 2 @@ -781,8 +788,8 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, 2.00); m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FRAME_RATE, m_DecoderParams.frameRate); res = m_AmfDownScaler->Init(outputSurfaceFormat, - m_OutputTexture.width * m_ScaleUp, - m_OutputTexture.height * m_ScaleUp); + m_OutputTexture.width * m_ScaleUp, + m_OutputTexture.height * m_ScaleUp); if (res != AMF_OK) goto Error; if(!activate){ @@ -1084,7 +1091,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) m_DisplayWidth = m_DisplayWidth & ~1; m_DisplayHeight = m_DisplayHeight & ~1; - // As m_Display correponds to the application window, which may not have the same ratio as the Frame, + // As m_Display corresponds to the application window, which may not have the same ratio as the Frame, // we calculate the size of the final texture to fit in the window without distortion m_OutputTexture.width = m_DisplayWidth; m_OutputTexture.height = m_DisplayHeight; @@ -1203,7 +1210,12 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } else if(m_VideoEnhancement->isVendorIntel()){ enableIntelHDR(); } else if(m_VideoEnhancement->isVendorNVIDIA()){ - enableNvidiaHDR(); + if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ + // Disable SDR->HDR feature because the screen becomes grey when activated + enableNvidiaHDR(false); + } else { + enableNvidiaHDR(); + } } } } @@ -1245,14 +1257,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; } - // [TODO] With NVIDIA RTX, while renderering using VideoProcessor with HDR activated in Moonlight, - // DXGI_FORMAT_R10G10B10A2_UNORM gives worse result than 
DXGI_FORMAT_R8G8B8A8_UNORM. - // Without this fix, HDR off on server renders gray screen and VSR is inactive (DXGI_COLOR_SPACE_TYPE type 8). - // For user perspective, it is better to not see such a bug, so for the moment I choose to force DXGI_FORMAT_R8G8B8A8_UNORM - if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_VideoEnhancement->isVendorNVIDIA()){ - swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; - } - // Use DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING with flip mode for non-vsync case, if possible. // NOTE: This is only possible in windowed or borderless windowed mode. if (!params->enableVsync) { @@ -1330,10 +1334,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - if (!setupRenderingResources()) { - return false; - } - { m_HwDeviceContext = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_D3D11VA); if (!m_HwDeviceContext) { @@ -1364,7 +1364,12 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } // Create our video textures and SRVs - if (!setupVideoTexture() || !setupFrameTexture()) { + if (!setupEnhancedTexture() || !setupVideoTexture() || !setupAmfTexture()) { + return false; + } + + // As for Video Enhancement, the RTV uses a texture, it needs to be setup after the textures creation + if (!setupRenderingResources()) { return false; } @@ -1434,9 +1439,6 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) "IDXGISwapChain::SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020) failed: %x", hr); } - if (m_VideoProcessor) { - m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); - }; } else { // Restore default sRGB colorspace @@ -1446,9 +1448,6 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) "IDXGISwapChain::SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709) failed: %x", hr); } - if (m_VideoProcessor) { - m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); - } } m_LastColorTrc = 
frame->color_trc; @@ -1457,6 +1456,10 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) // Present according to the decoder parameters hr = m_SwapChain->Present(0, flags); + // Unbind the shader resource view to avoid conflicts on the next frame + ID3D11ShaderResourceView* nullSRV[2] = { nullptr, nullptr }; + m_DeviceContext->PSSetShaderResources(0, 2, nullSRV); + // Release the context lock unlockContext(this); @@ -1607,11 +1610,6 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) */ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) { - //Do Nothing when Moonlight's HDR is disabled - if(!(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT)){ - return; - } - bool frameFullRange = isFrameFullRange(frame); int frameColorSpace = getFrameColorspace(frame); @@ -1624,13 +1622,15 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) m_LastFullRange = frameFullRange; switch (frameColorSpace) { + case COLORSPACE_REC_2020: - // This Stream Color Space accepts HDR mode from Server, but NVIDIA AI-HDR will be disabled (which is fine as we already have native HDR) m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, frameFullRange ? DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020 : DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), frameFullRange ? DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020 : DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); + if(m_VideoEnhancement->isVendorNVIDIA()){ - // [TODO] Remove this line if NVIDIA fix the issue of having VSR not working (add a gray filter) - // while HDR is activated for Stream content (swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM;) - enableNvidiaVideoSuperResolution(); // Turn it "false" if we prefer to not see the white border around elements when VSR is active. 
+ // VSR from Nvidia does not work yet on HDR content (Observation by March 28th, 2024) + // https://en.wikipedia.org/wiki/Video_Super_Resolution#:~:text=The%20feature%20supports%20input%20resolutions,likely%20added%20in%20the%20future + enableNvidiaVideoSuperResolution(false); } if(m_AmfInitialized){ m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); @@ -1640,13 +1640,13 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); } break; + default: - // This Stream Color Space is SDR, which enable the use of NVIDIA AI-HDR (Moonlight's HDR needs to be enabled) - // I don't know why, it is gray when HDR is on on Moonlight while using DXGI_FORMAT_R10G10B10A2_UNORM for the SwapChain, - // the fix is to force using DXGI_FORMAT_R8G8B8A8_UNORM which seems somehow not impacting the color rendering - m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, frameFullRange ? DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709 : DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, frameFullRange ? DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709 : DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709); + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), frameFullRange ? 
DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709 : DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709); + if(m_VideoEnhancement->isVendorNVIDIA()){ - // Always enable NVIDIA VSR for SDR Stream content + // Always enable NVIDIA VSR for SDR content enableNvidiaVideoSuperResolution(); } if(m_AmfInitialized){ @@ -1656,11 +1656,17 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, AMF_COLOR_RANGE_UNDEFINED); m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); } + } } void D3D11VARenderer::renderVideo(AVFrame* frame) { + // Bind video rendering vertex buffer + UINT stride = sizeof(VERTEX); + UINT offset = 0; + m_DeviceContext->IASetVertexBuffers(0, 1, m_VideoVertexBuffer.GetAddressOf(), &stride, &offset); + D3D11_BOX srcBox; srcBox.left = 0; srcBox.top = 0; @@ -1669,73 +1675,61 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) srcBox.front = 0; srcBox.back = 1; - if(m_VideoEnhancement->isVideoEnhancementEnabled()){ - - // Setup for AMD AMF - if(m_AmfInitialized){ - // Copy this frame (minus alignment padding) into a temporary video texture - m_DeviceContext->CopySubresourceRegion(m_FrameTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); - m_AmfContext->CreateSurfaceFromDX11Native(m_FrameTexture.Get(), &m_AmfInputSurface, nullptr); - - amf::AMFDataPtr m_AmfData(m_AmfInputSurface); - - // Denoisier => Reduce deblocking artifacts due to compressed streamed content - m_AmfDenoiser->SubmitInput(m_AmfData); - m_AmfDenoiser->QueryOutput(&m_AmfData); - m_AmfDenoiser->Flush(); - - // Format converter => To provide best color rendering - m_AmfFormatConverter->SubmitInput(m_AmfData); - m_AmfFormatConverter->QueryOutput(&m_AmfData); - m_AmfFormatConverter->Flush(); - - // Up Scaling => To a higher resolution than the application window to give more surface to the VSR to generate details and thus picture clarity - 
m_AmfUpScaler->SubmitInput(m_AmfData); - m_AmfUpScaler->QueryOutput(&m_AmfData); - m_AmfUpScaler->Flush(); - - // We don't need to scale down if the pixel ratio is already 1:1 - if(m_ScaleUp != 1){ - // Down Scaling => To avoid a blur effect if relying on VideoProcessor, this method provides clear pixel rendering - m_AmfDownScaler->SubmitInput(m_AmfData); - m_AmfDownScaler->QueryOutput(&m_AmfData); - m_AmfDownScaler->Flush(); - } - - amf::AMFSurfacePtr amfOutputSurface(m_AmfData); - m_DeviceContext->CopyResource(m_VideoTexture.Get(), (ID3D11Texture2D*)amfOutputSurface->GetPlaneAt(0)->GetNative()); - } else { - // Copy this frame (minus alignment padding) into a temporary video texture - m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); - } - - } else { - - // Bind video rendering vertex buffer - UINT stride = sizeof(VERTEX); - UINT offset = 0; - m_DeviceContext->IASetVertexBuffers(0, 1, m_VideoVertexBuffer.GetAddressOf(), &stride, &offset); + // Setup for AMD AMF + if(m_AmfInitialized){ // Copy this frame (minus alignment padding) into a temporary video texture - m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + m_DeviceContext->CopySubresourceRegion(m_AmfTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + m_AmfContext->CreateSurfaceFromDX11Native(m_AmfTexture.Get(), &m_AmfInputSurface, nullptr); + + amf::AMFDataPtr m_AmfData(m_AmfInputSurface); + + // Denoiser => Reduce deblocking artifacts due to compressed streamed content + m_AmfDenoiser->SubmitInput(m_AmfData); + m_AmfDenoiser->QueryOutput(&m_AmfData); + m_AmfDenoiser->Flush(); + + // Format converter => To provide best color rendering + m_AmfFormatConverter->SubmitInput(m_AmfData); + m_AmfFormatConverter->QueryOutput(&m_AmfData); + m_AmfFormatConverter->Flush(); + 
// Up Scaling => To a higher resolution than the application window to give more surface to the VSR to generate details and thus picture clarity + m_AmfUpScaler->SubmitInput(m_AmfData); + m_AmfUpScaler->QueryOutput(&m_AmfData); + m_AmfUpScaler->Flush(); + + // We don't need to scale down if the pixel ratio is already 1:1 + if(m_ScaleUp != 1){ + // Down Scaling => To avoid a blur effect if relying on VideoProcessor, this method provides clear pixel rendering + m_AmfDownScaler->SubmitInput(m_AmfData); + m_AmfDownScaler->QueryOutput(&m_AmfData); + m_AmfDownScaler->Flush(); + } + amf::AMFSurfacePtr amfOutputSurface(m_AmfData); + m_DeviceContext->CopyResource(m_VideoTexture.Get(), (ID3D11Texture2D*)amfOutputSurface->GetPlaneAt(0)->GetNative()); } - // Draw the video if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_EnhancedTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); // Prepare the Stream prepareVideoProcessorStream(frame); - // Render to the front the frames processed by the Video Processor + // Process operations on the output Texture m_VideoContext->VideoProcessorBlt(m_VideoProcessor.Get(), m_OutputView.Get(), 0, 1, &m_StreamData); } else { - // Bind our CSC shader (and constant buffer, if required) - bindColorConversion(frame); + // Copy this frame (minus alignment padding) into a temporary video texture + m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + } - // Bind SRVs for this frame - m_DeviceContext->PSSetShaderResources(0, 2, m_VideoTextureResourceViews); + // Bind our CSC shader (and constant buffer, if required) + bindColorConversion(frame); - // Draw the video - m_DeviceContext->DrawIndexed(6, 0, 0); - } + // Bind SRVs for this frame + m_DeviceContext->PSSetShaderResources(0, 2, 
m_VideoTextureResourceViews); + + // Process shaders on the output texture + m_DeviceContext->DrawIndexed(6, 0, 0); } /** @@ -1810,84 +1804,71 @@ bool D3D11VARenderer::createVideoProcessor() bool D3D11VARenderer::initializeVideoProcessor() { HRESULT hr; - D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputViewDesc; + + // INPUT setting D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputViewDesc; - m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, 0); + ZeroMemory(&inputViewDesc, sizeof(inputViewDesc)); + inputViewDesc.FourCC = 0; + inputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; + inputViewDesc.Texture2D.MipSlice = 0; + inputViewDesc.Texture2D.ArraySlice = 0; - // Set Background color - D3D11_VIDEO_COLOR bgColor; - bgColor.RGBA = { 0, 0, 0, 1 }; // black color - m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor.Get(), false, &bgColor); + hr = m_VideoDevice->CreateVideoProcessorInputView( + m_EnhancedTexture.Get(), + m_VideoProcessorEnumerator.Get(), + &inputViewDesc, + (ID3D11VideoProcessorInputView**)&m_InputView); + if (FAILED(hr)) + return false; + + RECT inputRect = { 0 }; + inputRect.right = m_DisplayWidth; + inputRect.bottom = m_DisplayHeight; + m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor.Get(), 0, true, &inputRect); + + m_VideoContext->VideoProcessorSetStreamFrameFormat(m_VideoProcessor.Get(), 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE); + + // Initialize Color spaces, this will be adjusted once the first frame is received + if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); + } else { + m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709); + } + + + // OUTPUT setting + D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputViewDesc; 
ZeroMemory(&outputViewDesc, sizeof(outputViewDesc)); outputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D; outputViewDesc.Texture2D.MipSlice = 0; hr = m_VideoDevice->CreateVideoProcessorOutputView( - m_BackBufferResource.Get(), + m_VideoTexture.Get(), m_VideoProcessorEnumerator.Get(), &outputViewDesc, (ID3D11VideoProcessorOutputView**)&m_OutputView); if (FAILED(hr)) return false; - ZeroMemory(&inputViewDesc, sizeof(inputViewDesc)); - inputViewDesc.FourCC = 0; - inputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; - inputViewDesc.Texture2D.MipSlice = 0; - inputViewDesc.Texture2D.ArraySlice = 0; + RECT targetRect = { 0 }; + targetRect.right = m_DisplayWidth; + targetRect.bottom = m_DisplayHeight; + m_VideoContext->VideoProcessorSetOutputTargetRect(m_VideoProcessor.Get(), true, &targetRect); - hr = m_VideoDevice->CreateVideoProcessorInputView( - m_VideoTexture.Get(), m_VideoProcessorEnumerator.Get(), &inputViewDesc, (ID3D11VideoProcessorInputView**)&m_InputView); - if (FAILED(hr)) - return false; - - RECT dstRect = { 0 }; - dstRect.right = m_DisplayWidth; - dstRect.bottom = m_DisplayHeight; - if(m_VideoEnhancement->isVideoEnhancementEnabled()){ - dstRect.right = m_OutputTexture.width; - dstRect.bottom = m_OutputTexture.height; - - // Scale the source to the destination surface while keeping the same ratio - float ratioWidth = static_cast(m_DisplayWidth) / static_cast(m_DecoderParams.width); - float ratioHeight = static_cast(m_DisplayHeight) / static_cast(m_DecoderParams.height); - // When VSR for NVIDIA is in use in Window mode, it may bug (black screen) the rendering while streatching bigger the window - if(ratioHeight < ratioWidth){ - // Adjust the Width - long width = static_cast(std::floor(m_DecoderParams.width * ratioHeight)); - dstRect.left = static_cast(std::floor( abs(m_DisplayWidth - width) / 2 )); - dstRect.right = dstRect.left + width; - } else if(ratioWidth < ratioHeight) { - // Adjust the Height - long height = 
static_cast(std::floor(m_DecoderParams.height * ratioWidth)); - dstRect.top = static_cast(std::floor( abs(m_DisplayHeight - height) / 2 )); - dstRect.bottom = dstRect.top + height; - } - } - m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor.Get(), 0, true, &dstRect); + m_VideoContext->VideoProcessorSetStreamOutputRate(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, false, NULL); - m_VideoContext->VideoProcessorSetStreamFrameFormat(m_VideoProcessor.Get(), 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE); - - ZeroMemory(&m_StreamData, sizeof(m_StreamData)); - m_StreamData.Enable = true; - m_StreamData.OutputIndex = m_OutputIndex; - m_StreamData.InputFrameOrField = 0; - m_StreamData.PastFrames = 0; - m_StreamData.FutureFrames = 0; - m_StreamData.ppPastSurfaces = nullptr; - m_StreamData.ppFutureSurfaces = nullptr; - m_StreamData.pInputSurface = m_InputView.Get(); - m_StreamData.ppPastSurfacesRight = nullptr; - m_StreamData.ppFutureSurfacesRight = nullptr; - m_StreamData.pInputSurfaceRight = nullptr; + // Set Background color + D3D11_VIDEO_COLOR bgColor; + bgColor.RGBA = { 0, 0, 0, 1 }; // black color + m_VideoContext->VideoProcessorSetOutputBackgroundColor(m_VideoProcessor.Get(), false, &bgColor); - // Set OutPut ColorSpace + // Initialize Color spaces, this will be adjusted once the first frame is received if(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT){ - m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020); } else { - m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); + m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709); } // The section is a customization per vendor to slightly enhance (non-AI methods) the 
frame appearance. @@ -1900,7 +1881,7 @@ bool D3D11VARenderer::initializeVideoProcessor() m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) // Sharpen sligthly the picture to enhance details if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT) - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 30); // (0 / 0 / 100) + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 20); // (0 / 0 / 100) } else if(m_VideoEnhancement->isVendorNVIDIA()){ // Reduce blocking artifacts @@ -1908,12 +1889,21 @@ bool D3D11VARenderer::initializeVideoProcessor() m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION, true, 30); // (0 / 0 / 100) // Sharpen sligthly the picture to enhance details if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT) - m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 30); // (0 / 0 / 100) - + m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 20); // (0 / 0 / 100) } - // Default on SDR, it will switch to HDR automatically at the 1st frame received if the Stream source has HDR active. 
- m_VideoContext->VideoProcessorSetStreamColorSpace1(m_VideoProcessor.Get(), 0, DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709); + ZeroMemory(&m_StreamData, sizeof(m_StreamData)); + m_StreamData.Enable = true; + m_StreamData.OutputIndex = m_OutputIndex; + m_StreamData.InputFrameOrField = 0; + m_StreamData.PastFrames = 0; + m_StreamData.FutureFrames = 0; + m_StreamData.ppPastSurfaces = nullptr; + m_StreamData.ppFutureSurfaces = nullptr; + m_StreamData.pInputSurface = m_InputView.Get(); + m_StreamData.ppPastSurfacesRight = nullptr; + m_StreamData.ppFutureSurfacesRight = nullptr; + m_StreamData.pInputSurfaceRight = nullptr; return true; } @@ -2000,12 +1990,12 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) newSurface = nullptr; VERTEX verts[] = - { - {renderRect.x, renderRect.y, 0, 1}, - {renderRect.x, renderRect.y+renderRect.h, 0, 0}, - {renderRect.x+renderRect.w, renderRect.y, 1, 1}, - {renderRect.x+renderRect.w, renderRect.y+renderRect.h, 1, 0}, - }; + { + {renderRect.x, renderRect.y, 0, 1}, + {renderRect.x, renderRect.y+renderRect.h, 0, 0}, + {renderRect.x+renderRect.w, renderRect.y, 1, 1}, + {renderRect.x+renderRect.w, renderRect.y+renderRect.h, 1, 0}, + }; D3D11_BUFFER_DESC vbDesc = {}; vbDesc.ByteWidth = sizeof(verts); @@ -2205,10 +2195,10 @@ bool D3D11VARenderer::setupRenderingResources() } const D3D11_INPUT_ELEMENT_DESC vertexDesc[] = - { - { "POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 }, - { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0 }, - }; + { + { "POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 }, + { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0 }, + }; ComPtr inputLayout; hr = m_Device->CreateInputLayout(vertexDesc, ARRAYSIZE(vertexDesc), vertexShaderBytecode.constData(), vertexShaderBytecode.length(), inputLayout.GetAddressOf()); if (SUCCEEDED(hr)) { @@ -2307,7 +2297,6 @@ bool 
D3D11VARenderer::setupRenderingResources() } hr = m_Device->CreateRenderTargetView(m_BackBufferResource.Get(), nullptr, &m_RenderTargetView); - // m_BackBufferResource is still needed in createVideoProcessor(), therefore will be released later if (FAILED(hr)) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateRenderTargetView() failed: %x", @@ -2355,18 +2344,17 @@ bool D3D11VARenderer::setupRenderingResources() dst.w = m_DisplayWidth; dst.h = m_DisplayHeight; StreamUtils::scaleSourceToDestinationSurface(&src, &dst); - // Convert screen space to normalized device coordinates SDL_FRect renderRect; StreamUtils::screenSpaceToNormalizedDeviceCoords(&dst, &renderRect, m_DisplayWidth, m_DisplayHeight); VERTEX verts[] = - { - {renderRect.x, renderRect.y, 0, 1.0f}, - {renderRect.x, renderRect.y+renderRect.h, 0, 0}, - {renderRect.x+renderRect.w, renderRect.y, 1.0f, 1.0f}, - {renderRect.x+renderRect.w, renderRect.y+renderRect.h, 1.0f, 0}, - }; + { + {renderRect.x, renderRect.y, 0, 1.0f}, + {renderRect.x, renderRect.y+renderRect.h, 0, 0}, + {renderRect.x+renderRect.w, renderRect.y, 1.0f, 1.0f}, + {renderRect.x+renderRect.w, renderRect.y+renderRect.h, 1.0f, 0}, + }; D3D11_BUFFER_DESC vbDesc = {}; vbDesc.ByteWidth = sizeof(verts); @@ -2433,21 +2421,66 @@ bool D3D11VARenderer::setupRenderingResources() return true; } +/** + * \brief Set the Texture used by AMD AMF + * + * Set a YUV texture to be processed by AMD AMF to upscale and denoise + * + * \return bool Returns true if the texture is created + */ +bool D3D11VARenderer::setupAmfTexture() +{ + // Texture description + D3D11_TEXTURE2D_DESC texDesc = {}; + // Same size as the input Frame + texDesc.Width = m_DecoderParams.width; + texDesc.Height = m_DecoderParams.height; + texDesc.MipLevels = 1; + texDesc.ArraySize = 1; + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; + texDesc.SampleDesc.Quality = 0; + texDesc.SampleDesc.Count = 1; + texDesc.Usage = D3D11_USAGE_DEFAULT; + texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET; + texDesc.CPUAccessFlags = 0; + texDesc.MiscFlags = 0; + + HRESULT hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_AmfTexture.GetAddressOf()); + if (FAILED(hr)) { + // Handle error + return false; + } + + return true; +} + +/** + * \brief Set the Texture used by the Shaders + * + * Set a YUV texture to be processed by the shaders to convert to colorisatin to RGBA + * + * \return bool Returns true if the texture is created + */ bool D3D11VARenderer::setupVideoTexture() { HRESULT hr; D3D11_TEXTURE2D_DESC texDesc = {}; // Size of the output texture - texDesc.Width = m_DecoderParams.width; - texDesc.Height = m_DecoderParams.height; - if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_AmfInitialized){ + if(m_AmfInitialized){ texDesc.Width = m_OutputTexture.width; texDesc.Height = m_OutputTexture.height; + } if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + texDesc.Width = m_DisplayWidth; + texDesc.Height = m_DisplayHeight; + } else { + texDesc.Width = m_DecoderParams.width; + texDesc.Height = m_DecoderParams.height; } + texDesc.MipLevels = 1; texDesc.ArraySize = 1; - if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_AmfInitialized){ + if(m_AmfInitialized){ texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R10G10B10A2_UNORM : DXGI_FORMAT_R8G8B8A8_UNORM; } else { texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; @@ -2475,39 +2508,49 @@ bool D3D11VARenderer::setupVideoTexture() return false; } - // Create luminance and chrominance SRVs for each plane of the texture - D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc = {}; - srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D; - srvDesc.Texture2D.MostDetailedMip = 0; - srvDesc.Texture2D.MipLevels = 1; - srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R16_UNORM : DXGI_FORMAT_R8_UNORM; - hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[0]); - if (FAILED(hr)) { - m_VideoTextureResourceViews[0] = nullptr; - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "ID3D11Device::CreateShaderResourceView() failed: %x", - hr); - return false; - } + if(!m_AmfInitialized){ + // Create luminance and chrominance SRVs for each plane of the texture + D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc = {}; + srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D; + srvDesc.Texture2D.MostDetailedMip = 0; + srvDesc.Texture2D.MipLevels = 1; + srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R16_UNORM : DXGI_FORMAT_R8_UNORM; + hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[0]); + if (FAILED(hr)) { + m_VideoTextureResourceViews[0] = nullptr; + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "ID3D11Device::CreateShaderResourceView() failed: %x", + hr); + return false; + } - srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_R16G16_UNORM : DXGI_FORMAT_R8G8_UNORM; - hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[1]); - if (FAILED(hr)) { - m_VideoTextureResourceViews[1] = nullptr; - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "ID3D11Device::CreateShaderResourceView() failed: %x", - hr); - return false; + srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R16G16_UNORM : DXGI_FORMAT_R8G8_UNORM; + hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[1]); + if (FAILED(hr)) { + m_VideoTextureResourceViews[1] = nullptr; + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "ID3D11Device::CreateShaderResourceView() failed: %x", + hr); + return false; + } } return true; } -bool D3D11VARenderer::setupFrameTexture() +/** + * \brief Set the Texture used by the Video Processor + * + * Set a RGBA texture to be processed by the Video processor to upscale and denoise + * + * \return bool Returns true if the texture is created + */ +bool D3D11VARenderer::setupEnhancedTexture() { - // Texture description + HRESULT hr; D3D11_TEXTURE2D_DESC texDesc = {}; - // Same size as the input Frame + + // Size of the output texture texDesc.Width = m_DecoderParams.width; texDesc.Height = m_DecoderParams.height; texDesc.MipLevels = 1; @@ -2516,17 +2559,15 @@ bool D3D11VARenderer::setupFrameTexture() texDesc.SampleDesc.Quality = 0; texDesc.SampleDesc.Count = 1; texDesc.Usage = D3D11_USAGE_DEFAULT; - texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; - // The flag D3D11_BIND_RENDER_TARGET is needed to enable the use of GPU enhancement - if(m_VideoEnhancement->isVideoEnhancementEnabled()){ - texDesc.BindFlags |= D3D11_BIND_RENDER_TARGET; - } + texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET; texDesc.CPUAccessFlags = 0; texDesc.MiscFlags = 0; - HRESULT hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_FrameTexture.GetAddressOf()); + hr = 
m_Device->CreateTexture2D(&texDesc, nullptr, m_EnhancedTexture.GetAddressOf()); if (FAILED(hr)) { - // Handle error + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "ID3D11Device::CreateTexture2D() failed: %x", + hr); return false; } diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index 45bb9c10d..ba68b4db9 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -35,8 +35,9 @@ class D3D11VARenderer : public IFFmpegRenderer static void unlockContext(void* lock_ctx); bool setupRenderingResources(); + bool setupAmfTexture(); bool setupVideoTexture(); - bool setupFrameTexture(); + bool setupEnhancedTexture(); void renderOverlay(Overlay::OverlayType type); void bindColorConversion(AVFrame* frame); void prepareVideoProcessorStream(AVFrame* frame); @@ -77,36 +78,6 @@ class D3D11VARenderer : public IFFmpegRenderer VideoEnhancement* m_VideoEnhancement; bool m_AutoStreamSuperResolution = false; - // Variable unused, but keep it as reference for debugging purpose - DXGI_COLOR_SPACE_TYPE m_ColorSpaces[26] = { - DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709, // 0 - A - DXGI_COLOR_SPACE_RGB_FULL_G10_NONE_P709, // 1 - A - DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709, // 2 - I * A - DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P2020, // 3 - I* - DXGI_COLOR_SPACE_RESERVED, // 4 - DXGI_COLOR_SPACE_YCBCR_FULL_G22_NONE_P709_X601, // 5 - O A - DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P601, // 6 - I A - DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P601, // 7 - O A - DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709, // 8 - I A - DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709, // 9 - A - DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020, // 10 - I - DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P2020, // 11 - O - DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020, // 12 - O O - DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020, // 13 - I - DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020, // 14 - I I* - 
DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_TOPLEFT_P2020, // 15 - I - DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_TOPLEFT_P2020, // 16 - I - DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P2020, // 17 - I I* - DXGI_COLOR_SPACE_YCBCR_STUDIO_GHLG_TOPLEFT_P2020, // 18 - I - DXGI_COLOR_SPACE_YCBCR_FULL_GHLG_TOPLEFT_P2020, // 19 - I - DXGI_COLOR_SPACE_RGB_STUDIO_G24_NONE_P709, // 20 - I I* - DXGI_COLOR_SPACE_RGB_STUDIO_G24_NONE_P2020, // 21 - I* - DXGI_COLOR_SPACE_YCBCR_STUDIO_G24_LEFT_P709, // 22 - I - DXGI_COLOR_SPACE_YCBCR_STUDIO_G24_LEFT_P2020, // 23 - I I - DXGI_COLOR_SPACE_YCBCR_STUDIO_G24_TOPLEFT_P2020, // 24 - I - DXGI_COLOR_SPACE_CUSTOM, // 25 - }; - DECODER_PARAMETERS m_DecoderParams; int m_DisplayWidth; int m_DisplayHeight; @@ -121,9 +92,11 @@ class D3D11VARenderer : public IFFmpegRenderer ComPtr m_VideoBt2020LimPixelShader; ComPtr m_VideoVertexBuffer; - ComPtr m_FrameTexture; + ComPtr m_AmfTexture; ComPtr m_VideoTexture; + ComPtr m_EnhancedTexture; ID3D11ShaderResourceView* m_VideoTextureResourceViews[2]; + float m_ScaleUp = 1; struct { int width; @@ -132,7 +105,6 @@ class D3D11VARenderer : public IFFmpegRenderer int top; } m_OutputTexture; - SDL_SpinLock m_OverlayLock; ComPtr m_OverlayVertexBuffers[Overlay::OverlayMax]; ComPtr m_OverlayTextures[Overlay::OverlayMax]; From 2afd2945f46ecbcae2b067a45bdc033f54cefca8 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sun, 31 Mar 2024 03:18:12 +0200 Subject: [PATCH 33/53] Color rendering fix for AMD when using AMF Update the pipeline for Video Enhancement by adding Shaders (the same as standard rendering without Enhancement) after processing the video via AMF. 
--- .../video/ffmpeg-renderers/d3d11va.cpp | 397 ++++++++---------- .../video/ffmpeg-renderers/d3d11va.h | 16 +- 2 files changed, 184 insertions(+), 229 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 5833e3195..93bf45eb0 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -113,9 +113,9 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) m_AmfContext(nullptr), m_AmfInputSurface(nullptr), m_AmfDenoiser(nullptr), - m_AmfFormatConverter(nullptr), + m_AmfFormatConverterYUVtoRGB(nullptr), m_AmfUpScaler(nullptr), - m_AmfDownScaler(nullptr), + m_AmfFormatConverterRGBtoYUV(nullptr), m_AmfInitialized(false) { RtlZeroMemory(m_VideoTextureResourceViews, sizeof(m_VideoTextureResourceViews)); @@ -145,20 +145,20 @@ D3D11VARenderer::~D3D11VARenderer() m_AmfDenoiser->Terminate(); m_AmfDenoiser = nullptr; } - if(m_AmfFormatConverter){ - // Format converter - m_AmfFormatConverter->Terminate(); - m_AmfFormatConverter = nullptr; + if(m_AmfFormatConverterYUVtoRGB){ + // Format converter YUV to RGB + m_AmfFormatConverterYUVtoRGB->Terminate(); + m_AmfFormatConverterYUVtoRGB = nullptr; } if(m_AmfUpScaler){ // Up Scaler m_AmfUpScaler->Terminate(); m_AmfUpScaler = nullptr; } - if(m_AmfDownScaler){ - // Down Scaler - m_AmfDownScaler->Terminate(); - m_AmfDownScaler = nullptr; + if(m_AmfFormatConverterRGBtoYUV){ + // Format converter RGB to YUV + m_AmfFormatConverterRGBtoYUV->Terminate(); + m_AmfFormatConverterRGBtoYUV = nullptr; } if(m_AmfContext){ // Context @@ -259,40 +259,6 @@ void D3D11VARenderer::setHdrMode(bool enabled){ &streamHDRMetaData ); - // Set HDR Input for AMF Converter - if(m_AmfInitialized){ - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); - - // Values taken from AMF Sample: - // 
https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/5b32766b801434be61350c292127a9ac022b1268/amf/public/samples/CPPSamples/common/SwapChainDXGI.cpp#L740 - // We can initialize with Studio range first, it will be corrected to Full range if needed once the first frame is received. - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, AMF_COLOR_RANGE_STUDIO); - - AMFHDRMetadata amfHDRMetadata; - amfHDRMetadata.redPrimary[0] = amf_uint16(streamHDRMetaData.RedPrimary[0]); - amfHDRMetadata.redPrimary[1] = amf_uint16(streamHDRMetaData.RedPrimary[1]); - amfHDRMetadata.greenPrimary[0] = amf_uint16(streamHDRMetaData.GreenPrimary[0]); - amfHDRMetadata.greenPrimary[1] = amf_uint16(streamHDRMetaData.GreenPrimary[1]); - amfHDRMetadata.bluePrimary[0] = amf_uint16(streamHDRMetaData.BluePrimary[0]); - amfHDRMetadata.bluePrimary[1] = amf_uint16(streamHDRMetaData.BluePrimary[1]); - amfHDRMetadata.whitePoint[0] = amf_uint16(streamHDRMetaData.WhitePoint[0]); - amfHDRMetadata.whitePoint[1] = amf_uint16(streamHDRMetaData.WhitePoint[1]); - amfHDRMetadata.maxMasteringLuminance = amf_uint32(streamHDRMetaData.MaxMasteringLuminance); - amfHDRMetadata.minMasteringLuminance = amf_uint32(streamHDRMetaData.MinMasteringLuminance); - amfHDRMetadata.maxContentLightLevel = 0; - amfHDRMetadata.maxFrameAverageLightLevel = 0; - - amf::AMFBufferPtr pHDRMetaDataBuffer; - m_AmfContext->AllocBuffer(amf::AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &pHDRMetaDataBuffer); - AMFHDRMetadata* pData = (AMFHDRMetadata*)pHDRMetaDataBuffer->GetNative(); - memcpy(pData, &amfHDRMetadata, sizeof(AMFHDRMetadata)); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_HDR_METADATA, pData); - - m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, 
m_DecoderParams.width, m_DecoderParams.height); - } - streamSet = true; } SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, @@ -342,39 +308,6 @@ void D3D11VARenderer::setHdrMode(bool enabled){ &outputHDRMetaData ); - // Set HDR Input for AMF Converter - if(m_AmfInitialized){ - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); - - // Values taken from AMF Sample: - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/5b32766b801434be61350c292127a9ac022b1268/amf/public/samples/CPPSamples/common/SwapChainDXGI.cpp#L732 - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_COLOR_RANGE, AMF_COLOR_RANGE_FULL); - - AMFHDRMetadata amfHDRMetadata; - amfHDRMetadata.redPrimary[0] = amf_uint16(outputHDRMetaData.RedPrimary[0]); - amfHDRMetadata.redPrimary[1] = amf_uint16(outputHDRMetaData.RedPrimary[1]); - amfHDRMetadata.greenPrimary[0] = amf_uint16(outputHDRMetaData.GreenPrimary[0]); - amfHDRMetadata.greenPrimary[1] = amf_uint16(outputHDRMetaData.GreenPrimary[1]); - amfHDRMetadata.bluePrimary[0] = amf_uint16(outputHDRMetaData.BluePrimary[0]); - amfHDRMetadata.bluePrimary[1] = amf_uint16(outputHDRMetaData.BluePrimary[1]); - amfHDRMetadata.whitePoint[0] = amf_uint16(outputHDRMetaData.WhitePoint[0]); - amfHDRMetadata.whitePoint[1] = amf_uint16(outputHDRMetaData.WhitePoint[1]); - amfHDRMetadata.maxMasteringLuminance = amf_uint32(outputHDRMetaData.MaxMasteringLuminance); - amfHDRMetadata.minMasteringLuminance = amf_uint32(outputHDRMetaData.MinMasteringLuminance); - amfHDRMetadata.maxContentLightLevel = 0; - amfHDRMetadata.maxFrameAverageLightLevel = 0; - - amf::AMFBufferPtr pHDRMetaDataBuffer; - m_AmfContext->AllocBuffer(amf::AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &pHDRMetaDataBuffer); - 
AMFHDRMetadata* pData = (AMFHDRMetadata*)pHDRMetaDataBuffer->GetNative(); - memcpy(pData, &amfHDRMetadata, sizeof(AMFHDRMetadata)); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_HDR_METADATA, pData); - - m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); - } - displaySet = true; } } @@ -681,8 +614,8 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) if(m_AmfInitialized && activate) return true; - amf::AMF_SURFACE_FORMAT inputSurfaceFormat; - amf::AMF_SURFACE_FORMAT outputSurfaceFormat; + amf::AMF_SURFACE_FORMAT SurfaceFormatYUV; + amf::AMF_SURFACE_FORMAT SurfaceFormatRGB; AMF_VIDEO_CONVERTER_COLOR_PROFILE_ENUM outputColorSpace; AMFColor backgroundColor = AMFConstructColor(0, 0, 0, 255); @@ -693,11 +626,11 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) if (res != AMF_OK) goto Error; res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVQEnhancer, &m_AmfDenoiser); if (res != AMF_OK) goto Error; - res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVideoConverter, &m_AmfFormatConverter); + res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVideoConverter, &m_AmfFormatConverterYUVtoRGB); if (res != AMF_OK) goto Error; res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFHQScaler, &m_AmfUpScaler); if (res != AMF_OK) goto Error; - res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFHQScaler, &m_AmfDownScaler); + res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVideoConverter, &m_AmfFormatConverterRGBtoYUV); if (res != AMF_OK) goto Error; res = m_AmfContext->InitDX11(m_Device); @@ -710,65 +643,85 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) goto Error; } + // VideoSR1.1 only supports upscaling ratio from 1.1x to 2.0x in RGBA + // When HDR is used, keep YUV format as RGBA render wrong colorization + if( + 
static_cast(m_DisplayWidth) / m_DecoderParams.width >= 1.1 + && static_cast(m_DisplayWidth) / m_DecoderParams.width <= 2 + && static_cast(m_DisplayHeight) / m_DecoderParams.height >= 1.1 + && static_cast(m_DisplayHeight) / m_DecoderParams.height <= 2 + && !(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) + ){ + m_amfRGB = true; + } else { + m_amfRGB = false; + } + // Format initialization - inputSurfaceFormat = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? amf::AMF_SURFACE_P010 : amf::AMF_SURFACE_NV12; - outputSurfaceFormat = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? amf::AMF_SURFACE_R10G10B10A2 : amf::AMF_SURFACE_RGBA; + SurfaceFormatYUV = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? amf::AMF_SURFACE_P010 : amf::AMF_SURFACE_NV12; + SurfaceFormatRGB = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? amf::AMF_SURFACE_R10G10B10A2 : amf::AMF_SURFACE_RGBA; outputColorSpace = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020 : AMF_VIDEO_CONVERTER_COLOR_PROFILE_709; // Input Surace initialization res = m_AmfContext->AllocSurface(amf::AMF_MEMORY_DX11, - inputSurfaceFormat, + SurfaceFormatYUV, m_DecoderParams.width, m_DecoderParams.height, &m_AmfInputSurface); if (res != AMF_OK) goto Error; // Denoiser initialization (Reduce blocking artifacts) + // Note: Do not use yet this feature, it washes out colors, impacts negatively the visual by loosing details, + // and also the attenuation value does not change anything. 
m_AmfDenoiser->SetProperty(AMF_VIDEO_ENHANCER_OUTPUT_SIZE, ::AMFConstructSize(m_DecoderParams.width, m_DecoderParams.height)); m_AmfDenoiser->SetProperty(AMF_VIDEO_ENHANCER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); m_AmfDenoiser->SetProperty(AMF_VE_FCR_ATTENUATION, 0.10); - m_AmfDenoiser->SetProperty(AMF_VE_FCR_RADIUS, 1); - res = m_AmfDenoiser->Init(inputSurfaceFormat, + m_AmfDenoiser->SetProperty(AMF_VE_FCR_SPLIT_VIEW, 0); // When set to 1, it enables a side by side comparison view + res = m_AmfDenoiser->Init(SurfaceFormatYUV, m_DecoderParams.width, m_DecoderParams.height); if (res != AMF_OK) goto Error; - // Convert to full range picture to reduce the "chroma subsampling blur" effect, and enable the use of FSR 1.1 (otherwise can only use FSR 1.0) - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_MEMORY_TYPE, amf::AMF_MEMORY_DX11); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_FORMAT, outputSurfaceFormat); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_COLOR_PROFILE, outputColorSpace); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_FILL, true); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_FILL_COLOR, backgroundColor); - res = m_AmfFormatConverter->Init(inputSurfaceFormat, - m_DecoderParams.width, - m_DecoderParams.height); - if (res != AMF_OK) goto Error; + if(m_amfRGB){ + // Convert to RGB to enable the use of FSR 1.1 + m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_MEMORY_TYPE, amf::AMF_MEMORY_DX11); + m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_FORMAT, SurfaceFormatRGB); + m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_COLOR_PROFILE, outputColorSpace); + m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_FILL, true); + m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_FILL_COLOR, backgroundColor); + res = m_AmfFormatConverterYUVtoRGB->Init(SurfaceFormatYUV, + m_DecoderParams.width, + m_DecoderParams.height); + if (res != AMF_OK) goto 
Error; + } // Upscale initialization - m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_OUTPUT_SIZE, ::AMFConstructSize(m_OutputTexture.width * m_ScaleUp, m_OutputTexture.height * m_ScaleUp)); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_OUTPUT_SIZE, ::AMFConstructSize(m_DisplayWidth, m_DisplayHeight)); m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); - // VideoSR1.1 only supports upscaling ratio from 1.1x to 2.0x - if( - m_OutputTexture.width * m_ScaleUp / m_DecoderParams.width >= 1.1 - && m_OutputTexture.width * m_ScaleUp / m_DecoderParams.width <= 2 - && m_OutputTexture.height * m_ScaleUp / m_DecoderParams.height >= 1.1 - && m_OutputTexture.height * m_ScaleUp / m_DecoderParams.height <= 2 - ){ + // VideoSR1.1 only supports upscaling ratio from 1.1x to 2.0x in RGBA + if(m_amfRGB){ + // Compare to FSR 1.0, FSR 1.1 improvements image quality, with a better artifacts reduction and improved edge sharpnening m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_1); } else { m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_0); } - + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FROM_SRGB, true); m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_KEEP_ASPECT_RATIO, true); m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FILL, true); m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FILL_COLOR, backgroundColor); - m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FROM_SRGB, true); - m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, m_ScaleUp == 1 ? 2.00 : 0.50); // We only apply sharpening when the picture is scaled + // We only apply sharpening when the picture is scaled (0 = Most sharpened / 2.00 = Not sharpened) + if (m_OutputTexture.width == m_DecoderParams.width && m_OutputTexture.height == m_DecoderParams.height){ + m_AmfUpScalerSharpness = false; + } else { + m_AmfUpScalerSharpness = true; + } + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, m_AmfUpScalerSharpness ? 
0.30 : 2.00); m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FRAME_RATE, m_DecoderParams.frameRate); // Initialize with the size of the texture that will be input - res = m_AmfUpScaler->Init(outputSurfaceFormat, + m_AmfUpScalerSurfaceFormat = m_amfRGB ? SurfaceFormatRGB : SurfaceFormatYUV; + res = m_AmfUpScaler->Init(m_AmfUpScalerSurfaceFormat, m_DecoderParams.width, m_DecoderParams.height); if (res != AMF_OK) goto Error; @@ -777,34 +730,32 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) // Cannot use, not available for DirectX11 // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_FRC_API.md#21-component-initialization - // Downscale (to the app window size) initialization - m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_OUTPUT_SIZE, ::AMFConstructSize(m_OutputTexture.width, m_OutputTexture.height)); - m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); - m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_0); - m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_KEEP_ASPECT_RATIO, true); - m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FILL, true); - m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FILL_COLOR, backgroundColor); - m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FROM_SRGB, true); - m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, 2.00); - m_AmfDownScaler->SetProperty(AMF_HQ_SCALER_FRAME_RATE, m_DecoderParams.frameRate); - res = m_AmfDownScaler->Init(outputSurfaceFormat, - m_OutputTexture.width * m_ScaleUp, - m_OutputTexture.height * m_ScaleUp); - if (res != AMF_OK) goto Error; + if(m_amfRGB){ + // Convert back to YUV to be able to use Shaders ressources + m_AmfFormatConverterRGBtoYUV->SetProperty(AMF_VIDEO_CONVERTER_MEMORY_TYPE, amf::AMF_MEMORY_DX11); + m_AmfFormatConverterRGBtoYUV->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_FORMAT, SurfaceFormatYUV); + m_AmfFormatConverterRGBtoYUV->SetProperty(AMF_VIDEO_CONVERTER_COLOR_PROFILE, outputColorSpace); + 
m_AmfFormatConverterRGBtoYUV->SetProperty(AMF_VIDEO_CONVERTER_FILL, true); + m_AmfFormatConverterRGBtoYUV->SetProperty(AMF_VIDEO_CONVERTER_FILL_COLOR, backgroundColor); + res = m_AmfFormatConverterRGBtoYUV->Init(SurfaceFormatRGB, + m_DisplayWidth, + m_DisplayHeight); + if (res != AMF_OK) goto Error; + } if(!activate){ // Denoiser m_AmfDenoiser->Terminate(); m_AmfDenoiser = nullptr; - // Format converter - m_AmfFormatConverter->Terminate(); - m_AmfFormatConverter = nullptr; + // Format converter YUV to RGB + m_AmfFormatConverterYUVtoRGB->Terminate(); + m_AmfFormatConverterYUVtoRGB = nullptr; // Up Scaler m_AmfUpScaler->Terminate(); m_AmfUpScaler = nullptr; - // Down Scaler - m_AmfDownScaler->Terminate(); - m_AmfDownScaler = nullptr; + // Format converter RGB to YUV + m_AmfFormatConverterRGBtoYUV->Terminate(); + m_AmfFormatConverterRGBtoYUV = nullptr; // Context m_AmfContext->Terminate(); m_AmfContext = nullptr; @@ -1174,19 +1125,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // Set VSR and HDR if(m_VideoEnhancement->isVideoEnhancementEnabled()){ - // We draw on a bigger output, this will give more space to any vendor Scale Up solution to generate more - // details with less artifacts around contrasted borders. - m_ScaleUp = 2; - if(m_DecoderParams.width == m_OutputTexture.width || m_DecoderParams.height == m_OutputTexture.height){ - // We don't scale up when the pixel ratio is 1:1 between the input frame and the output texture, - // it help to keep perfect pixel matching from original - m_ScaleUp = 1; - } - if(m_DisplayWidth > 2560 || m_DisplayHeight > 1440){ - // For anything bigger than 1440p, we don't scale as it will require to much ressources for low-end devices. - // We want to keep a ratio 1:1 pixel to avoid blur effect when the texture is scale down at rendering. 
- m_ScaleUp = 1; - } // Enable VSR feature if available if(m_VideoEnhancement->isVSRcapable()){ // Try Auto Stream Super Resolution provided by DirectX11+ and agnostic to any Vendor @@ -1404,6 +1342,11 @@ void D3D11VARenderer::renderFrame(AVFrame* frame) // because the render target view will be unbound by Present(). m_DeviceContext->OMSetRenderTargets(1, &m_RenderTargetView, nullptr); + // Prepare the Enhanced Output + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + prepareEnhancedOutput(frame); + } + // Render our video frame with the aspect-ratio adjusted viewport renderVideo(frame); @@ -1601,14 +1544,15 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) } /** - * \brief Set the output colorspace + * \brief Set the output for enhanced rendering * - * According to the colorspace from the source, set the corresponding output colorspace + * According to the colorspace from the source, set the corresponding output colorspace. + * For AMF, disable the sharpness when HDR is on on Host * * \param AVFrame* frame The frame to be displayed on screen * \return void */ -void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) +void D3D11VARenderer::prepareEnhancedOutput(AVFrame* frame) { bool frameFullRange = isFrameFullRange(frame); int frameColorSpace = getFrameColorspace(frame); @@ -1633,11 +1577,11 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) enableNvidiaVideoSuperResolution(false); } if(m_AmfInitialized){ - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, frameFullRange ? 
AMF_COLOR_RANGE_FULL : AMF_COLOR_RANGE_STUDIO); - m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); + // Disable sharpness when HDR is enable on client side because it generates white borders + m_AmfUpScaler->Flush(); + m_AmfUpScaler->Terminate(); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, 2.00); + m_AmfUpScaler->Init(m_AmfUpScalerSurfaceFormat, m_DecoderParams.width, m_DecoderParams.height); } break; @@ -1650,13 +1594,12 @@ void D3D11VARenderer::prepareVideoProcessorStream(AVFrame* frame) enableNvidiaVideoSuperResolution(); } if(m_AmfInitialized){ - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, false); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_UNDEFINED); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, AMF_COLOR_RANGE_UNDEFINED); - m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); + // Enable Sharpness for Non-HDR source (host) + m_AmfUpScaler->Flush(); + m_AmfUpScaler->Terminate(); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, m_AmfUpScalerSharpness ? 
0.30 : 2.00); + m_AmfUpScaler->Init(m_AmfUpScalerSurfaceFormat, m_DecoderParams.width, m_DecoderParams.height); } - } } @@ -1675,49 +1618,54 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) srcBox.front = 0; srcBox.back = 1; - // Setup for AMD AMF if(m_AmfInitialized){ + // AMD (RX 7000+) + // Copy this frame (minus alignment padding) into a temporary video texture m_DeviceContext->CopySubresourceRegion(m_AmfTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); m_AmfContext->CreateSurfaceFromDX11Native(m_AmfTexture.Get(), &m_AmfInputSurface, nullptr); amf::AMFDataPtr m_AmfData(m_AmfInputSurface); - // Denoisier => Reduce deblocking artifacts due to compressed streamed content - m_AmfDenoiser->SubmitInput(m_AmfData); - m_AmfDenoiser->QueryOutput(&m_AmfData); - m_AmfDenoiser->Flush(); - - // Format converter => To provide best color rendering - m_AmfFormatConverter->SubmitInput(m_AmfData); - m_AmfFormatConverter->QueryOutput(&m_AmfData); - m_AmfFormatConverter->Flush(); + // // Denoisier => Reduce deblocking artifacts due to compressed streamed content + // // Note: Do not use yet this feature, it washes out colors + // m_AmfDenoiser->SubmitInput(m_AmfData); + // m_AmfDenoiser->QueryOutput(&m_AmfData); + // m_AmfDenoiser->Flush(); + + if(m_amfRGB){ + // Format converter => To provide best color rendering + m_AmfFormatConverterYUVtoRGB->SubmitInput(m_AmfData); + m_AmfFormatConverterYUVtoRGB->QueryOutput(&m_AmfData); + m_AmfFormatConverterYUVtoRGB->Flush(); + } // Up Scaling => To a higher resolution than the application window to give more surface to the VSR to generate details and thus picture clarity m_AmfUpScaler->SubmitInput(m_AmfData); m_AmfUpScaler->QueryOutput(&m_AmfData); m_AmfUpScaler->Flush(); - // We don't need to scale down if the pixel ratio is already 1:1 - if(m_ScaleUp != 1){ - // Down Scaling => To avoid a blur effect if relying on VideoProcessor, this method provides clear pixel rendering - 
m_AmfDownScaler->SubmitInput(m_AmfData); - m_AmfDownScaler->QueryOutput(&m_AmfData); - m_AmfDownScaler->Flush(); + if(m_amfRGB){ + // Format converter => To provide best color rendering + m_AmfFormatConverterRGBtoYUV->SubmitInput(m_AmfData); + m_AmfFormatConverterRGBtoYUV->QueryOutput(&m_AmfData); + m_AmfFormatConverterRGBtoYUV->Flush(); } amf::AMFSurfacePtr amfOutputSurface(m_AmfData); m_DeviceContext->CopyResource(m_VideoTexture.Get(), (ID3D11Texture2D*)amfOutputSurface->GetPlaneAt(0)->GetNative()); - } - if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + } else if(m_VideoEnhancement->isVideoEnhancementEnabled() && !m_AmfInitialized){ + // NVIDIA RTX 2000+ + // Intel Arc+ + // Copy this frame (minus alignment padding) into a temporary video texture m_DeviceContext->CopySubresourceRegion(m_EnhancedTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); - // Prepare the Stream - prepareVideoProcessorStream(frame); // Process operations on the output Texture m_VideoContext->VideoProcessorBlt(m_VideoProcessor.Get(), m_OutputView.Get(), 0, 1, &m_StreamData); } else { + // No Enhancement processing + // Copy this frame (minus alignment padding) into a temporary video texture m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); } @@ -1730,6 +1678,7 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) // Process shaders on the output texture m_DeviceContext->DrawIndexed(6, 0, 0); + } /** @@ -1874,7 +1823,7 @@ bool D3D11VARenderer::initializeVideoProcessor() // The section is a customization per vendor to slightly enhance (non-AI methods) the frame appearance. // It does work in addition to AI-enhancement for better result. 
if(m_VideoEnhancement->isVendorAMD()){ - // AMD has its own filters + // AMD does not have such filters } else if(m_VideoEnhancement->isVendorIntel()){ // Reduce blocking artifacts if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION) @@ -1882,7 +1831,6 @@ bool D3D11VARenderer::initializeVideoProcessor() // Sharpen sligthly the picture to enhance details if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT) m_VideoContext->VideoProcessorSetStreamFilter(m_VideoProcessor.Get(), 0, D3D11_VIDEO_PROCESSOR_FILTER_EDGE_ENHANCEMENT, true, 20); // (0 / 0 / 100) - } else if(m_VideoEnhancement->isVendorNVIDIA()){ // Reduce blocking artifacts if (m_VideoProcessorCapabilities.FilterCaps & D3D11_VIDEO_PROCESSOR_FILTER_NOISE_REDUCTION) @@ -2337,13 +2285,20 @@ bool D3D11VARenderer::setupRenderingResources() { // Scale video to the window size while preserving aspect ratio SDL_Rect src, dst; - src.x = src.y = 0; - src.w = m_DecoderParams.width; - src.h = m_DecoderParams.height; - dst.x = dst.y = 0; - dst.w = m_DisplayWidth; - dst.h = m_DisplayHeight; - StreamUtils::scaleSourceToDestinationSurface(&src, &dst); + if(m_AmfInitialized){ + // We use the full window for AMF as AMF keeps the picture ratio with black border around. 
+ dst.x = dst.y = 0; + dst.w = m_DisplayWidth; + dst.h = m_DisplayHeight; + } else { + src.x = src.y = 0; + src.w = m_DecoderParams.width; + src.h = m_DecoderParams.height; + dst.x = dst.y = 0; + dst.w = m_DisplayWidth; + dst.h = m_DisplayHeight; + StreamUtils::scaleSourceToDestinationSurface(&src, &dst); + } // Convert screen space to normalized device coordinates SDL_FRect renderRect; StreamUtils::screenSpaceToNormalizedDeviceCoords(&dst, &renderRect, m_DisplayWidth, m_DisplayHeight); @@ -2444,7 +2399,9 @@ bool D3D11VARenderer::setupAmfTexture() texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET; texDesc.CPUAccessFlags = 0; texDesc.MiscFlags = 0; - + if(m_AmfInitialized){ + texDesc.MiscFlags |= D3D11_RESOURCE_MISC_SHARED; + } HRESULT hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_AmfTexture.GetAddressOf()); if (FAILED(hr)) { // Handle error @@ -2467,10 +2424,7 @@ bool D3D11VARenderer::setupVideoTexture() D3D11_TEXTURE2D_DESC texDesc = {}; // Size of the output texture - if(m_AmfInitialized){ - texDesc.Width = m_OutputTexture.width; - texDesc.Height = m_OutputTexture.height; - } if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ texDesc.Width = m_DisplayWidth; texDesc.Height = m_DisplayHeight; } else { @@ -2480,11 +2434,7 @@ bool D3D11VARenderer::setupVideoTexture() texDesc.MipLevels = 1; texDesc.ArraySize = 1; - if(m_AmfInitialized){ - texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R10G10B10A2_UNORM : DXGI_FORMAT_R8G8B8A8_UNORM; - } else { - texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; - } + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; texDesc.SampleDesc.Quality = 0; texDesc.SampleDesc.Count = 1; texDesc.Usage = D3D11_USAGE_DEFAULT; @@ -2495,7 +2445,6 @@ bool D3D11VARenderer::setupVideoTexture() } texDesc.CPUAccessFlags = 0; texDesc.MiscFlags = 0; - if(m_AmfInitialized){ texDesc.MiscFlags |= D3D11_RESOURCE_MISC_SHARED; } @@ -2508,31 +2457,29 @@ bool D3D11VARenderer::setupVideoTexture() return false; } - if(!m_AmfInitialized){ - // Create luminance and chrominance SRVs for each plane of the texture - D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc = {}; - srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D; - srvDesc.Texture2D.MostDetailedMip = 0; - srvDesc.Texture2D.MipLevels = 1; - srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R16_UNORM : DXGI_FORMAT_R8_UNORM; - hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[0]); - if (FAILED(hr)) { - m_VideoTextureResourceViews[0] = nullptr; - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "ID3D11Device::CreateShaderResourceView() failed: %x", - hr); - return false; - } + // Create luminance and chrominance SRVs for each plane of the texture + D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc = {}; + srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D; + srvDesc.Texture2D.MostDetailedMip = 0; + srvDesc.Texture2D.MipLevels = 1; + srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R16_UNORM : DXGI_FORMAT_R8_UNORM; + hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[0]); + if (FAILED(hr)) { + m_VideoTextureResourceViews[0] = nullptr; + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "ID3D11Device::CreateShaderResourceView() failed: %x", + hr); + return false; + } - srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_R16G16_UNORM : DXGI_FORMAT_R8G8_UNORM; - hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[1]); - if (FAILED(hr)) { - m_VideoTextureResourceViews[1] = nullptr; - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "ID3D11Device::CreateShaderResourceView() failed: %x", - hr); - return false; - } + srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R16G16_UNORM : DXGI_FORMAT_R8G8_UNORM; + hr = m_Device->CreateShaderResourceView(m_VideoTexture.Get(), &srvDesc, &m_VideoTextureResourceViews[1]); + if (FAILED(hr)) { + m_VideoTextureResourceViews[1] = nullptr; + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "ID3D11Device::CreateShaderResourceView() failed: %x", + hr); + return false; } return true; @@ -2551,8 +2498,13 @@ bool D3D11VARenderer::setupEnhancedTexture() D3D11_TEXTURE2D_DESC texDesc = {}; // Size of the output texture - texDesc.Width = m_DecoderParams.width; - texDesc.Height = m_DecoderParams.height; + if(m_AmfInitialized){ + texDesc.Width = m_OutputTexture.width; + texDesc.Height = m_OutputTexture.height; + } else { + texDesc.Width = m_DecoderParams.width; + texDesc.Height = m_DecoderParams.height; + } texDesc.MipLevels = 1; texDesc.ArraySize = 1; texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; @@ -2562,6 +2514,9 @@ bool D3D11VARenderer::setupEnhancedTexture() texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET; texDesc.CPUAccessFlags = 0; texDesc.MiscFlags = 0; + if(m_AmfInitialized){ + texDesc.MiscFlags |= D3D11_RESOURCE_MISC_SHARED; + } hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_EnhancedTexture.GetAddressOf()); if (FAILED(hr)) { diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index ba68b4db9..0a7744a1d 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -40,7 +40,7 @@ class D3D11VARenderer : public IFFmpegRenderer bool setupEnhancedTexture(); void renderOverlay(Overlay::OverlayType type); void bindColorConversion(AVFrame* frame); - void prepareVideoProcessorStream(AVFrame* frame); + void prepareEnhancedOutput(AVFrame* frame); void renderVideo(AVFrame* frame); bool createVideoProcessor(); bool initializeVideoProcessor(); @@ -97,7 +97,6 @@ class D3D11VARenderer : public IFFmpegRenderer ComPtr m_EnhancedTexture; ID3D11ShaderResourceView* m_VideoTextureResourceViews[2]; - float m_ScaleUp = 1; struct { int width; int height; @@ -117,12 +116,13 @@ class D3D11VARenderer : public IFFmpegRenderer amf::AMFContextPtr m_AmfContext; amf::AMFSurfacePtr m_AmfInputSurface; amf::AMFComponentPtr m_AmfDenoiser; - amf::AMFComponentPtr m_AmfFormatConverter; - amf::AMFComponentPtr m_AmfUpScaler; - // amf::AMFComponentPtr does not work for m_AmfDownScaler, have to use raw pointer - amf::AMFComponent* m_AmfDownScaler; - + amf::AMFComponentPtr m_AmfFormatConverterYUVtoRGB; + // amf::AMFComponentPtr does not work for m_AmfUpScaler, have to use raw pointer + amf::AMFComponent* m_AmfUpScaler; + amf::AMFComponentPtr m_AmfFormatConverterRGBtoYUV; + bool m_amfRGB = false; bool m_AmfInitialized = false; + bool m_AmfUpScalerSharpness = false; + amf::AMF_SURFACE_FORMAT m_AmfUpScalerSurfaceFormat; 
}; - From 336c7ad894264179185a85aaf4ea34bc35ffccbe Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Mon, 1 Apr 2024 00:42:36 +0200 Subject: [PATCH 34/53] Code formating --- .../video/ffmpeg-renderers/d3d11va.cpp | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 93bf45eb0..e5afc9c65 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -99,24 +99,24 @@ static_assert(sizeof(CSC_CONST_BUF) % 16 == 0, "Constant buffer sizes must be a D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) : m_DecoderSelectionPass(decoderSelectionPass), - m_Device(nullptr), - m_DeviceContext(nullptr), - m_RenderTargetView(nullptr), - m_VideoProcessor(nullptr), - m_VideoProcessorEnumerator(nullptr), - m_LastColorSpace(-1), - m_LastFullRange(false), - m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), - m_AllowTearing(false), - m_OverlayLock(0), - m_HwDeviceContext(nullptr), - m_AmfContext(nullptr), - m_AmfInputSurface(nullptr), - m_AmfDenoiser(nullptr), - m_AmfFormatConverterYUVtoRGB(nullptr), - m_AmfUpScaler(nullptr), - m_AmfFormatConverterRGBtoYUV(nullptr), - m_AmfInitialized(false) + m_Device(nullptr), + m_DeviceContext(nullptr), + m_RenderTargetView(nullptr), + m_VideoProcessor(nullptr), + m_VideoProcessorEnumerator(nullptr), + m_LastColorSpace(-1), + m_LastFullRange(false), + m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), + m_AllowTearing(false), + m_OverlayLock(0), + m_HwDeviceContext(nullptr), + m_AmfContext(nullptr), + m_AmfInputSurface(nullptr), + m_AmfDenoiser(nullptr), + m_AmfFormatConverterYUVtoRGB(nullptr), + m_AmfUpScaler(nullptr), + m_AmfFormatConverterRGBtoYUV(nullptr), + m_AmfInitialized(false) { RtlZeroMemory(m_VideoTextureResourceViews, sizeof(m_VideoTextureResourceViews)); From c63715199d66df2ba90da15457a8b9fc46ea7e79 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Mon, 1 Apr 
2024 00:57:16 +0200 Subject: [PATCH 35/53] Fixing merge issues Fixing merge issues --- .../video/ffmpeg-renderers/d3d11va.cpp | 95 +------------------ .../video/ffmpeg-renderers/d3d11va.h | 7 -- 2 files changed, 4 insertions(+), 98 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index ff2e84272..0df57e5c4 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -259,40 +259,6 @@ void D3D11VARenderer::setHdrMode(bool enabled){ &streamHDRMetaData ); - // Set HDR Input for AMF Converter - if(m_AmfInitialized){ - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); - - // Values taken from AMF Sample: - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/5b32766b801434be61350c292127a9ac022b1268/amf/public/samples/CPPSamples/common/SwapChainDXGI.cpp#L740 - // We can initialize with Studio range first, it will be corrected to Full range if needed once the first frame is received. 
- m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_COLOR_RANGE, AMF_COLOR_RANGE_STUDIO); - - AMFHDRMetadata amfHDRMetadata; - amfHDRMetadata.redPrimary[0] = amf_uint16(streamHDRMetaData.RedPrimary[0]); - amfHDRMetadata.redPrimary[1] = amf_uint16(streamHDRMetaData.RedPrimary[1]); - amfHDRMetadata.greenPrimary[0] = amf_uint16(streamHDRMetaData.GreenPrimary[0]); - amfHDRMetadata.greenPrimary[1] = amf_uint16(streamHDRMetaData.GreenPrimary[1]); - amfHDRMetadata.bluePrimary[0] = amf_uint16(streamHDRMetaData.BluePrimary[0]); - amfHDRMetadata.bluePrimary[1] = amf_uint16(streamHDRMetaData.BluePrimary[1]); - amfHDRMetadata.whitePoint[0] = amf_uint16(streamHDRMetaData.WhitePoint[0]); - amfHDRMetadata.whitePoint[1] = amf_uint16(streamHDRMetaData.WhitePoint[1]); - amfHDRMetadata.maxMasteringLuminance = amf_uint32(streamHDRMetaData.MaxMasteringLuminance); - amfHDRMetadata.minMasteringLuminance = amf_uint32(streamHDRMetaData.MinMasteringLuminance); - amfHDRMetadata.maxContentLightLevel = 0; - amfHDRMetadata.maxFrameAverageLightLevel = 0; - - amf::AMFBufferPtr pHDRMetaDataBuffer; - m_AmfContext->AllocBuffer(amf::AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &pHDRMetaDataBuffer); - AMFHDRMetadata* pData = (AMFHDRMetadata*)pHDRMetaDataBuffer->GetNative(); - memcpy(pData, &amfHDRMetadata, sizeof(AMFHDRMetadata)); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_INPUT_HDR_METADATA, pData); - - m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, m_DecoderParams.width, m_DecoderParams.height); - } - streamSet = true; } SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, @@ -342,39 +308,6 @@ void D3D11VARenderer::setHdrMode(bool enabled){ &outputHDRMetaData ); - // Set HDR Input for AMF Converter - if(m_AmfInitialized){ - 
m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_USE_DECODER_HDR_METADATA, true); - - // Values taken from AMF Sample: - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/5b32766b801434be61350c292127a9ac022b1268/amf/public/samples/CPPSamples/common/SwapChainDXGI.cpp#L732 - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_TRANSFER_CHARACTERISTIC, AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_COLOR_PRIMARIES, AMF_COLOR_PRIMARIES_BT2020); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_COLOR_RANGE, AMF_COLOR_RANGE_FULL); - - AMFHDRMetadata amfHDRMetadata; - amfHDRMetadata.redPrimary[0] = amf_uint16(outputHDRMetaData.RedPrimary[0]); - amfHDRMetadata.redPrimary[1] = amf_uint16(outputHDRMetaData.RedPrimary[1]); - amfHDRMetadata.greenPrimary[0] = amf_uint16(outputHDRMetaData.GreenPrimary[0]); - amfHDRMetadata.greenPrimary[1] = amf_uint16(outputHDRMetaData.GreenPrimary[1]); - amfHDRMetadata.bluePrimary[0] = amf_uint16(outputHDRMetaData.BluePrimary[0]); - amfHDRMetadata.bluePrimary[1] = amf_uint16(outputHDRMetaData.BluePrimary[1]); - amfHDRMetadata.whitePoint[0] = amf_uint16(outputHDRMetaData.WhitePoint[0]); - amfHDRMetadata.whitePoint[1] = amf_uint16(outputHDRMetaData.WhitePoint[1]); - amfHDRMetadata.maxMasteringLuminance = amf_uint32(outputHDRMetaData.MaxMasteringLuminance); - amfHDRMetadata.minMasteringLuminance = amf_uint32(outputHDRMetaData.MinMasteringLuminance); - amfHDRMetadata.maxContentLightLevel = 0; - amfHDRMetadata.maxFrameAverageLightLevel = 0; - - amf::AMFBufferPtr pHDRMetaDataBuffer; - m_AmfContext->AllocBuffer(amf::AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &pHDRMetaDataBuffer); - AMFHDRMetadata* pData = (AMFHDRMetadata*)pHDRMetaDataBuffer->GetNative(); - memcpy(pData, &amfHDRMetadata, sizeof(AMFHDRMetadata)); - m_AmfFormatConverter->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_HDR_METADATA, pData); - - m_AmfFormatConverter->Init(amf::AMF_SURFACE_P010, 
m_DecoderParams.width, m_DecoderParams.height); - } - displaySet = true; } } @@ -759,8 +692,8 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_FILL, true); m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_FILL_COLOR, backgroundColor); res = m_AmfFormatConverterYUVtoRGB->Init(SurfaceFormatYUV, - m_DecoderParams.width, - m_DecoderParams.height); + m_DecoderParams.width, + m_DecoderParams.height); if (res != AMF_OK) goto Error; } @@ -1192,19 +1125,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) // Set VSR and HDR if(m_VideoEnhancement->isVideoEnhancementEnabled()){ - // We draw on a bigger output, this will give more space to any vendor Scale Up solution to generate more - // details with less artifacts around contrasted borders. - m_ScaleUp = 2; - if(m_DecoderParams.width == m_OutputTexture.width || m_DecoderParams.height == m_OutputTexture.height){ - // We don't scale up when the pixel ratio is 1:1 between the input frame and the output texture, - // it help to keep perfect pixel matching from original - m_ScaleUp = 1; - } - if(m_DisplayWidth > 2560 || m_DisplayHeight > 1440){ - // For anything bigger than 1440p, we don't scale as it will require to much ressources for low-end devices. - // We want to keep a ratio 1:1 pixel to avoid blur effect when the texture is scale down at rendering. 
- m_ScaleUp = 1; - } // Enable VSR feature if available if(m_VideoEnhancement->isVSRcapable()){ // Try Auto Stream Super Resolution provided by DirectX11+ and agnostic to any Vendor @@ -1356,7 +1276,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) m_HwDeviceContext = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_D3D11VA); if (!m_HwDeviceContext) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "Failed to allocate D3D11VA device context"); + "Failed to allocate D3D11VA device context"); return false; } @@ -1833,8 +1753,6 @@ bool D3D11VARenderer::createVideoProcessor() bool D3D11VARenderer::initializeVideoProcessor() { HRESULT hr; - D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputViewDesc; - D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputViewDesc; // INPUT setting D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputViewDesc; @@ -1901,7 +1819,6 @@ bool D3D11VARenderer::initializeVideoProcessor() } else { m_VideoContext->VideoProcessorSetOutputColorSpace1(m_VideoProcessor.Get(), DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709); } - m_VideoContext->VideoProcessorSetStreamDestRect(m_VideoProcessor.Get(), 0, true, &dstRect); // The section is a customization per vendor to slightly enhance (non-AI methods) the frame appearance. // It does work in addition to AI-enhancement for better result. @@ -2517,11 +2434,7 @@ bool D3D11VARenderer::setupVideoTexture() texDesc.MipLevels = 1; texDesc.ArraySize = 1; - if(m_VideoEnhancement->isVideoEnhancementEnabled() && m_AmfInitialized){ - texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R10G10B10A2_UNORM : DXGI_FORMAT_R8G8B8A8_UNORM; - } else { - texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; - } + texDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? 
DXGI_FORMAT_P010 : DXGI_FORMAT_NV12; texDesc.SampleDesc.Quality = 0; texDesc.SampleDesc.Count = 1; texDesc.Usage = D3D11_USAGE_DEFAULT; diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index 6694b82cd..0a7744a1d 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -96,13 +96,6 @@ class D3D11VARenderer : public IFFmpegRenderer ComPtr m_VideoTexture; ComPtr m_EnhancedTexture; ID3D11ShaderResourceView* m_VideoTextureResourceViews[2]; - float m_ScaleUp = 1; - struct { - int width; - int height; - int left; - int top; - } m_OutputTexture; struct { int width; From 889bd1629f978ebf8b428d08d31711568dcf5f32 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Mon, 1 Apr 2024 03:05:46 +0200 Subject: [PATCH 36/53] Fix Intel HDR grey screen When the Host has the HDR off, and if Moonlight has HDR on, the screen appears slightly grey. --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 9878152e8..fcded1310 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -1586,6 +1586,9 @@ void D3D11VARenderer::prepareEnhancedOutput(AVFrame* frame) // VSR from Nvidia does not work yet on HDR content (Observation by March 28th, 2024) // https://en.wikipedia.org/wiki/Video_Super_Resolution#:~:text=The%20feature%20supports%20input%20resolutions,likely%20added%20in%20the%20future enableNvidiaVideoSuperResolution(false); + } else if(m_VideoEnhancement->isVendorIntel()){ + // Enable VSR for Intel when the Host has HDR activated. 
+ enableIntelVideoSuperResolution(); } if(m_AmfInitialized){ // Disable sharpness when HDR is enable on client side because it generates white borders @@ -1603,6 +1606,9 @@ void D3D11VARenderer::prepareEnhancedOutput(AVFrame* frame) if(m_VideoEnhancement->isVendorNVIDIA()){ // Always enable NVIDIA VSR for SDR content enableNvidiaVideoSuperResolution(); + } else if(m_VideoEnhancement->isVendorIntel()){ + // Disable VSR for Intel when the Host has HDR disactivated to avoid having a grey screen. + enableIntelVideoSuperResolution(false); } if(m_AmfInitialized){ // Enable Sharpness for Non-HDR source (host) From 58ed19bb4131622801c8df119a3c89c493438408 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 5 Apr 2024 18:45:19 +0200 Subject: [PATCH 37/53] More explicit UI information when Video Enhancement is unavailable The checkbox is greyed out instead of being hidden. --- app/backend/systemproperties.cpp | 2 +- app/gui/SettingsView.qml | 14 +++++++++----- .../video/ffmpeg-renderers/d3d11va.cpp | 17 ++++++++--------- 3 files changed, 18 insertions(+), 15 deletions(-) diff --git a/app/backend/systemproperties.cpp b/app/backend/systemproperties.cpp index c5dcdf850..55cbade3c 100644 --- a/app/backend/systemproperties.cpp +++ b/app/backend/systemproperties.cpp @@ -251,7 +251,7 @@ void SystemProperties::refreshDisplaysInternal() bool SystemProperties::isVideoEnhancementCapable() { VideoEnhancement* videoEnhancement = &VideoEnhancement::getInstance(); - return videoEnhancement->isUIvisible(); + return videoEnhancement->isVSRcapable() || videoEnhancement->isHDRcapable(); } /** diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index ba4404703..ee3a6b266 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -819,9 +819,10 @@ Flickable { hoverEnabled: true text: qsTr("Video AI-Enhancement") font.pointSize: 12 - visible: SystemProperties.isVideoEnhancementCapable() - enabled: true - checked: StreamingPreferences.videoEnhancement + enabled: 
SystemProperties.isVideoEnhancementCapable() + checked: { + return SystemProperties.isVideoEnhancementCapable() && StreamingPreferences.videoEnhancement + } onCheckedChanged: { StreamingPreferences.videoEnhancement = checked @@ -838,8 +839,11 @@ Flickable { + qsTr("\n - Be advised that using this feature on laptops running on battery power may lead to significant battery drain.") Component.onCompleted: { - // Indicate if the feature is available but not officially deployed by the Vendor - if(SystemProperties.isVideoEnhancementExperimental()){ + if (!SystemProperties.isVideoEnhancementCapable()){ + // VSR or SDR->HDR feature could not be initialized by any GPU available + text = qsTr("Video AI-Enhancement (Not supported by the GPU)") + } else if(SystemProperties.isVideoEnhancementExperimental()){ + // Indicate if the feature is available but not officially deployed by the Vendor text = qsTr("Video AI-Enhancement (Experimental)") } } diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index fcded1310..e46c432ca 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -576,16 +576,14 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() } else if(m_VideoEnhancement->isVendorNVIDIA()){ m_VideoEnhancement->setVSRcapable(enableNvidiaVideoSuperResolution(false)); m_VideoEnhancement->setHDRcapable(enableNvidiaHDR(false)); + } else if (m_VideoProcessorCapabilities.AutoStreamCaps & D3D11_VIDEO_PROCESSOR_AUTO_STREAM_CAPS_SUPER_RESOLUTION){ + // Try Auto Stream Super Resolution provided by DirectX11+ and agnostic to any Vendor + m_AutoStreamSuperResolution = true; + m_VideoEnhancement->setVSRcapable(true); } // Enable the visibility of Video enhancement feature in the settings of the User interface m_VideoEnhancement->enableUIvisible(); - } else { - // Try Auto Stream Super Resolution provided by DirectX11+ and agnostic to any Vendor - if 
(m_VideoProcessorCapabilities.AutoStreamCaps & D3D11_VIDEO_PROCESSOR_AUTO_STREAM_CAPS_SUPER_RESOLUTION){ - m_AutoStreamSuperResolution = true; - m_VideoEnhancement->setVSRcapable(true); - } } } } @@ -606,16 +604,17 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() /** * \brief Enable Video Super-Resolution for AMD GPU * - * This feature is available starting from AMD series 7000 and driver AMD Software 24.1.1 (Jan 23, 2024) + * This feature is available since this drive 22.3.1 (March 2022) * https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 * * \param bool activate Default is true, at true it enables the use of Video Super-Resolution feature * \return bool Return true if the capability is available */ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo){ - // The feature is available since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md + // The feature is announced since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 // https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 + // But it is available as SDK since March 2022 (22.3.1) which mean it might also work for series 5000 and 6000 (to be tested) + // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md AMF_RESULT res; amf::AMFCapsPtr amfCaps; From 44971fecdd17cb095511a2bc1a47be94fa9db6b9 Mon Sep 17 00:00:00 2001 From: Bruno Martin Date: Sun, 21 Apr 2024 21:59:57 +0200 Subject: [PATCH 38/53] AMF rendering speed optimization (FSR and Memory) - Force to use FSR 1.0 instead of FSR 1.1 as the second one force to convert YUV->RGB->YUV which is too heavy for iGPU, and we have to efficient way to identify a dedicated GPU from an APU. - Some memory optimization in the renderFrame method to accelerate the render. 
--- .../video/ffmpeg-renderers/d3d11va.cpp | 180 +++--------------- .../video/ffmpeg-renderers/d3d11va.h | 8 +- 2 files changed, 27 insertions(+), 161 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index e46c432ca..5c8251827 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -11,11 +11,8 @@ #include "public/common/AMFFactory.h" #include "public/include/core/Platform.h" -#include "public/include/components/VideoConverter.h" // Video upscaling & Sharpening #include "public/include/components/HQScaler.h" -// Reducing blocking artifacts -#include "public/include/components/VQEnhancer.h" #include #include @@ -113,11 +110,9 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) m_OverlayLock(0), m_HwDeviceContext(nullptr), m_AmfContext(nullptr), - m_AmfInputSurface(nullptr), - m_AmfDenoiser(nullptr), - m_AmfFormatConverterYUVtoRGB(nullptr), + m_AmfSurface(nullptr), + m_AmfData(nullptr), m_AmfUpScaler(nullptr), - m_AmfFormatConverterRGBtoYUV(nullptr), m_AmfInitialized(false) { RtlZeroMemory(m_VideoTextureResourceViews, sizeof(m_VideoTextureResourceViews)); @@ -142,26 +137,11 @@ D3D11VARenderer::~D3D11VARenderer() SAFE_COM_RELEASE(m_RenderTargetView); // cleanup AMF instances - if(m_AmfDenoiser){ - // Denoiser - m_AmfDenoiser->Terminate(); - m_AmfDenoiser = nullptr; - } - if(m_AmfFormatConverterYUVtoRGB){ - // Format converter YUV to RGB - m_AmfFormatConverterYUVtoRGB->Terminate(); - m_AmfFormatConverterYUVtoRGB = nullptr; - } if(m_AmfUpScaler){ // Up Scaler m_AmfUpScaler->Terminate(); m_AmfUpScaler = nullptr; } - if(m_AmfFormatConverterRGBtoYUV){ - // Format converter RGB to YUV - m_AmfFormatConverterRGBtoYUV->Terminate(); - m_AmfFormatConverterRGBtoYUV = nullptr; - } if(m_AmfContext){ // Context m_AmfContext->Terminate(); @@ -613,7 +593,7 @@ int D3D11VARenderer::getAdapterIndexByEnhancementCapabilities() bool 
D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo){ // The feature is announced since Jan 23rd, 2024, with the driver 24.1.1 and on series 7000 // https://community.amd.com/t5/gaming/amd-software-24-1-1-amd-fluid-motion-frames-an-updated-ui-and/ba-p/656213 - // But it is available as SDK since March 2022 (22.3.1) which mean it might also work for series 5000 and 6000 (to be tested) + // But it is available as SDK since March 2022 (22.3.1) which means it might also work for series 5000 and 6000 (to be tested) // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_HQ_Scaler_API.md AMF_RESULT res; @@ -625,8 +605,6 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) return true; amf::AMF_SURFACE_FORMAT SurfaceFormatYUV; - amf::AMF_SURFACE_FORMAT SurfaceFormatRGB; - AMF_VIDEO_CONVERTER_COLOR_PROFILE_ENUM outputColorSpace; AMFColor backgroundColor = AMFConstructColor(0, 0, 0, 255); // AMF Context initialization @@ -634,14 +612,8 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) if (res != AMF_OK) goto Error; res = g_AMFFactory.GetFactory()->CreateContext(&m_AmfContext); if (res != AMF_OK) goto Error; - res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVQEnhancer, &m_AmfDenoiser); - if (res != AMF_OK) goto Error; - res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVideoConverter, &m_AmfFormatConverterYUVtoRGB); - if (res != AMF_OK) goto Error; res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFHQScaler, &m_AmfUpScaler); if (res != AMF_OK) goto Error; - res = g_AMFFactory.GetFactory()->CreateComponent(m_AmfContext, AMFVideoConverter, &m_AmfFormatConverterRGBtoYUV); - if (res != AMF_OK) goto Error; res = m_AmfContext->InitDX11(m_Device); if (res != AMF_OK) goto Error; @@ -653,71 +625,21 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) goto Error; } - // VideoSR1.1 only supports 
upscaling ratio from 1.1x to 2.0x in RGBA - // When HDR is used, keep YUV format as RGBA render wrong colorization - if( - static_cast(m_DisplayWidth) / m_DecoderParams.width >= 1.1 - && static_cast(m_DisplayWidth) / m_DecoderParams.width <= 2 - && static_cast(m_DisplayHeight) / m_DecoderParams.height >= 1.1 - && static_cast(m_DisplayHeight) / m_DecoderParams.height <= 2 - && !(m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) - ){ - m_amfRGB = true; - } else { - m_amfRGB = false; - } - // Format initialization SurfaceFormatYUV = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? amf::AMF_SURFACE_P010 : amf::AMF_SURFACE_NV12; - SurfaceFormatRGB = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? amf::AMF_SURFACE_R10G10B10A2 : amf::AMF_SURFACE_RGBA; - outputColorSpace = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) - ? AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020 - : AMF_VIDEO_CONVERTER_COLOR_PROFILE_709; - // Input Surace initialization + // Input Surface initialization res = m_AmfContext->AllocSurface(amf::AMF_MEMORY_DX11, SurfaceFormatYUV, m_DecoderParams.width, m_DecoderParams.height, - &m_AmfInputSurface); - if (res != AMF_OK) goto Error; - - // Denoiser initialization (Reduce blocking artifacts) - // Note: Do not use yet this feature, it washes out colors, impacts negatively the visual by loosing details, - // and also the attenuation value does not change anything. 
- m_AmfDenoiser->SetProperty(AMF_VIDEO_ENHANCER_OUTPUT_SIZE, ::AMFConstructSize(m_DecoderParams.width, m_DecoderParams.height)); - m_AmfDenoiser->SetProperty(AMF_VIDEO_ENHANCER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); - m_AmfDenoiser->SetProperty(AMF_VE_FCR_ATTENUATION, 0.10); - m_AmfDenoiser->SetProperty(AMF_VE_FCR_SPLIT_VIEW, 0); // When set to 1, it enables a side by side comparison view - res = m_AmfDenoiser->Init(SurfaceFormatYUV, - m_DecoderParams.width, - m_DecoderParams.height); + &m_AmfSurface); if (res != AMF_OK) goto Error; - if(m_amfRGB){ - // Convert to RGB to enable the use of FSR 1.1 - m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_MEMORY_TYPE, amf::AMF_MEMORY_DX11); - m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_FORMAT, SurfaceFormatRGB); - m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_COLOR_PROFILE, outputColorSpace); - m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_FILL, true); - m_AmfFormatConverterYUVtoRGB->SetProperty(AMF_VIDEO_CONVERTER_FILL_COLOR, backgroundColor); - res = m_AmfFormatConverterYUVtoRGB->Init(SurfaceFormatYUV, - m_DecoderParams.width, - m_DecoderParams.height); - if (res != AMF_OK) goto Error; - } - // Upscale initialization m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_OUTPUT_SIZE, ::AMFConstructSize(m_DisplayWidth, m_DisplayHeight)); m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ENGINE_TYPE, amf::AMF_MEMORY_DX11); - // VideoSR1.1 only supports upscaling ratio from 1.1x to 2.0x in RGBA - if(m_amfRGB){ - // Compare to FSR 1.0, FSR 1.1 improvements image quality, with a better artifacts reduction and improved edge sharpnening - m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_1); - } else { - m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_0); - } - m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FROM_SRGB, true); + m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_ALGORITHM, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_0); 
m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_KEEP_ASPECT_RATIO, true); m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FILL, true); m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FILL_COLOR, backgroundColor); @@ -730,42 +652,16 @@ bool D3D11VARenderer::enableAMDVideoSuperResolution(bool activate, bool logInfo) m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_SHARPNESS, m_AmfUpScalerSharpness ? 0.30 : 2.00); m_AmfUpScaler->SetProperty(AMF_HQ_SCALER_FRAME_RATE, m_DecoderParams.frameRate); // Initialize with the size of the texture that will be input - m_AmfUpScalerSurfaceFormat = m_amfRGB ? SurfaceFormatRGB : SurfaceFormatYUV; - res = m_AmfUpScaler->Init(m_AmfUpScalerSurfaceFormat, + m_AmfUpScalerSurfaceFormat = SurfaceFormatYUV; + res = m_AmfUpScaler->Init(SurfaceFormatYUV, m_DecoderParams.width, m_DecoderParams.height); if (res != AMF_OK) goto Error; - // Frame Generation - // Cannot use, not available for DirectX11 - // https://github.com/GPUOpen-LibrariesAndSDKs/AMF/blob/master/amf/doc/AMF_FRC_API.md#21-component-initialization - - if(m_amfRGB){ - // Convert back to YUV to be able to use Shaders ressources - m_AmfFormatConverterRGBtoYUV->SetProperty(AMF_VIDEO_CONVERTER_MEMORY_TYPE, amf::AMF_MEMORY_DX11); - m_AmfFormatConverterRGBtoYUV->SetProperty(AMF_VIDEO_CONVERTER_OUTPUT_FORMAT, SurfaceFormatYUV); - m_AmfFormatConverterRGBtoYUV->SetProperty(AMF_VIDEO_CONVERTER_COLOR_PROFILE, outputColorSpace); - m_AmfFormatConverterRGBtoYUV->SetProperty(AMF_VIDEO_CONVERTER_FILL, true); - m_AmfFormatConverterRGBtoYUV->SetProperty(AMF_VIDEO_CONVERTER_FILL_COLOR, backgroundColor); - res = m_AmfFormatConverterRGBtoYUV->Init(SurfaceFormatRGB, - m_DisplayWidth, - m_DisplayHeight); - if (res != AMF_OK) goto Error; - } - if(!activate){ - // Denoiser - m_AmfDenoiser->Terminate(); - m_AmfDenoiser = nullptr; - // Format converter YUV to RGB - m_AmfFormatConverterYUVtoRGB->Terminate(); - m_AmfFormatConverterYUVtoRGB = nullptr; // Up Scaler m_AmfUpScaler->Terminate(); m_AmfUpScaler = nullptr; - // Format converter 
RGB to YUV - m_AmfFormatConverterRGBtoYUV->Terminate(); - m_AmfFormatConverterRGBtoYUV = nullptr; // Context m_AmfContext->Terminate(); m_AmfContext = nullptr; @@ -1311,6 +1207,13 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } } + m_SrcBox.left = 0; + m_SrcBox.top = 0; + m_SrcBox.right = m_DecoderParams.width; + m_SrcBox.bottom = m_DecoderParams.height; + m_SrcBox.front = 0; + m_SrcBox.back = 1; + // Create our video textures and SRVs if (!setupEnhancedTexture() || !setupVideoTexture() || !setupAmfTexture()) { return false; @@ -1626,64 +1529,33 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) UINT offset = 0; m_DeviceContext->IASetVertexBuffers(0, 1, m_VideoVertexBuffer.GetAddressOf(), &stride, &offset); - D3D11_BOX srcBox; - srcBox.left = 0; - srcBox.top = 0; - srcBox.right = m_DecoderParams.width; - srcBox.bottom = m_DecoderParams.height; - srcBox.front = 0; - srcBox.back = 1; - if(m_AmfInitialized){ - // AMD (RX 7000+) + // AMD (RDNA2+) // Copy this frame (minus alignment padding) into a temporary video texture - m_DeviceContext->CopySubresourceRegion(m_AmfTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); - m_AmfContext->CreateSurfaceFromDX11Native(m_AmfTexture.Get(), &m_AmfInputSurface, nullptr); - - amf::AMFDataPtr m_AmfData(m_AmfInputSurface); - - // // Denoisier => Reduce deblocking artifacts due to compressed streamed content - // // Note: Do not use yet this feature, it washes out colors - // m_AmfDenoiser->SubmitInput(m_AmfData); - // m_AmfDenoiser->QueryOutput(&m_AmfData); - // m_AmfDenoiser->Flush(); - - if(m_amfRGB){ - // Format converter => To provide best color rendering - m_AmfFormatConverterYUVtoRGB->SubmitInput(m_AmfData); - m_AmfFormatConverterYUVtoRGB->QueryOutput(&m_AmfData); - m_AmfFormatConverterYUVtoRGB->Flush(); - } + m_DeviceContext->CopySubresourceRegion(m_AmfTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &m_SrcBox); + 
m_AmfContext->CreateSurfaceFromDX11Native(m_AmfTexture.Get(), &m_AmfSurface, nullptr); // Up Scaling => To a higher resolution than the application window to give more surface to the VSR to generate details and thus picture clarity - m_AmfUpScaler->SubmitInput(m_AmfData); + m_AmfUpScaler->SubmitInput(m_AmfSurface); m_AmfUpScaler->QueryOutput(&m_AmfData); m_AmfUpScaler->Flush(); - if(m_amfRGB){ - // Format converter => To provide best color rendering - m_AmfFormatConverterRGBtoYUV->SubmitInput(m_AmfData); - m_AmfFormatConverterRGBtoYUV->QueryOutput(&m_AmfData); - m_AmfFormatConverterRGBtoYUV->Flush(); - } - - amf::AMFSurfacePtr amfOutputSurface(m_AmfData); - m_DeviceContext->CopyResource(m_VideoTexture.Get(), (ID3D11Texture2D*)amfOutputSurface->GetPlaneAt(0)->GetNative()); - + m_AmfData->QueryInterface(amf::AMFSurface::IID(), reinterpret_cast(&m_AmfSurface)); + m_DeviceContext->CopyResource(m_VideoTexture.Get(), (ID3D11Texture2D*)m_AmfSurface->GetPlaneAt(0)->GetNative()); } else if(m_VideoEnhancement->isVideoEnhancementEnabled() && !m_AmfInitialized){ // NVIDIA RTX 2000+ // Intel Arc+ // Copy this frame (minus alignment padding) into a temporary video texture - m_DeviceContext->CopySubresourceRegion(m_EnhancedTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + m_DeviceContext->CopySubresourceRegion(m_EnhancedTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &m_SrcBox); // Process operations on the output Texture m_VideoContext->VideoProcessorBlt(m_VideoProcessor.Get(), m_OutputView.Get(), 0, 1, &m_StreamData); } else { // No Enhancement processing // Copy this frame (minus alignment padding) into a temporary video texture - m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], (int)(intptr_t)frame->data[1], &srcBox); + m_DeviceContext->CopySubresourceRegion(m_VideoTexture.Get(), 0, 0, 0, 0, (ID3D11Resource*)frame->data[0], 
(int)(intptr_t)frame->data[1], &m_SrcBox); } // Bind our CSC shader (and constant buffer, if required) @@ -1694,7 +1566,6 @@ void D3D11VARenderer::renderVideo(AVFrame* frame) // Process shaders on the output texture m_DeviceContext->DrawIndexed(6, 0, 0); - } /** @@ -2437,10 +2308,7 @@ bool D3D11VARenderer::setupAmfTexture() texDesc.Usage = D3D11_USAGE_DEFAULT; texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET; texDesc.CPUAccessFlags = 0; - texDesc.MiscFlags = 0; - if(m_AmfInitialized){ - texDesc.MiscFlags |= D3D11_RESOURCE_MISC_SHARED; - } + texDesc.MiscFlags = D3D11_RESOURCE_MISC_SHARED; HRESULT hr = m_Device->CreateTexture2D(&texDesc, nullptr, m_AmfTexture.GetAddressOf()); if (FAILED(hr)) { // Handle error diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index 75c1799dd..b4d340cb2 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -68,6 +68,7 @@ class D3D11VARenderer : public IFFmpegRenderer ComPtr m_SwapChain; ID3D11RenderTargetView* m_RenderTargetView; SDL_mutex* m_ContextLock; + D3D11_BOX m_SrcBox; ComPtr m_VideoDevice; ComPtr m_VideoContext; @@ -117,13 +118,10 @@ class D3D11VARenderer : public IFFmpegRenderer // AMD (AMF) amf::AMFContextPtr m_AmfContext; - amf::AMFSurfacePtr m_AmfInputSurface; - amf::AMFComponentPtr m_AmfDenoiser; - amf::AMFComponentPtr m_AmfFormatConverterYUVtoRGB; + amf::AMFSurfacePtr m_AmfSurface; + amf::AMFDataPtr m_AmfData; // amf::AMFComponentPtr does not work for m_AmfUpScaler, have to use raw pointer amf::AMFComponent* m_AmfUpScaler; - amf::AMFComponentPtr m_AmfFormatConverterRGBtoYUV; - bool m_amfRGB = false; bool m_AmfInitialized = false; bool m_AmfUpScalerSharpness = false; amf::AMF_SURFACE_FORMAT m_AmfUpScalerSurfaceFormat; From ad903e772ea35737cc1785dcbf8fffaa84699104 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Mon, 6 May 2024 23:33:46 +0200 Subject: [PATCH 39/53] Add Video 
Super Resolution for MacOS using MetalFX - Add The library MetalFX - Check the Upscaling capability based on MacOS version (13+) - Add Spacial scaler for Luma and Chroma textures --- app/app.pro | 2 +- .../video/ffmpeg-renderers/vt_metal.mm | 242 +++++++++++++++--- 2 files changed, 201 insertions(+), 43 deletions(-) diff --git a/app/app.pro b/app/app.pro index f2f99c634..3fa20eb09 100644 --- a/app/app.pro +++ b/app/app.pro @@ -153,7 +153,7 @@ win32:!winrt { } macx { LIBS += -lssl -lcrypto -lavcodec.61 -lavutil.59 -lopus -framework SDL2 -framework SDL2_ttf - LIBS += -lobjc -framework VideoToolbox -framework AVFoundation -framework CoreVideo -framework CoreGraphics -framework CoreMedia -framework AppKit -framework Metal -framework QuartzCore + LIBS += -lobjc -framework VideoToolbox -framework AVFoundation -framework CoreVideo -framework CoreGraphics -framework CoreMedia -framework AppKit -framework Metal -framework MetalFx -framework QuartzCore # For libsoundio LIBS += -framework CoreAudio -framework AudioUnit diff --git a/app/streaming/video/ffmpeg-renderers/vt_metal.mm b/app/streaming/video/ffmpeg-renderers/vt_metal.mm index ac3fa326d..50dc97ecb 100644 --- a/app/streaming/video/ffmpeg-renderers/vt_metal.mm +++ b/app/streaming/video/ffmpeg-renderers/vt_metal.mm @@ -15,9 +15,11 @@ #import #import #import -#import +#import #import +#include "streaming/video/videoenhancement.h" + struct CscParams { vector_float3 matrix[3]; @@ -123,8 +125,15 @@ m_LastDrawableHeight(-1), m_PresentationMutex(SDL_CreateMutex()), m_PresentationCond(SDL_CreateCond()), - m_PendingPresentationCount(0) + m_PendingPresentationCount(0), + m_LumaTexture(nullptr), + m_LumaUpscaledTexture(nullptr), + m_LumaUpscaler(nullptr), + m_ChromaTexture(nullptr), + m_ChromaUpscaledTexture(nullptr), + m_ChromaUpscaler(nullptr) { + m_VideoEnhancement = &VideoEnhancement::getInstance(); } virtual ~VTMetalRenderer() override @@ -171,6 +180,30 @@ [m_CommandQueue release]; } + if (m_LumaTexture != nullptr) { + 
[m_LumaTexture release]; + } + + if (m_LumaUpscaledTexture != nullptr) { + [m_LumaUpscaledTexture release]; + } + + if (m_LumaUpscaler != nullptr) { + [m_LumaUpscaler release]; + } + + if (m_ChromaTexture != nullptr) { + [m_ChromaTexture release]; + } + + if (m_ChromaUpscaledTexture != nullptr) { + [m_ChromaUpscaledTexture release]; + } + + if (m_ChromaUpscaler != nullptr) { + [m_ChromaUpscaler release]; + } + if (m_TextureCache != nullptr) { CFRelease(m_TextureCache); } @@ -387,62 +420,145 @@ virtual void renderFrame(AVFrame* frame) override return; } - // Create Metal textures for the planes of the CVPixelBuffer - std::array textures; - for (size_t i = 0; i < textures.size(); i++) { - MTLPixelFormat fmt; - - switch (CVPixelBufferGetPixelFormatType(pixBuf)) { - case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: - case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: - fmt = (i == 0) ? MTLPixelFormatR8Unorm : MTLPixelFormatRG8Unorm; - break; + switch (CVPixelBufferGetPixelFormatType(pixBuf)) { + case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: + case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: + m_LumaPixelFormart = MTLPixelFormatR8Unorm; + m_ChromaPixelFormart = MTLPixelFormatRG8Unorm; + break; + + case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange: + case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange: + m_LumaPixelFormart = MTLPixelFormatR16Unorm; + m_ChromaPixelFormart = MTLPixelFormatRG16Unorm; + break; + case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange: + case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange: + m_LumaPixelFormart = MTLPixelFormatR8Unorm; + m_ChromaPixelFormart = MTLPixelFormatRG8Unorm; + break; + default: + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Unknown pixel format: %x", + CVPixelBufferGetPixelFormatType(pixBuf)); + return; + } - case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange: - case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange: - fmt = (i == 0) ? 
MTLPixelFormatR16Unorm : MTLPixelFormatRG16Unorm; - break; + CVReturn err; - default: - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "Unknown pixel format: %x", - CVPixelBufferGetPixelFormatType(pixBuf)); - return; - } - - CVReturn err = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_TextureCache, pixBuf, nullptr, fmt, - CVPixelBufferGetWidthOfPlane(pixBuf, i), - CVPixelBufferGetHeightOfPlane(pixBuf, i), - i, - &textures[i]); - if (err != kCVReturnSuccess) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "CVMetalTextureCacheCreateTextureFromImage() failed: %d", - err); - return; - } + err = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + m_TextureCache, + pixBuf, + nullptr, + m_LumaPixelFormart, + CVPixelBufferGetWidthOfPlane(pixBuf, 0), + CVPixelBufferGetHeightOfPlane(pixBuf, 0), + 0, + &m_cvLumaTexture); + if (err != kCVReturnSuccess) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "CVMetalTextureCacheCreateTextureFromImage() failed: %d", + err); + return; } + m_LumaTexture = CVMetalTextureGetTexture(m_cvLumaTexture); + + err = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + m_TextureCache, + pixBuf, + nullptr, + m_ChromaPixelFormart, + CVPixelBufferGetWidthOfPlane(pixBuf, 1), + CVPixelBufferGetHeightOfPlane(pixBuf, 1), + 1, + &m_cvChromaTexture); + if (err != kCVReturnSuccess) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "CVMetalTextureCacheCreateTextureFromImage() failed: %d", + err); + return; + } + m_ChromaTexture = CVMetalTextureGetTexture(m_cvChromaTexture); // Prepare a render pass to render into the next drawable - auto renderPassDescriptor = [MTLRenderPassDescriptor renderPassDescriptor]; + MTLRenderPassDescriptor *renderPassDescriptor = [MTLRenderPassDescriptor renderPassDescriptor]; renderPassDescriptor.colorAttachments[0].texture = m_NextDrawable.texture; renderPassDescriptor.colorAttachments[0].loadAction = MTLLoadActionClear; renderPassDescriptor.colorAttachments[0].clearColor = 
MTLClearColorMake(0.0, 0.0, 0.0, 0.0); renderPassDescriptor.colorAttachments[0].storeAction = MTLStoreActionStore; - auto commandBuffer = [m_CommandQueue commandBuffer]; - auto renderEncoder = [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor]; + + id commandBuffer = [m_CommandQueue commandBuffer]; + id renderEncoder = [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor]; + + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + m_LumaWidth = CVPixelBufferGetWidthOfPlane(pixBuf, 0); + m_LumaHeight = CVPixelBufferGetHeightOfPlane(pixBuf, 0); + m_ChromaWidth = CVPixelBufferGetWidthOfPlane(pixBuf, 1); + m_ChromaHeight = CVPixelBufferGetHeightOfPlane(pixBuf, 1); + + // Setup the Spacial scaler for Luma texture + if(m_LumaUpscaler == nullptr){ + MTLFXSpatialScalerDescriptor* Ldescriptor = [MTLFXSpatialScalerDescriptor new]; + Ldescriptor.inputWidth = m_LumaWidth; + Ldescriptor.inputHeight = m_LumaHeight; + Ldescriptor.outputWidth = m_LastDrawableWidth; + Ldescriptor.outputHeight = m_LastDrawableHeight; + Ldescriptor.colorTextureFormat = m_LumaPixelFormart; + Ldescriptor.outputTextureFormat = m_LumaPixelFormart; + Ldescriptor.colorProcessingMode = MTLFXSpatialScalerColorProcessingModeLinear; // Linear has a better color rendering than Perceptual + m_LumaUpscaler = [Ldescriptor newSpatialScalerWithDevice:m_MetalLayer.device]; + + MTLTextureDescriptor *LtextureDescriptor = [[MTLTextureDescriptor alloc] init]; + LtextureDescriptor.pixelFormat = m_LumaPixelFormart; + LtextureDescriptor.width = m_LastDrawableWidth; + LtextureDescriptor.height = m_LastDrawableHeight; + LtextureDescriptor.storageMode = MTLStorageModePrivate; + LtextureDescriptor.usage = MTLTextureUsageShaderRead | MTLTextureUsageRenderTarget; + + m_LumaUpscaledTexture = [m_MetalLayer.device newTextureWithDescriptor:LtextureDescriptor]; + } + + // Setup the Spacial scaler for Chroma texture + if(m_ChromaUpscaler == nullptr){ + MTLFXSpatialScalerDescriptor* Cdescriptor = 
[MTLFXSpatialScalerDescriptor new]; + Cdescriptor.inputWidth = m_ChromaWidth; + Cdescriptor.inputHeight = m_ChromaHeight; + Cdescriptor.outputWidth = m_LastDrawableWidth; + Cdescriptor.outputHeight = m_LastDrawableHeight; + Cdescriptor.colorTextureFormat = m_ChromaPixelFormart; + Cdescriptor.outputTextureFormat = m_ChromaPixelFormart; + Cdescriptor.colorProcessingMode = MTLFXSpatialScalerColorProcessingModeLinear; // Linear has a better color rendering than Perceptual + m_ChromaUpscaler = [Cdescriptor newSpatialScalerWithDevice:m_MetalLayer.device]; + + MTLTextureDescriptor* CtextureDescriptor = [[MTLTextureDescriptor alloc] init]; + CtextureDescriptor.pixelFormat = m_ChromaPixelFormart; + CtextureDescriptor.width = m_LastDrawableWidth; + CtextureDescriptor.height = m_LastDrawableHeight; + CtextureDescriptor.storageMode = MTLStorageModePrivate; + CtextureDescriptor.usage = MTLTextureUsageShaderRead | MTLTextureUsageRenderTarget; + + m_ChromaUpscaledTexture = [m_MetalLayer.device newTextureWithDescriptor:CtextureDescriptor]; + } + } // Bind textures and buffers then draw the video region [renderEncoder setRenderPipelineState:m_VideoPipelineState]; - for (size_t i = 0; i < textures.size(); i++) { - [renderEncoder setFragmentTexture:CVMetalTextureGetTexture(textures[i]) atIndex:i]; + + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + // Use scaled textures + [renderEncoder setFragmentTexture:m_LumaUpscaledTexture atIndex:0]; + [renderEncoder setFragmentTexture:m_ChromaUpscaledTexture atIndex:1]; + } else { + [renderEncoder setFragmentTexture:m_LumaTexture atIndex:0]; + [renderEncoder setFragmentTexture:m_ChromaTexture atIndex:1]; } + [commandBuffer addCompletedHandler:^(id) { // Free textures after completion of rendering per CVMetalTextureCache requirements - for (const CVMetalTextureRef &tex : textures) { - CFRelease(tex); - } + if(m_cvLumaTexture != nullptr) CFRelease(m_cvLumaTexture); + if(m_cvChromaTexture != nullptr) CFRelease(m_cvChromaTexture); }]; + 
[renderEncoder setFragmentBuffer:m_CscParamsBuffer offset:0 atIndex:0]; [renderEncoder setVertexBuffer:m_VideoVertexBuffer offset:0 atIndex:0]; [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4]; @@ -507,6 +623,16 @@ virtual void renderFrame(AVFrame* frame) override }]; } + if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + m_LumaUpscaler.colorTexture = m_LumaTexture; + m_LumaUpscaler.outputTexture = m_LumaUpscaledTexture; + m_ChromaUpscaler.colorTexture = m_ChromaTexture; + m_ChromaUpscaler.outputTexture = m_ChromaUpscaledTexture; + + [m_LumaUpscaler encodeToCommandBuffer:commandBuffer]; + [m_ChromaUpscaler encodeToCommandBuffer:commandBuffer]; + } + // Flip to the newly rendered buffer [commandBuffer presentDrawable:m_NextDrawable]; [commandBuffer commit]; @@ -630,6 +756,7 @@ virtual bool initialize(PDECODER_PARAMETERS params) override int err; m_Window = params->window; + m_DecoderParams = *params; id device = getMetalDevice(); if (!device) { @@ -640,6 +767,19 @@ virtual bool initialize(PDECODER_PARAMETERS params) override return false; } + if (@available(macOS 13.0, *)) { + // Video Super Resolution from MetalFX is available starting from MacOS 13+ + m_VideoEnhancement->setVSRcapable(true); + m_VideoEnhancement->setHDRcapable(false); + // Enable the visibility of Video enhancement feature in the settings of the User interface + m_VideoEnhancement->enableUIvisible(); + } + + if(m_VideoEnhancement->isEnhancementCapable()){ + // Check if the user has enable Video enhancement + m_VideoEnhancement->enableVideoEnhancement(m_DecoderParams.enableVideoEnhancement); + } + err = av_hwdevice_ctx_create(&m_HwContext, AV_HWDEVICE_TYPE_VIDEOTOOLBOX, nullptr, @@ -706,6 +846,7 @@ virtual bool initialize(PDECODER_PARAMETERS params) override // Create a command queue for submission m_CommandQueue = [m_MetalLayer.device newCommandQueue]; + return true; }} @@ -833,6 +974,23 @@ bool notifyWindowChanged(PWINDOW_STATE_CHANGE_INFO info) 
override SDL_mutex* m_PresentationMutex; SDL_cond* m_PresentationCond; int m_PendingPresentationCount; + + VideoEnhancement* m_VideoEnhancement; + DECODER_PARAMETERS m_DecoderParams; + id m_LumaTexture; + id m_LumaUpscaledTexture; + id m_LumaUpscaler; + id m_ChromaTexture; + id m_ChromaUpscaledTexture; + id m_ChromaUpscaler; + size_t m_LumaWidth; + size_t m_LumaHeight; + size_t m_ChromaWidth; + size_t m_ChromaHeight; + MTLPixelFormat m_LumaPixelFormart; + MTLPixelFormat m_ChromaPixelFormart; + CVMetalTextureRef m_cvLumaTexture; + CVMetalTextureRef m_cvChromaTexture; }; IFFmpegRenderer* VTMetalRendererFactory::createRenderer() { From 1caccf881f0d00607ab68de0c48c8dbc0f9c1d91 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sat, 15 Jun 2024 15:57:03 +0200 Subject: [PATCH 40/53] Fix crash because of CFRelease Most of the time, m_TextureCache is cleared before CFRelease is called, which makes the application crash. --- app/streaming/video/ffmpeg-renderers/vt_metal.mm | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/vt_metal.mm b/app/streaming/video/ffmpeg-renderers/vt_metal.mm index 50dc97ecb..1b74bcfbe 100644 --- a/app/streaming/video/ffmpeg-renderers/vt_metal.mm +++ b/app/streaming/video/ffmpeg-renderers/vt_metal.mm @@ -204,9 +204,10 @@ [m_ChromaUpscaler release]; } - if (m_TextureCache != nullptr) { - CFRelease(m_TextureCache); - } + // Note: CFRelease makes the application crash sometime as the m_TextureCache seems to be cleared before it is called + // if (m_TextureCache != nullptr) { + // CFRelease(m_TextureCache); + // } if (m_MetalView != nullptr) { SDL_Metal_DestroyView(m_MetalView); From 2068cb345402d36fe812d6bb7c8a14283bff529f Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sun, 28 Jul 2024 15:21:20 +0200 Subject: [PATCH 41/53] Reapply some code optimizations related to the Video Enhancement --- .../video/ffmpeg-renderers/d3d11va.cpp | 45 +------------------ 1 file changed, 1 insertion(+), 44 
deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 898f09f49..1aecb491f 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -117,9 +117,6 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) m_AmfUpScaler(nullptr), m_AmfInitialized(false) { - RtlZeroMemory(m_OverlayVertexBuffers, sizeof(m_OverlayVertexBuffers)); - RtlZeroMemory(m_OverlayTextures, sizeof(m_OverlayTextures)); - RtlZeroMemory(m_OverlayTextureResourceViews, sizeof(m_OverlayTextureResourceViews)); RtlZeroMemory(m_VideoTextureResourceViews, sizeof(m_VideoTextureResourceViews)); m_ContextLock = SDL_CreateMutex(); @@ -135,34 +132,12 @@ D3D11VARenderer::~D3D11VARenderer() SDL_DestroyMutex(m_ContextLock); - SAFE_COM_RELEASE(m_VideoVertexBuffer); - SAFE_COM_RELEASE(m_VideoBt2020LimPixelShader); - SAFE_COM_RELEASE(m_VideoBt601LimPixelShader); - SAFE_COM_RELEASE(m_VideoGenericPixelShader); - for (int i = 0; i < ARRAYSIZE(m_VideoTextureResourceViews); i++) { SAFE_COM_RELEASE(m_VideoTextureResourceViews[i][0]); SAFE_COM_RELEASE(m_VideoTextureResourceViews[i][1]); } - SAFE_COM_RELEASE(m_VideoTexture); - - for (int i = 0; i < ARRAYSIZE(m_OverlayVertexBuffers); i++) { - SAFE_COM_RELEASE(m_OverlayVertexBuffers[i]); - } - - for (int i = 0; i < ARRAYSIZE(m_OverlayTextureResourceViews); i++) { - SAFE_COM_RELEASE(m_OverlayTextureResourceViews[i]); - } - - for (int i = 0; i < ARRAYSIZE(m_OverlayTextures); i++) { - SAFE_COM_RELEASE(m_OverlayTextures[i]); - } - - SAFE_COM_RELEASE(m_OverlayPixelShader); - SAFE_COM_RELEASE(m_RenderTargetView); - SAFE_COM_RELEASE(m_SwapChain); if (m_HwFramesContext != nullptr) { av_buffer_unref(&m_HwFramesContext); @@ -227,7 +202,6 @@ D3D11VARenderer::~D3D11VARenderer() // } #endif - SAFE_COM_RELEASE(m_Factory); } /** @@ -1316,7 +1290,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) m_SrcBox.back = 1; // Create our video 
textures and SRVs - if (!setupEnhancedTexture() || !setupVideoTexture() || !setupAmfTexture()) { + if (!setupEnhancedTexture() || !setupAmfTexture()) { return false; } @@ -1462,9 +1436,6 @@ void D3D11VARenderer::renderOverlay(Overlay::OverlayType type) SDL_assert(overlayVertexBuffer != nullptr); SDL_AtomicUnlock(&m_OverlayLock); - overlayTexture->AddRef(); - overlayVertexBuffer->AddRef(); - overlayTextureResourceView->AddRef(); // Bind vertex buffer UINT stride = sizeof(VERTEX); @@ -1477,10 +1448,6 @@ void D3D11VARenderer::renderOverlay(Overlay::OverlayType type) // Draw the overlay m_DeviceContext->DrawIndexed(6, 0, 0); - - overlayTextureResourceView->Release(); - overlayTexture->Release(); - overlayVertexBuffer->Release(); } void D3D11VARenderer::bindColorConversion(AVFrame* frame) @@ -1554,7 +1521,6 @@ void D3D11VARenderer::bindColorConversion(AVFrame* frame) HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, constantBuffer.GetAddressOf()); if (SUCCEEDED(hr)) { m_DeviceContext->PSSetConstantBuffers(1, 1, constantBuffer.GetAddressOf()); - // constantBuffer->Release(); } else { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, @@ -1896,10 +1862,6 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) m_OverlayTextureResourceViews[type] = nullptr; SDL_AtomicUnlock(&m_OverlayLock); - SAFE_COM_RELEASE(oldTextureResourceView); - SAFE_COM_RELEASE(oldTexture); - SAFE_COM_RELEASE(oldVertexBuffer); - // If the overlay is disabled, we're done if (!overlayEnabled) { SDL_FreeSurface(newSurface); @@ -1930,7 +1892,6 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) ComPtr newTexture; hr = m_Device->CreateTexture2D(&texDesc, &texData, newTexture.GetAddressOf()); if (FAILED(hr)) { - SDL_FreeSurface(newSurface); SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateTexture2D() failed: %x", hr); @@ -1940,8 +1901,6 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) ComPtr newTextureResourceView; hr = 
m_Device->CreateShaderResourceView((ID3D11Resource*)newTexture.Get(), nullptr, newTextureResourceView.GetAddressOf()); if (FAILED(hr)) { - SAFE_COM_RELEASE(newTexture); - SDL_FreeSurface(newSurface); SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateShaderResourceView() failed: %x", hr); @@ -1993,8 +1952,6 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) ComPtr newVertexBuffer; hr = m_Device->CreateBuffer(&vbDesc, &vbData, newVertexBuffer.GetAddressOf()); if (FAILED(hr)) { - SAFE_COM_RELEASE(newTextureResourceView); - SAFE_COM_RELEASE(newTexture); SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "ID3D11Device::CreateBuffer() failed: %x", hr); From f22d7ff673dae4c5f1a191e6532b0aeacb5c3779 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Mon, 29 Jul 2024 00:05:36 +0200 Subject: [PATCH 42/53] Fix rendering issue --- app/backend/systemproperties.cpp | 2 +- .../video/ffmpeg-renderers/d3d11va.cpp | 57 +++++++------------ 2 files changed, 22 insertions(+), 37 deletions(-) diff --git a/app/backend/systemproperties.cpp b/app/backend/systemproperties.cpp index 00e76834a..32219e0c2 100644 --- a/app/backend/systemproperties.cpp +++ b/app/backend/systemproperties.cpp @@ -260,7 +260,7 @@ void SystemProperties::refreshDisplaysInternal() bool SystemProperties::isVideoEnhancementCapable() { VideoEnhancement* videoEnhancement = &VideoEnhancement::getInstance(); - return videoEnhancement->isVSRcapable() || videoEnhancement->isHDRcapable(); + return videoEnhancement->isUIvisible() && (videoEnhancement->isVSRcapable() || videoEnhancement->isHDRcapable()); } /** diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 1aecb491f..110dec2c3 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -108,7 +108,6 @@ D3D11VARenderer::D3D11VARenderer(int decoderSelectionPass) m_LastColorTrc(AVCOL_TRC_UNSPECIFIED), m_AllowTearing(false), 
m_OverlayLock(0), - m_OverlayPixelShader(nullptr), m_HwDeviceContext(nullptr), m_HwFramesContext(nullptr), m_AmfContext(nullptr), @@ -139,10 +138,6 @@ D3D11VARenderer::~D3D11VARenderer() SAFE_COM_RELEASE(m_RenderTargetView); - if (m_HwFramesContext != nullptr) { - av_buffer_unref(&m_HwFramesContext); - } - // cleanup AMF instances if(m_AmfUpScaler){ // Up Scaler @@ -380,9 +375,9 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter D3D_DRIVER_TYPE_UNKNOWN, nullptr, D3D11_CREATE_DEVICE_VIDEO_SUPPORT - #ifdef QT_DEBUG +#ifdef QT_DEBUG | D3D11_CREATE_DEVICE_DEBUG - #endif +#endif , supportedFeatureLevels, ARRAYSIZE(supportedFeatureLevels), @@ -1045,6 +1040,12 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } } + if(m_BindDecoderOutputTextures){ + // Disable Video enhancement as we do not copy the frame to process it + m_VideoEnhancement->enableVideoEnhancement(false); + m_VideoEnhancement->enableUIvisible(false); + } + // Set VSR and HDR if(m_VideoEnhancement->isVideoEnhancementEnabled()){ // Enable VSR feature if available @@ -1194,14 +1195,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) return false; } - // Surfaces must be 16 pixel aligned for H.264 and 128 pixel aligned for everything else - // https://github.com/FFmpeg/FFmpeg/blob/a234e5cd80224c95a205c1f3e297d8c04a1374c3/libavcodec/dxva2.c#L609-L616 - m_TextureAlignment = (params->videoFormat & VIDEO_FORMAT_MASK_H264) ? 16 : 128; - - if (!setupRenderingResources()) { - return false; - } - { m_HwDeviceContext = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_D3D11VA); if (!m_HwDeviceContext) { @@ -1231,6 +1224,10 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) } } + // Surfaces must be 16 pixel aligned for H.264 and 128 pixel aligned for everything else + // https://github.com/FFmpeg/FFmpeg/blob/a234e5cd80224c95a205c1f3e297d8c04a1374c3/libavcodec/dxva2.c#L609-L616 + m_TextureAlignment = (params->videoFormat & VIDEO_FORMAT_MASK_H264) ? 
16 : 128; + { m_HwFramesContext = av_hwframe_ctx_alloc(m_HwDeviceContext); if (!m_HwFramesContext) { @@ -1851,17 +1848,6 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type) return; } - SDL_AtomicLock(&m_OverlayLock); - ComPtr oldTexture = m_OverlayTextures[type]; - m_OverlayTextures[type] = nullptr; - - ComPtr oldVertexBuffer = m_OverlayVertexBuffers[type]; - m_OverlayVertexBuffers[type] = nullptr; - - ComPtr oldTextureResourceView = m_OverlayTextureResourceViews[type]; - m_OverlayTextureResourceViews[type] = nullptr; - SDL_AtomicUnlock(&m_OverlayLock); - // If the overlay is disabled, we're done if (!overlayEnabled) { SDL_FreeSurface(newSurface); @@ -2325,12 +2311,12 @@ bool D3D11VARenderer::setupRenderingResources() float vMax = m_BindDecoderOutputTextures ? ((float)m_DecoderParams.height / FFALIGN(m_DecoderParams.height, m_TextureAlignment)) : 1.0f; VERTEX verts[] = - { - {renderRect.x, renderRect.y, 0, vMax}, - {renderRect.x, renderRect.y+renderRect.h, 0, 0}, - {renderRect.x+renderRect.w, renderRect.y, uMax, vMax}, - {renderRect.x+renderRect.w, renderRect.y+renderRect.h, uMax, 0}, - }; + { + {renderRect.x, renderRect.y, 0, vMax}, + {renderRect.x, renderRect.y+renderRect.h, 0, 0}, + {renderRect.x+renderRect.w, renderRect.y, uMax, vMax}, + {renderRect.x+renderRect.w, renderRect.y+renderRect.h, uMax, 0}, + }; D3D11_BUFFER_DESC vbDesc = {}; vbDesc.ByteWidth = sizeof(verts); @@ -2371,11 +2357,10 @@ bool D3D11VARenderer::setupRenderingResources() D3D11_SUBRESOURCE_DATA constData = {}; constData.pSysMem = chromaUVMax; - ID3D11Buffer* constantBuffer; - HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, &constantBuffer); + ComPtr constantBuffer; + HRESULT hr = m_Device->CreateBuffer(&constDesc, &constData, constantBuffer.GetAddressOf()); if (SUCCEEDED(hr)) { - m_DeviceContext->PSSetConstantBuffers(0, 1, &constantBuffer); - constantBuffer->Release(); + m_DeviceContext->PSSetConstantBuffers(0, 1, constantBuffer.GetAddressOf()); } else { 
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, From 81de32e84fb473dae6eb4748fd28c272af19185b Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 2 Aug 2024 23:40:41 +0200 Subject: [PATCH 43/53] For Intel GPU, force binding while using VSR --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 110dec2c3..952d1e8f8 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -351,7 +351,7 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter // Skip copying to our own internal texture on Intel GPUs due to // significant performance impact of the extra copy. See: // https://github.com/moonlight-stream/moonlight-qt/issues/1304 - m_BindDecoderOutputTextures = adapterDesc.VendorId == 0x8086; + m_BindDecoderOutputTextures = (adapterDesc.VendorId == 0x8086) && !m_VideoEnhancement->isVideoEnhancementEnabled(); } else { SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION, @@ -1043,7 +1043,6 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params) if(m_BindDecoderOutputTextures){ // Disable Video enhancement as we do not copy the frame to process it m_VideoEnhancement->enableVideoEnhancement(false); - m_VideoEnhancement->enableUIvisible(false); } // Set VSR and HDR From 847763176e2b6d4acfa93e29c0566c0e80518114 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sat, 3 Aug 2024 02:19:36 +0200 Subject: [PATCH 44/53] Clean code --- app/streaming/video/ffmpeg-renderers/d3d11va.h | 7 ------- 1 file changed, 7 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.h b/app/streaming/video/ffmpeg-renderers/d3d11va.h index a39e07292..46f80e7b8 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.h +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.h @@ -72,13 +72,6 @@ class D3D11VARenderer : public IFFmpegRenderer 
int m_AdapterIndex = 0; int m_OutputIndex = 0; - // Microsoft::WRL::ComPtr m_Factory; - // Microsoft::WRL::ComPtr m_SwapChain; - // // Cannot convert to ComPtr because of av_buffer_unref() - // ID3D11Device* m_Device; - // ID3D11DeviceContext* m_DeviceContext; - // ID3D11RenderTargetView* m_RenderTargetView; - Microsoft::WRL::ComPtr m_Factory; Microsoft::WRL::ComPtr m_Device; Microsoft::WRL::ComPtr m_SwapChain; From f79a57c5ccfd8e1cbb3335805e8ebdf2516e3765 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 23 Aug 2024 12:55:33 +0200 Subject: [PATCH 45/53] Submodules merge --- app/SDL_GameControllerDB | 2 +- h264bitstream/h264bitstream | 2 +- libs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/app/SDL_GameControllerDB b/app/SDL_GameControllerDB index e5a5fa2ac..b4001f8b2 160000 --- a/app/SDL_GameControllerDB +++ b/app/SDL_GameControllerDB @@ -1 +1 @@ -Subproject commit e5a5fa2ac6e645d72c619ea99520a3a4586ee005 +Subproject commit b4001f8b2dd55a7225c736dae043ab4e737bf0ff diff --git a/h264bitstream/h264bitstream b/h264bitstream/h264bitstream index 34f3c58af..70124d305 160000 --- a/h264bitstream/h264bitstream +++ b/h264bitstream/h264bitstream @@ -1 +1 @@ -Subproject commit 34f3c58afa3c47b6cf0a49308a68cbf89c5e0bff +Subproject commit 70124d3051ba45e6b326264f0b25e6f48a7479e7 diff --git a/libs b/libs index 66de178aa..77f06dd92 160000 --- a/libs +++ b/libs @@ -1 +1 @@ -Subproject commit 66de178aae0acb706e2580f4467cfcd1bac4c277 +Subproject commit 77f06dd92d960881c912fa8483ce2996cbdb9802 From bdf5a2710d8b8a3dc01a8f4d76c4f1db5d42cba3 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 23 Aug 2024 13:24:41 +0200 Subject: [PATCH 46/53] Code fixes --- app/streaming/session.cpp | 4 ++-- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/app/streaming/session.cpp b/app/streaming/session.cpp index b55acd0c8..c21aa313c 100644 --- a/app/streaming/session.cpp +++ b/app/streaming/session.cpp 
@@ -407,10 +407,10 @@ void Session::getDecoderInfo(SDL_Window* window, // that supports HDR rendering with software decoded frames. if (chooseDecoder(StreamingPreferences::VDS_FORCE_SOFTWARE, window, VIDEO_FORMAT_H265_MAIN10, 1920, 1080, 60, - false, false, true, decoder) || + false, false, false, true, decoder) || chooseDecoder(StreamingPreferences::VDS_FORCE_SOFTWARE, window, VIDEO_FORMAT_AV1_MAIN10, 1920, 1080, 60, - false, false, true, decoder)) { + false, false, false, true, decoder)) { isHdrSupported = decoder->isHdrSupported(); delete decoder; } diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 688a583e4..1c4288ed2 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -2073,7 +2073,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) break; case VIDEO_FORMAT_H265_REXT8_444: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&k_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN_444, DXGI_FORMAT_AYUV, &supported))) + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&k_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN_444, DXGI_FORMAT_AYUV, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC Main 444 8-bit decoding via D3D11VA"); @@ -2087,7 +2087,7 @@ bool D3D11VARenderer::checkDecoderSupport(IDXGIAdapter* adapter) break; case VIDEO_FORMAT_H265_REXT10_444: - if (FAILED(videoDevice->CheckVideoDecoderFormat(&k_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10_444, DXGI_FORMAT_Y410, &supported))) { + if (FAILED(m_VideoDevice->CheckVideoDecoderFormat(&k_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10_444, DXGI_FORMAT_Y410, &supported))) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "GPU doesn't support HEVC Main 444 10-bit decoding via D3D11VA"); return false; From f5301fb75fc72e1fbe6ae95d50f61d8cf06cb078 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 23 Aug 2024 15:42:17 +0200 Subject: [PATCH 47/53] Move pixBuf to the method 
scope As pixBuf is used for Video enhancement, we need to declare the variable at the method scope instead of the condition. --- app/streaming/video/ffmpeg-renderers/vt_metal.mm | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/vt_metal.mm b/app/streaming/video/ffmpeg-renderers/vt_metal.mm index 8e1848045..bb09d4e35 100644 --- a/app/streaming/video/ffmpeg-renderers/vt_metal.mm +++ b/app/streaming/video/ffmpeg-renderers/vt_metal.mm @@ -542,8 +542,8 @@ virtual void renderFrame(AVFrame* frame) override size_t planes = getFramePlaneCount(frame); SDL_assert(planes <= MAX_VIDEO_PLANES); - if (frame->format == AV_PIX_FMT_VIDEOTOOLBOX) { - CVPixelBufferRef pixBuf = reinterpret_cast(frame->data[3]); + CVPixelBufferRef pixBuf = reinterpret_cast(frame->data[3]); + if (frame->format == AV_PIX_FMT_VIDEOTOOLBOX) { // Create Metal textures for the planes of the CVPixelBuffer for (size_t i = 0; i < planes; i++) { @@ -620,7 +620,7 @@ virtual void renderFrame(AVFrame* frame) override id commandBuffer = [m_CommandQueue commandBuffer]; id renderEncoder = [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor]; - if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + if(frame->format == AV_PIX_FMT_VIDEOTOOLBOX && m_VideoEnhancement->isVideoEnhancementEnabled()){ m_LumaWidth = CVPixelBufferGetWidthOfPlane(pixBuf, 0); m_LumaHeight = CVPixelBufferGetHeightOfPlane(pixBuf, 0); m_ChromaWidth = CVPixelBufferGetWidthOfPlane(pixBuf, 1); @@ -758,7 +758,7 @@ virtual void renderFrame(AVFrame* frame) override }]; } - if(m_VideoEnhancement->isVideoEnhancementEnabled()){ + if(frame->format == AV_PIX_FMT_VIDEOTOOLBOX && m_VideoEnhancement->isVideoEnhancementEnabled()){ m_LumaUpscaler.colorTexture = m_LumaTexture; m_LumaUpscaler.outputTexture = m_LumaUpscaledTexture; m_ChromaUpscaler.colorTexture = m_ChromaTexture; From da19eea1d212bf0c5a472f361b48fc4eca055030 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Tue, 24 Sep 2024 
23:40:17 +0200 Subject: [PATCH 48/53] Merge remote-tracking branch 'upstream/master' into vsr --- libs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs b/libs index 77f06dd92..a27d6a799 160000 --- a/libs +++ b/libs @@ -1 +1 @@ -Subproject commit 77f06dd92d960881c912fa8483ce2996cbdb9802 +Subproject commit a27d6a7995ef504963fa9058c69e6ba1b449cc0f From 009ad8a531461a4ed0a37ffb06992d6b5e7dc981 Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Fri, 27 Sep 2024 23:17:51 +0200 Subject: [PATCH 49/53] Fix memory leak on MacOS Removing a loop "for" which was instantiating 2 times the same variables. --- .../video/ffmpeg-renderers/vt_metal.mm | 121 +++++++++--------- 1 file changed, 59 insertions(+), 62 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/vt_metal.mm b/app/streaming/video/ffmpeg-renderers/vt_metal.mm index 228c9b425..ea833e630 100644 --- a/app/streaming/video/ffmpeg-renderers/vt_metal.mm +++ b/app/streaming/video/ffmpeg-renderers/vt_metal.mm @@ -538,75 +538,70 @@ virtual void renderFrame(AVFrame* frame) override return; } - std::array cvMetalTextures; size_t planes = getFramePlaneCount(frame); SDL_assert(planes <= MAX_VIDEO_PLANES); CVPixelBufferRef pixBuf = reinterpret_cast(frame->data[3]); - if (frame->format == AV_PIX_FMT_VIDEOTOOLBOX) { + if (frame->format == AV_PIX_FMT_VIDEOTOOLBOX) { // Create Metal textures for the planes of the CVPixelBuffer - for (size_t i = 0; i < planes; i++) { - switch (CVPixelBufferGetPixelFormatType(pixBuf)) { - case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: - case kCVPixelFormatType_444YpCbCr8BiPlanarVideoRange: - case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: - case kCVPixelFormatType_444YpCbCr8BiPlanarFullRange: - case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange: - case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange: - m_LumaPixelFormart = MTLPixelFormatR8Unorm; - m_ChromaPixelFormart = MTLPixelFormatRG8Unorm; - break; - case 
kCVPixelFormatType_420YpCbCr10BiPlanarFullRange: - case kCVPixelFormatType_444YpCbCr10BiPlanarFullRange: - case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange: - case kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange: - m_LumaPixelFormart = MTLPixelFormatR16Unorm; - m_ChromaPixelFormart = MTLPixelFormatRG16Unorm; - break; - default: - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "Unknown pixel format: %x", - CVPixelBufferGetPixelFormatType(pixBuf)); - return; - } + switch (CVPixelBufferGetPixelFormatType(pixBuf)) { + case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: + case kCVPixelFormatType_444YpCbCr8BiPlanarVideoRange: + case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: + case kCVPixelFormatType_444YpCbCr8BiPlanarFullRange: + case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange: + case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange: + m_LumaPixelFormart = MTLPixelFormatR8Unorm; + m_ChromaPixelFormart = MTLPixelFormatRG8Unorm; + break; + case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange: + case kCVPixelFormatType_444YpCbCr10BiPlanarFullRange: + case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange: + case kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange: + m_LumaPixelFormart = MTLPixelFormatR16Unorm; + m_ChromaPixelFormart = MTLPixelFormatRG16Unorm; + break; + default: + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "Unknown pixel format: %x", + CVPixelBufferGetPixelFormatType(pixBuf)); + return; + } - CVReturn err; - - err = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, - m_TextureCache, - pixBuf, - nullptr, - m_LumaPixelFormart, - CVPixelBufferGetWidthOfPlane(pixBuf, 0), - CVPixelBufferGetHeightOfPlane(pixBuf, 0), - 0, - &m_cvLumaTexture); - if (err != kCVReturnSuccess) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "CVMetalTextureCacheCreateTextureFromImage() failed: %d", - err); - return; - } - m_LumaTexture = CVMetalTextureGetTexture(m_cvLumaTexture); + CVReturn err; + + err = 
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + m_TextureCache, + pixBuf, + nullptr, + m_LumaPixelFormart, + CVPixelBufferGetWidthOfPlane(pixBuf, 0), + CVPixelBufferGetHeightOfPlane(pixBuf, 0), + 0, + &m_cvLumaTexture); + if (err != kCVReturnSuccess) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "CVMetalTextureCacheCreateTextureFromImage() failed: %d", + err); + return; + } - err = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, - m_TextureCache, - pixBuf, - nullptr, - m_ChromaPixelFormart, - CVPixelBufferGetWidthOfPlane(pixBuf, 1), - CVPixelBufferGetHeightOfPlane(pixBuf, 1), - 1, - &m_cvChromaTexture); - if (err != kCVReturnSuccess) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "CVMetalTextureCacheCreateTextureFromImage() failed: %d", - err); - return; - } - m_ChromaTexture = CVMetalTextureGetTexture(m_cvChromaTexture); + err = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + m_TextureCache, + pixBuf, + nullptr, + m_ChromaPixelFormart, + CVPixelBufferGetWidthOfPlane(pixBuf, 1), + CVPixelBufferGetHeightOfPlane(pixBuf, 1), + 1, + &m_cvChromaTexture); + if (err != kCVReturnSuccess) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "CVMetalTextureCacheCreateTextureFromImage() failed: %d", + err); + return; } } @@ -678,9 +673,11 @@ virtual void renderFrame(AVFrame* frame) override // Use scaled textures [renderEncoder setFragmentTexture:m_LumaUpscaledTexture atIndex:0]; [renderEncoder setFragmentTexture:m_ChromaUpscaledTexture atIndex:1]; + m_LumaTexture = CVMetalTextureGetTexture(m_cvLumaTexture); + m_ChromaTexture = CVMetalTextureGetTexture(m_cvChromaTexture); } else { - [renderEncoder setFragmentTexture:m_LumaTexture atIndex:0]; - [renderEncoder setFragmentTexture:m_ChromaTexture atIndex:1]; + [renderEncoder setFragmentTexture:CVMetalTextureGetTexture(m_cvLumaTexture) atIndex:0]; + [renderEncoder setFragmentTexture:CVMetalTextureGetTexture(m_cvChromaTexture) atIndex:1]; } [commandBuffer addCompletedHandler:^(id) { 
From 77563e8830884f29479dfaf9677bf1bb9809f62e Mon Sep 17 00:00:00 2001 From: Bruno Martin Date: Sat, 28 Sep 2024 18:48:49 +0200 Subject: [PATCH 50/53] Fix duplicated variable --- app/streaming/video/ffmpeg-renderers/d3d11va.cpp | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp index 8887f9c60..c429808ab 100644 --- a/app/streaming/video/ffmpeg-renderers/d3d11va.cpp +++ b/app/streaming/video/ffmpeg-renderers/d3d11va.cpp @@ -367,19 +367,6 @@ bool D3D11VARenderer::createDeviceByAdapterIndex(int adapterIndex, bool* adapter goto Exit; } - bool ok; - m_BindDecoderOutputTextures = !!qEnvironmentVariableIntValue("D3D11VA_FORCE_BIND", &ok); - if (!ok) { - // Skip copying to our own internal texture on Intel GPUs due to - // significant performance impact of the extra copy. See: - // https://github.com/moonlight-stream/moonlight-qt/issues/1304 - m_BindDecoderOutputTextures = (adapterDesc.VendorId == 0x8086) && !m_VideoEnhancement->isVideoEnhancementEnabled(); - } - else { - SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION, - "Using D3D11VA_FORCE_BIND to override default bind/copy logic"); - } - SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "Detected GPU %d: %S (%x:%x)", adapterIndex, From 40070d2b5a2973144768ac644b4cdab77a31b63f Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Wed, 13 Nov 2024 02:08:51 +0100 Subject: [PATCH 51/53] Disable Video enhancement when using Software video decoder As only GPU (Hardware) acceleration is leveraging video enhancement feature, we disable the enhancement when Software decoding is selected --- app/gui/SettingsView.qml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index bf0d396a0..7406647a5 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -831,6 +831,7 @@ Flickable { checked: { return SystemProperties.isVideoEnhancementCapable() && 
StreamingPreferences.videoEnhancement } + property bool keepValue: checked; onCheckedChanged: { StreamingPreferences.videoEnhancement = checked @@ -1559,6 +1560,16 @@ Flickable { StreamingPreferences.videoDecoderSelection = decoderListModel.get(currentIndex).val } } + onCurrentIndexChanged: { + if(decoderListModel.get(currentIndex).val === StreamingPreferences.VDS_FORCE_SOFTWARE){ + videoEnhancementCheck.enabled = false; + videoEnhancementCheck.keepValue = videoEnhancementCheck.checked; + videoEnhancementCheck.checked = false; + } else { + videoEnhancementCheck.enabled = true; + videoEnhancementCheck.checked = videoEnhancementCheck.keepValue; + } + } } Label { From fff78a2a3ed8db2d7a32553907072706cbceb91a Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Wed, 13 Nov 2024 06:42:33 +0100 Subject: [PATCH 52/53] Enable CLI command "--video-enhancement" --- app/cli/commandlineparser.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/app/cli/commandlineparser.cpp b/app/cli/commandlineparser.cpp index c55c7b0a6..dfe8e6cb8 100644 --- a/app/cli/commandlineparser.cpp +++ b/app/cli/commandlineparser.cpp @@ -367,6 +367,7 @@ void StreamCommandLineParser::parse(const QStringList &args, StreamingPreference parser.addToggleOption("game-optimization", "game optimizations"); parser.addToggleOption("audio-on-host", "audio on host PC"); parser.addToggleOption("frame-pacing", "frame pacing"); + parser.addToggleOption("video-enhancement", "Enhance video with AI"); parser.addToggleOption("mute-on-focus-loss", "mute audio when Moonlight window loses focus"); parser.addToggleOption("background-gamepad", "background gamepad input"); parser.addToggleOption("reverse-scroll-direction", "inverted scroll direction"); From 4a4895391b60a49bf61f76c44857bca08f92e0ef Mon Sep 17 00:00:00 2001 From: Bruno MARTIN Date: Sat, 16 Nov 2024 19:04:39 +0100 Subject: [PATCH 53/53] Disable VSR checkbox for unsupported GPU When the GPU doesn't support the feature, we gray out the checkbox. 
--- app/gui/SettingsView.qml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/gui/SettingsView.qml b/app/gui/SettingsView.qml index 7406647a5..7e393360b 100644 --- a/app/gui/SettingsView.qml +++ b/app/gui/SettingsView.qml @@ -851,6 +851,8 @@ Flickable { if (!SystemProperties.isVideoEnhancementCapable()){ // VSR or SDR->HDR feature could not be initialized by any GPU available text = qsTr("Video AI-Enhancement (Not supported by the GPU)") + enabled = false; + checked = false; } else if(SystemProperties.isVideoEnhancementExperimental()){ // Indicate if the feature is available but not officially deployed by the Vendor text = qsTr("Video AI-Enhancement (Experimental)")