d3d12: Add pipe_video_codec::process_frame implementation

Reviewed-by: Jesse Natalie <jenatali@microsoft.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/17557>
Sil Vilerino 2022-07-15 08:03:56 -04:00 committed by Marge Bot
parent 345fd92092
commit 323cf5a68d
4 changed files with 833 additions and 0 deletions
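
Below is a minimal usage sketch (not part of this commit's diff) of how a gallium frontend might drive the new process_frame path: begin_frame latches the output target, each process_frame call queues one input stream, end_frame records the barriers and the ProcessFrames1 call, and flush submits on the video process queue. The helper name blit_with_video_processor and the enum values PIPE_VIDEO_VPP_ORIENTATION_DEFAULT / PIPE_VIDEO_VPP_BLEND_MODE_NONE are assumptions for illustration; the pipe_vpp_desc field accesses mirror those made in d3d12_video_processor_process_frame.

// Hypothetical helper, not in the diff: single-input blit through the new
// d3d12 video processor callbacks.
static void
blit_with_video_processor(struct pipe_video_codec *codec,
                          struct pipe_video_buffer *src,
                          struct pipe_video_buffer *dst,
                          unsigned width, unsigned height)
{
   struct pipe_vpp_desc desc = {};
   desc.src_region.x0 = 0;      /* left   */
   desc.src_region.y0 = 0;      /* top    */
   desc.src_region.x1 = width;  /* right  */
   desc.src_region.y1 = height; /* bottom */
   desc.dst_region = desc.src_region;
   desc.orientation = PIPE_VIDEO_VPP_ORIENTATION_DEFAULT; /* assumed enum value */
   desc.blend.mode = PIPE_VIDEO_VPP_BLEND_MODE_NONE;      /* assumed enum value */

   codec->begin_frame(codec, dst, NULL);    /* latch the output target           */
   codec->process_frame(codec, src, &desc); /* queue one input stream            */
   codec->end_frame(codec, dst, NULL);      /* record barriers + ProcessFrames1  */
   codec->flush(codec);                     /* submit on the video process queue */
}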


@@ -34,6 +34,7 @@
#define D3D12_IGNORE_SDK_LAYERS
#include <directx/d3d12.h>
#include <directx/d3d12video.h>
#if defined(__cplusplus)
#if !defined(_WIN32) || defined(_MSC_VER) || D3D12_SDK_VERSION < 606
@@ -62,6 +63,11 @@ GetAdapterLuid(ID3D12Device *dev)
{
return dev->GetAdapterLuid();
}
inline D3D12_VIDEO_PROCESS_OUTPUT_STREAM_DESC
GetOutputStreamDesc(ID3D12VideoProcessor *proc)
{
return proc->GetOutputStreamDesc();
}
#else
inline D3D12_CPU_DESCRIPTOR_HANDLE
GetCPUDescriptorHandleForHeapStart(ID3D12DescriptorHeap *heap)
@@ -98,6 +104,13 @@ GetAdapterLuid(ID3D12Device *dev)
dev->GetAdapterLuid(&ret);
return ret;
}
inline D3D12_VIDEO_PROCESS_OUTPUT_STREAM_DESC
GetOutputStreamDesc(ID3D12VideoProcessor *proc)
{
D3D12_VIDEO_PROCESS_OUTPUT_STREAM_DESC ret;
proc->GetOutputStreamDesc(&ret);
return ret;
}
#endif
#endif


@@ -0,0 +1,680 @@
/*
* Copyright © Microsoft Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#include "d3d12_context.h"
#include "d3d12_screen.h"
#include "d3d12_video_proc.h"
#include "d3d12_residency.h"
#include "d3d12_util.h"
#include "d3d12_resource.h"
#include "d3d12_video_buffer.h"
void
d3d12_video_processor_begin_frame(struct pipe_video_codec * codec,
struct pipe_video_buffer *target,
struct pipe_picture_desc *picture)
{
struct d3d12_video_processor * pD3D12Proc = (struct d3d12_video_processor *) codec;
debug_printf("[d3d12_video_processor] d3d12_video_processor_begin_frame - "
"fenceValue: %d\n",
pD3D12Proc->m_fenceValue);
// Set up the process frame arguments for the output/target texture.
struct d3d12_video_buffer *pOutputVideoBuffer = (struct d3d12_video_buffer *) target;
// Make the resources permanently resident for video use
d3d12_promote_to_permanent_residency(pD3D12Proc->m_pD3D12Screen, pOutputVideoBuffer->texture);
ID3D12Resource *pDstD3D12Res = d3d12_resource_resource(pOutputVideoBuffer->texture);
auto dstDesc = GetDesc(pDstD3D12Res);
pD3D12Proc->m_OutputArguments = {
{
{
pDstD3D12Res, // ID3D12Resource *pTexture2D;
0, // UINT Subresource;
},
{
NULL, // ID3D12Resource *pTexture2D;
0 // UINT Subresource;
}
},
{ 0, 0, (int) dstDesc.Width, (int) dstDesc.Height }
};
debug_printf("d3d12_video_processor_begin_frame: Beginning new scene with Output ID3D12Resource: %p (%d %d)\n", pDstD3D12Res, (int) dstDesc.Width, (int) dstDesc.Height);
}
void
d3d12_video_processor_end_frame(struct pipe_video_codec * codec,
struct pipe_video_buffer *target,
struct pipe_picture_desc *picture)
{
struct d3d12_video_processor * pD3D12Proc = (struct d3d12_video_processor *) codec;
debug_printf("[d3d12_video_processor] d3d12_video_processor_end_frame - "
"fenceValue: %d\n",
pD3D12Proc->m_fenceValue);
auto curOutputDesc = GetOutputStreamDesc(pD3D12Proc->m_spVideoProcessor.Get());
auto curOutputTexFmt = GetDesc(pD3D12Proc->m_OutputArguments.OutputStream[0].pTexture2D).Format;
bool inputFmtsMatch = pD3D12Proc->m_inputStreamDescs.size() == pD3D12Proc->m_ProcessInputs.size();
unsigned curInputIdx = 0;
while( (curInputIdx < pD3D12Proc->m_inputStreamDescs.size()) && inputFmtsMatch)
{
inputFmtsMatch = inputFmtsMatch && (pD3D12Proc->m_inputStreamDescs[curInputIdx].Format == GetDesc(pD3D12Proc->m_ProcessInputs[curInputIdx].InputStream[0].pTexture2D).Format);
curInputIdx++;
}
bool inputCountMatches = (pD3D12Proc->m_ProcessInputs.size() == pD3D12Proc->m_spVideoProcessor->GetNumInputStreamDescs());
bool outputFmtMatches = (curOutputDesc.Format == curOutputTexFmt);
bool needsVPRecreation = (
!inputCountMatches // Requested batch has different number of Inputs to be blit'd
|| !outputFmtMatches // output texture format different than vid proc object expects
|| !inputFmtsMatch // inputs texture formats different than vid proc object expects
);
if(needsVPRecreation) {
debug_printf("[d3d12_video_processor] d3d12_video_processor_end_frame - Attempting to re-create ID3D12VideoProcessor "
"input count matches %d inputFmtsMatch: %d outputFmtsMatch %d \n", inputCountMatches, inputFmtsMatch, outputFmtMatches);
DXGI_COLOR_SPACE_TYPE InputColorSpace = DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;
DXGI_FORMAT OutputFormat = curOutputTexFmt;
DXGI_COLOR_SPACE_TYPE OutputColorSpace = DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;
std::vector<DXGI_FORMAT> InputFormats;
for(D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1 curInput : pD3D12Proc->m_ProcessInputs)
{
InputFormats.push_back(GetDesc(curInput.InputStream[0].pTexture2D).Format);
}
if(!d3d12_video_processor_check_caps_and_create_processor(pD3D12Proc, InputFormats, InputColorSpace, OutputFormat, OutputColorSpace))
{
debug_printf("[d3d12_video_processor] d3d12_video_processor_end_frame - Failure when "
" trying to re-create the ID3D12VideoProcessor for current batch streams configuration\n");
assert(false);
}
}
// Schedule barrier transitions
std::vector<D3D12_RESOURCE_BARRIER> barrier_transitions;
barrier_transitions.push_back(CD3DX12_RESOURCE_BARRIER::Transition(
pD3D12Proc->m_OutputArguments.OutputStream[0].pTexture2D,
D3D12_RESOURCE_STATE_COMMON,
D3D12_RESOURCE_STATE_VIDEO_PROCESS_WRITE));
for(D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1 curInput : pD3D12Proc->m_ProcessInputs)
barrier_transitions.push_back(CD3DX12_RESOURCE_BARRIER::Transition(
curInput.InputStream[0].pTexture2D,
D3D12_RESOURCE_STATE_COMMON,
D3D12_RESOURCE_STATE_VIDEO_PROCESS_READ));
pD3D12Proc->m_spCommandList->ResourceBarrier(static_cast<uint32_t>(barrier_transitions.size()), barrier_transitions.data());
// Schedule process operation
pD3D12Proc->m_spCommandList->ProcessFrames1(pD3D12Proc->m_spVideoProcessor.Get(), &pD3D12Proc->m_OutputArguments, pD3D12Proc->m_ProcessInputs.size(), pD3D12Proc->m_ProcessInputs.data());
// Schedule reverse (back to common) transitions before command list closes for current frame
for (auto &BarrierDesc : barrier_transitions)
std::swap(BarrierDesc.Transition.StateBefore, BarrierDesc.Transition.StateAfter);
pD3D12Proc->m_spCommandList->ResourceBarrier(static_cast<uint32_t>(barrier_transitions.size()), barrier_transitions.data());
}
void
d3d12_video_processor_process_frame(struct pipe_video_codec *codec,
struct pipe_video_buffer *input_texture,
const struct pipe_vpp_desc *process_properties)
{
struct d3d12_video_processor * pD3D12Proc = (struct d3d12_video_processor *) codec;
// Get the underlying resources from the pipe_video_buffers
struct d3d12_video_buffer *pInputVideoBuffer = (struct d3d12_video_buffer *) input_texture;
// Make the resources permanently resident for video use
d3d12_promote_to_permanent_residency(pD3D12Proc->m_pD3D12Screen, pInputVideoBuffer->texture);
ID3D12Resource *pSrcD3D12Res = d3d12_resource_resource(pInputVideoBuffer->texture);
// y0 = top
// x0 = left
// x1 = right
// y1 = bottom
debug_printf("d3d12_video_processor_process_frame: Adding Input ID3D12Resource: %p to scene (Output target %p)\n", pSrcD3D12Res, pD3D12Proc->m_OutputArguments.OutputStream[0].pTexture2D);
debug_printf("d3d12_video_processor_process_frame: Input box: top: %d left: %d right: %d bottom: %d\n", process_properties->src_region.y0, process_properties->src_region.x0, process_properties->src_region.x1, process_properties->src_region.y1);
debug_printf("d3d12_video_processor_process_frame: Output box: top: %d left: %d right: %d bottom: %d\n", process_properties->dst_region.y0, process_properties->dst_region.x0, process_properties->dst_region.x1, process_properties->dst_region.y1);
debug_printf("d3d12_video_processor_process_frame: Requested alpha blend mode %d global alpha: %f \n", process_properties->blend.mode, process_properties->blend.global_alpha);
// Set up the process frame arguments for the current input texture.
unsigned curInputStreamIndex = pD3D12Proc->m_ProcessInputs.size();
D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1 InputArguments = {
{
{ // D3D12_VIDEO_PROCESS_INPUT_STREAM InputStream[0];
pSrcD3D12Res, // ID3D12Resource *pTexture2D;
0, // UINT Subresource
{//D3D12_VIDEO_PROCESS_REFERENCE_SET ReferenceSet;
0, //UINT NumPastFrames;
NULL, //ID3D12Resource **ppPastFrames;
NULL, // UINT *pPastSubresources;
0, //UINT NumFutureFrames;
NULL, //ID3D12Resource **ppFutureFrames;
NULL //UINT *pFutureSubresources;
}
},
{ // D3D12_VIDEO_PROCESS_INPUT_STREAM InputStream[1];
NULL, //ID3D12Resource *pTexture2D;
0, //UINT Subresource;
{//D3D12_VIDEO_PROCESS_REFERENCE_SET ReferenceSet;
0, //UINT NumPastFrames;
NULL, //ID3D12Resource **ppPastFrames;
NULL, // UINT *pPastSubresources;
0, //UINT NumFutureFrames;
NULL, //ID3D12Resource **ppFutureFrames;
NULL //UINT *pFutureSubresources;
}
}
},
{ // D3D12_VIDEO_PROCESS_TRANSFORM Transform;
// y0 = top
// x0 = left
// x1 = right
// y1 = bottom
// typedef struct _RECT
// {
// int left;
// int top;
// int right;
// int bottom;
// } RECT;
{ process_properties->src_region.x0/*left*/, process_properties->src_region.y0/*top*/, process_properties->src_region.x1/*right*/, process_properties->src_region.y1/*bottom*/ },
{ process_properties->dst_region.x0/*left*/, process_properties->dst_region.y0/*top*/, process_properties->dst_region.x1/*right*/, process_properties->dst_region.y1/*bottom*/ }, // D3D12_RECT DestinationRectangle;
pD3D12Proc->m_inputStreamDescs[curInputStreamIndex].EnableOrientation ? d3d12_video_processor_convert_pipe_rotation(process_properties->orientation) : D3D12_VIDEO_PROCESS_ORIENTATION_DEFAULT, // D3D12_VIDEO_PROCESS_ORIENTATION Orientation;
},
D3D12_VIDEO_PROCESS_INPUT_STREAM_FLAG_NONE,
{ // D3D12_VIDEO_PROCESS_INPUT_STREAM_RATE RateInfo;
0,
0,
},
// INT FilterLevels[32];
{
0, // Trailing zeroes on the rest
},
//D3D12_VIDEO_PROCESS_ALPHA_BLENDING;
{
(process_properties->blend.mode == PIPE_VIDEO_VPP_BLEND_MODE_GLOBAL_ALPHA),
process_properties->blend.global_alpha
},
// D3D12_VIDEO_FIELD_TYPE FieldType
D3D12_VIDEO_FIELD_TYPE_NONE,
};
debug_printf("ProcessFrame InArgs Orientation %d \n\tSrc top: %d left: %d right: %d bottom: %d\n\tDst top: %d left: %d right: %d bottom: %d\n", InputArguments.Transform.Orientation,
InputArguments.Transform.SourceRectangle.top, InputArguments.Transform.SourceRectangle.left, InputArguments.Transform.SourceRectangle.right, InputArguments.Transform.SourceRectangle.bottom,
InputArguments.Transform.DestinationRectangle.top, InputArguments.Transform.DestinationRectangle.left, InputArguments.Transform.DestinationRectangle.right, InputArguments.Transform.DestinationRectangle.bottom);
pD3D12Proc->m_ProcessInputs.push_back(InputArguments);
///
/// Mark the batch as needing a GPU flush; the actual submission and CPU wait
/// happen later in d3d12_video_processor_flush.
///
pD3D12Proc->m_needsGPUFlush = true;
}
void
d3d12_video_processor_destroy(struct pipe_video_codec * codec)
{
if (codec == nullptr) {
return;
}
d3d12_video_processor_flush(codec); // Flush pending work before destroying.
// Delete so the dtor runs and the ComPtr members release their references
struct d3d12_video_processor * pD3D12Proc = (struct d3d12_video_processor *) codec;
delete pD3D12Proc;
}
void
d3d12_video_processor_flush(struct pipe_video_codec * codec)
{
struct d3d12_video_processor * pD3D12Proc = (struct d3d12_video_processor *) codec;
assert(pD3D12Proc);
assert(pD3D12Proc->m_spD3D12VideoDevice);
assert(pD3D12Proc->m_spCommandQueue);
// Flush any pending pipe_context work (e.g. buffer_subdata uploads) and make m_spCommandQueue wait for its GPU completion
// before executing the current batch below. Input objects coming from the pipe_context (i.e. the input textures) must be fully written before the processor can read them.
struct pipe_fence_handle *completion_fence = NULL;
debug_printf("[d3d12_video_processor] d3d12_video_processor_flush - Flushing pD3D12Proc->m_pD3D12Context->base. and GPU sync between Video/Context queues before flushing Video Process Queue.\n");
pD3D12Proc->m_pD3D12Context->base.flush(&pD3D12Proc->m_pD3D12Context->base, &completion_fence, PIPE_FLUSH_ASYNC | PIPE_FLUSH_HINT_FINISH);
assert(completion_fence);
struct d3d12_fence *casted_completion_fence = d3d12_fence(completion_fence);
pD3D12Proc->m_spCommandQueue->Wait(casted_completion_fence->cmdqueue_fence, casted_completion_fence->value);
debug_printf("[d3d12_video_processor] d3d12_video_processor_flush started. Will flush video queue work and CPU wait on "
"fenceValue: %d\n",
pD3D12Proc->m_fenceValue);
if (!pD3D12Proc->m_needsGPUFlush) {
debug_printf("[d3d12_video_processor] d3d12_video_processor_flush started. Nothing to flush, all up to date.\n");
} else {
HRESULT hr = pD3D12Proc->m_pD3D12Screen->dev->GetDeviceRemovedReason();
if (hr != S_OK) {
debug_printf("[d3d12_video_processor] d3d12_video_processor_flush"
" - D3D12Device was removed BEFORE commandlist "
"execution with HR %x.\n",
hr);
goto flush_fail;
}
// Close and execute the command list, then block this method on the CPU until
// the GPU is idle, before resetting the list and allocator for the next submission.
if (pD3D12Proc->m_transitionsBeforeCloseCmdList.size() > 0) {
pD3D12Proc->m_spCommandList->ResourceBarrier(pD3D12Proc->m_transitionsBeforeCloseCmdList.size(),
pD3D12Proc->m_transitionsBeforeCloseCmdList.data());
pD3D12Proc->m_transitionsBeforeCloseCmdList.clear();
}
hr = pD3D12Proc->m_spCommandList->Close();
if (FAILED(hr)) {
debug_printf("[d3d12_video_processor] d3d12_video_processor_flush - Can't close command list with HR %x\n", hr);
goto flush_fail;
}
ID3D12CommandList *ppCommandLists[1] = { pD3D12Proc->m_spCommandList.Get() };
pD3D12Proc->m_spCommandQueue->ExecuteCommandLists(1, ppCommandLists);
pD3D12Proc->m_spCommandQueue->Signal(pD3D12Proc->m_spFence.Get(), pD3D12Proc->m_fenceValue);
pD3D12Proc->m_spFence->SetEventOnCompletion(pD3D12Proc->m_fenceValue, nullptr);
debug_printf("[d3d12_video_processor] d3d12_video_processor_flush - ExecuteCommandLists finished on signal with "
"fenceValue: %d\n",
pD3D12Proc->m_fenceValue);
hr = pD3D12Proc->m_spCommandAllocator->Reset();
if (FAILED(hr)) {
debug_printf(
"[d3d12_video_processor] d3d12_video_processor_flush - resetting ID3D12CommandAllocator failed with HR %x\n",
hr);
goto flush_fail;
}
hr = pD3D12Proc->m_spCommandList->Reset(pD3D12Proc->m_spCommandAllocator.Get());
if (FAILED(hr)) {
debug_printf(
"[d3d12_video_processor] d3d12_video_processor_flush - resetting ID3D12GraphicsCommandList failed with HR %x\n",
hr);
goto flush_fail;
}
// Validate device was not removed
hr = pD3D12Proc->m_pD3D12Screen->dev->GetDeviceRemovedReason();
if (hr != S_OK) {
debug_printf("[d3d12_video_processor] d3d12_video_processor_flush"
" - D3D12Device was removed AFTER commandlist "
"execution with HR %x, but wasn't before.\n",
hr);
goto flush_fail;
}
debug_printf(
"[d3d12_video_processor] d3d12_video_processor_flush - GPU signaled execution finalized for fenceValue: %d\n",
pD3D12Proc->m_fenceValue);
pD3D12Proc->m_fenceValue++;
pD3D12Proc->m_needsGPUFlush = false;
}
pD3D12Proc->m_ProcessInputs.clear();
// Release the completion fence reference now that the wait has finished
if(completion_fence)
pD3D12Proc->m_pD3D12Screen->base.fence_reference(&pD3D12Proc->m_pD3D12Screen->base, &completion_fence, NULL);
return;
flush_fail:
debug_printf("[d3d12_video_processor] d3d12_video_processor_flush failed for fenceValue: %d\n", pD3D12Proc->m_fenceValue);
assert(false);
}
struct pipe_video_codec *
d3d12_video_processor_create(struct pipe_context *context, const struct pipe_video_codec *codec)
{
///
/// Initialize d3d12_video_processor
///
// Allocate with new so the ctor runs; otherwise the in-class member initializers in the declaration are lost
struct d3d12_video_processor *pD3D12Proc = new d3d12_video_processor;
pD3D12Proc->base = *codec;
pD3D12Proc->base.context = context;
pD3D12Proc->base.width = codec->width;
pD3D12Proc->base.height = codec->height;
pD3D12Proc->base.destroy = d3d12_video_processor_destroy;
pD3D12Proc->base.begin_frame = d3d12_video_processor_begin_frame;
pD3D12Proc->base.process_frame = d3d12_video_processor_process_frame;
pD3D12Proc->base.end_frame = d3d12_video_processor_end_frame;
pD3D12Proc->base.flush = d3d12_video_processor_flush;
///
/// Try initializing D3D12 Video device and check for device caps
///
struct d3d12_context *pD3D12Ctx = (struct d3d12_context *) context;
pD3D12Proc->m_pD3D12Context = pD3D12Ctx;
pD3D12Proc->m_pD3D12Screen = d3d12_screen(pD3D12Ctx->base.screen);
// Assume defaults for now, can re-create if necessary when d3d12_video_processor_end_frame kicks off the processing
DXGI_COLOR_SPACE_TYPE InputColorSpace = DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;
std::vector<DXGI_FORMAT> InputFormats = { DXGI_FORMAT_NV12 };
DXGI_FORMAT OutputFormat = DXGI_FORMAT_NV12;
DXGI_COLOR_SPACE_TYPE OutputColorSpace = DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;
///
/// Create processor objects
///
if (FAILED(pD3D12Proc->m_pD3D12Screen->dev->QueryInterface(
IID_PPV_ARGS(pD3D12Proc->m_spD3D12VideoDevice.GetAddressOf())))) {
debug_printf("[d3d12_video_processor] d3d12_video_create_processor - D3D12 Device has no Video support\n");
goto failed;
}
if (!d3d12_video_processor_check_caps_and_create_processor(pD3D12Proc, InputFormats, InputColorSpace, OutputFormat, OutputColorSpace)) {
debug_printf("[d3d12_video_processor] d3d12_video_create_processor - Failure on "
"d3d12_video_processor_check_caps_and_create_processor\n");
goto failed;
}
if (!d3d12_video_processor_create_command_objects(pD3D12Proc)) {
debug_printf(
"[d3d12_video_processor] d3d12_video_create_processor - Failure on d3d12_video_processor_create_command_objects\n");
goto failed;
}
debug_printf("[d3d12_video_processor] d3d12_video_create_processor - Created successfully!\n");
return &pD3D12Proc->base;
failed:
if (pD3D12Proc != nullptr) {
d3d12_video_processor_destroy(&pD3D12Proc->base);
}
return nullptr;
}
bool
d3d12_video_processor_check_caps_and_create_processor(struct d3d12_video_processor *pD3D12Proc,
std::vector<DXGI_FORMAT> InputFormats,
DXGI_COLOR_SPACE_TYPE InputColorSpace,
DXGI_FORMAT OutputFormat,
DXGI_COLOR_SPACE_TYPE OutputColorSpace)
{
HRESULT hr = S_OK;
D3D12_VIDEO_FIELD_TYPE FieldType = D3D12_VIDEO_FIELD_TYPE_NONE;
D3D12_VIDEO_FRAME_STEREO_FORMAT StereoFormat = D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE;
DXGI_RATIONAL FrameRate = { 30, 1 };
DXGI_RATIONAL AspectRatio = { 1, 1 };
struct ResolStruct {
uint Width;
uint Height;
};
ResolStruct resolutionsList[] = {
{ 8192, 8192 }, // 8k
{ 8192, 4320 }, // 8k - alternative
{ 7680, 4800 }, // 8k - alternative
{ 7680, 4320 }, // 8k - alternative
{ 4096, 2304 }, // 2160p (4K)
{ 4096, 2160 }, // 2160p (4K) - alternative
{ 2560, 1440 }, // 1440p
{ 1920, 1200 }, // 1200p
{ 1920, 1080 }, // 1080p
{ 1280, 720 }, // 720p
{ 800, 600 },
};
pD3D12Proc->m_SupportCaps =
{
0, // NodeIndex
{ resolutionsList[0].Width, resolutionsList[0].Height, { InputFormats[0], InputColorSpace } },
FieldType,
StereoFormat,
FrameRate,
{ OutputFormat, OutputColorSpace },
StereoFormat,
FrameRate,
};
uint32_t idxResol = 0;
bool bSupportsAny = false;
while ((idxResol < ARRAY_SIZE(resolutionsList)) && !bSupportsAny) {
pD3D12Proc->m_SupportCaps.InputSample.Width = resolutionsList[idxResol].Width;
pD3D12Proc->m_SupportCaps.InputSample.Height = resolutionsList[idxResol].Height;
if (SUCCEEDED(pD3D12Proc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_PROCESS_SUPPORT, &pD3D12Proc->m_SupportCaps, sizeof(pD3D12Proc->m_SupportCaps)))) {
bSupportsAny = ((pD3D12Proc->m_SupportCaps.SupportFlags & D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED) != 0);
}
idxResol++;
}
if ((pD3D12Proc->m_SupportCaps.SupportFlags & D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED) != D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED)
{
debug_printf("[d3d12_video_processor] d3d12_video_processor_check_caps_and_create_processor - D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED not returned by driver. "
"failed with SupportFlags %x\n",
pD3D12Proc->m_SupportCaps.SupportFlags);
}
D3D12_VIDEO_PROCESS_FILTER_FLAGS enabledFilterFlags = D3D12_VIDEO_PROCESS_FILTER_FLAG_NONE;
bool enableOrientation = (
((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ROTATION) != 0)
|| ((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_FLIP) != 0)
);
D3D12_VIDEO_PROCESS_INPUT_STREAM_DESC inputStreamDesc = {
InputFormats[0],
InputColorSpace,
AspectRatio, // SourceAspectRatio;
AspectRatio, // DestinationAspectRatio;
FrameRate, // FrameRate
pD3D12Proc->m_SupportCaps.ScaleSupport.OutputSizeRange, // SourceSizeRange
pD3D12Proc->m_SupportCaps.ScaleSupport.OutputSizeRange, // DestinationSizeRange
enableOrientation,
enabledFilterFlags,
StereoFormat,
FieldType,
D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_NONE,
((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_BLENDING) != 0)
&& ((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_FILL) != 0), // EnableAlphaBlending
{}, // LumaKey
0, // NumPastFrames
0, // NumFutureFrames
FALSE // EnableAutoProcessing
};
D3D12_VIDEO_PROCESS_OUTPUT_STREAM_DESC outputStreamDesc =
{
pD3D12Proc->m_SupportCaps.OutputFormat.Format,
OutputColorSpace,
D3D12_VIDEO_PROCESS_ALPHA_FILL_MODE_OPAQUE, // AlphaFillMode
0u, // AlphaFillModeSourceStreamIndex
{0, 0, 0, 0}, // BackgroundColor
FrameRate, // FrameRate
FALSE // EnableStereo
};
// Query the past/future reference frames required for video processor creation
{
D3D12_FEATURE_DATA_VIDEO_PROCESS_REFERENCE_INFO referenceInfo = {};
referenceInfo.NodeIndex = 0;
D3D12_VIDEO_PROCESS_FEATURE_FLAGS featureFlags = D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
featureFlags |= outputStreamDesc.AlphaFillMode ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_FILL : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
featureFlags |= inputStreamDesc.LumaKey.Enable ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_LUMA_KEY : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
featureFlags |= (inputStreamDesc.StereoFormat != D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE || outputStreamDesc.EnableStereo) ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_STEREO : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
featureFlags |= inputStreamDesc.EnableOrientation ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_ROTATION | D3D12_VIDEO_PROCESS_FEATURE_FLAG_FLIP : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
featureFlags |= inputStreamDesc.EnableAlphaBlending ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_BLENDING : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
referenceInfo.DeinterlaceMode = inputStreamDesc.DeinterlaceMode;
referenceInfo.Filters = inputStreamDesc.FilterFlags;
referenceInfo.FeatureSupport = featureFlags;
referenceInfo.InputFrameRate = inputStreamDesc.FrameRate;
referenceInfo.OutputFrameRate = outputStreamDesc.FrameRate;
referenceInfo.EnableAutoProcessing = inputStreamDesc.EnableAutoProcessing;
hr = pD3D12Proc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_PROCESS_REFERENCE_INFO, &referenceInfo, sizeof(referenceInfo));
if (FAILED(hr)) {
debug_printf("[d3d12_video_processor] d3d12_video_processor_check_caps_and_create_processor - CheckFeatureSupport "
"failed with HR %x\n",
hr);
return false;
}
inputStreamDesc.NumPastFrames = referenceInfo.PastFrames;
inputStreamDesc.NumFutureFrames = referenceInfo.FutureFrames;
}
pD3D12Proc->m_outputStreamDesc = outputStreamDesc;
debug_printf("[d3d12_video_processor]\t Creating Video Processor\n");
debug_printf("[d3d12_video_processor]\t NumInputs: %d\n", (int) InputFormats.size());
pD3D12Proc->m_inputStreamDescs.clear();
for (unsigned i = 0; i < InputFormats.size(); i++)
{
inputStreamDesc.Format = InputFormats[i];
pD3D12Proc->m_inputStreamDescs.push_back(inputStreamDesc);
debug_printf("[d3d12_video_processor]\t Input Stream #%d Format: %d\n", i, inputStreamDesc.Format);
}
debug_printf("[d3d12_video_processor]\t Output Stream Format: %d\n", pD3D12Proc->m_outputStreamDesc.Format);
hr = pD3D12Proc->m_spD3D12VideoDevice->CreateVideoProcessor(pD3D12Proc->m_NodeMask,
&pD3D12Proc->m_outputStreamDesc,
pD3D12Proc->m_inputStreamDescs.size(),
pD3D12Proc->m_inputStreamDescs.data(),
IID_PPV_ARGS(pD3D12Proc->m_spVideoProcessor.GetAddressOf()));
if (FAILED(hr)) {
debug_printf("[d3d12_video_processor] d3d12_video_processor_check_caps_and_create_processor - CreateVideoProcessor "
"failed with HR %x\n",
hr);
return false;
}
return true;
}
bool
d3d12_video_processor_create_command_objects(struct d3d12_video_processor *pD3D12Proc)
{
assert(pD3D12Proc->m_spD3D12VideoDevice);
D3D12_COMMAND_QUEUE_DESC commandQueueDesc = { D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS };
HRESULT hr = pD3D12Proc->m_pD3D12Screen->dev->CreateCommandQueue(
&commandQueueDesc,
IID_PPV_ARGS(pD3D12Proc->m_spCommandQueue.GetAddressOf()));
if (FAILED(hr)) {
debug_printf("[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to CreateCommandQueue "
"failed with HR %x\n",
hr);
return false;
}
hr = pD3D12Proc->m_pD3D12Screen->dev->CreateFence(0,
D3D12_FENCE_FLAG_NONE,
IID_PPV_ARGS(&pD3D12Proc->m_spFence));
if (FAILED(hr)) {
debug_printf(
"[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to CreateFence failed with HR %x\n",
hr);
return false;
}
hr = pD3D12Proc->m_pD3D12Screen->dev->CreateCommandAllocator(
D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
IID_PPV_ARGS(pD3D12Proc->m_spCommandAllocator.GetAddressOf()));
if (FAILED(hr)) {
debug_printf("[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to "
"CreateCommandAllocator failed with HR %x\n",
hr);
return false;
}
hr = pD3D12Proc->m_pD3D12Screen->dev->CreateCommandList(0,
D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
pD3D12Proc->m_spCommandAllocator.Get(),
nullptr,
IID_PPV_ARGS(pD3D12Proc->m_spCommandList.GetAddressOf()));
if (FAILED(hr)) {
debug_printf("[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to CreateCommandList "
"failed with HR %x\n",
hr);
return false;
}
return true;
}
D3D12_VIDEO_PROCESS_ORIENTATION
d3d12_video_processor_convert_pipe_rotation(enum pipe_video_vpp_orientation orientation_flags)
{
D3D12_VIDEO_PROCESS_ORIENTATION result = D3D12_VIDEO_PROCESS_ORIENTATION_DEFAULT;
if(orientation_flags & PIPE_VIDEO_VPP_ROTATION_90)
{
result = (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90_FLIP_HORIZONTAL : D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90;
debug_printf("d3d12_video_processor_process_frame: Orientation Mode: %s\n", (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90_FLIP_HORIZONTAL" : "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90");
}
else if(orientation_flags & PIPE_VIDEO_VPP_ROTATION_180)
{
result = D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_180;
debug_printf("d3d12_video_processor_process_frame: Orientation Mode: D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_180\n");
}
else if(orientation_flags & PIPE_VIDEO_VPP_ROTATION_270)
{
result = (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270_FLIP_HORIZONTAL : D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270;
debug_printf("d3d12_video_processor_process_frame: Orientation Mode: %s\n", (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270_FLIP_HORIZONTAL" : "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270");
}
else if(orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL)
{
result = D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_HORIZONTAL;
debug_printf("d3d12_video_processor_process_frame: Orientation Mode: D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_HORIZONTAL\n");
}
else if(orientation_flags & PIPE_VIDEO_VPP_FLIP_VERTICAL)
{
result = D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_VERTICAL;
debug_printf("d3d12_video_processor_process_frame: Orientation Mode: D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_VERTICAL\n");
}
return result;
}


@@ -0,0 +1,139 @@
/*
* Copyright © Microsoft Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef D3D12_VIDEO_PROC_H
#define D3D12_VIDEO_PROC_H
#include "d3d12_video_types.h"
///
/// Pipe video interface starts
///
/**
* creates a video processor
*/
struct pipe_video_codec *
d3d12_video_create_processor(struct pipe_context *context, const struct pipe_video_codec *templ);
/**
* destroy this video processor
*/
void
d3d12_video_processor_destroy(struct pipe_video_codec *codec);
/**
* start processing of a new frame
*/
void
d3d12_video_processor_begin_frame(struct pipe_video_codec * codec,
struct pipe_video_buffer *target,
struct pipe_picture_desc *picture);
/**
* Perform post-process effect
*/
void
d3d12_video_processor_process_frame(struct pipe_video_codec *codec,
struct pipe_video_buffer *input_texture,
const struct pipe_vpp_desc *process_properties);
/**
* end processing of the current frame
*/
void
d3d12_video_processor_end_frame(struct pipe_video_codec * codec,
struct pipe_video_buffer *target,
struct pipe_picture_desc *picture);
/**
* flush any outstanding command buffers to the hardware
* should be called before a video_buffer is accessed by the gallium frontend again
*/
void
d3d12_video_processor_flush(struct pipe_video_codec *codec);
///
/// Pipe video interface ends
///
///
/// d3d12_video_processor functions starts
///
struct d3d12_video_processor
{
struct pipe_video_codec base;
struct d3d12_screen *m_pD3D12Screen;
struct d3d12_context *m_pD3D12Context;
///
/// D3D12 objects and context info
///
const uint m_NodeMask = 0u;
const uint m_NodeIndex = 0u;
ComPtr<ID3D12Fence> m_spFence;
uint m_fenceValue = 1u;
ComPtr<ID3D12VideoDevice> m_spD3D12VideoDevice;
D3D12_FEATURE_DATA_VIDEO_PROCESS_SUPPORT m_SupportCaps;
D3D12_VIDEO_PROCESS_OUTPUT_STREAM_DESC m_outputStreamDesc;
std::vector<D3D12_VIDEO_PROCESS_INPUT_STREAM_DESC> m_inputStreamDescs;
ComPtr<ID3D12VideoProcessor1> m_spVideoProcessor;
ComPtr<ID3D12CommandQueue> m_spCommandQueue;
ComPtr<ID3D12CommandAllocator> m_spCommandAllocator;
ComPtr<ID3D12VideoProcessCommandList1> m_spCommandList;
std::vector<D3D12_RESOURCE_BARRIER> m_transitionsBeforeCloseCmdList;
// Current state between begin and end frame
D3D12_VIDEO_PROCESS_OUTPUT_STREAM_ARGUMENTS m_OutputArguments;
std::vector<D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1> m_ProcessInputs;
// Indicates if GPU commands have not been flushed and are pending.
bool m_needsGPUFlush = false;
};
struct pipe_video_codec *
d3d12_video_processor_create(struct pipe_context *context, const struct pipe_video_codec *codec);
bool
d3d12_video_processor_check_caps_and_create_processor(struct d3d12_video_processor *pD3D12Proc,
std::vector<DXGI_FORMAT> InputFormats,
DXGI_COLOR_SPACE_TYPE InputColorSpace,
DXGI_FORMAT OutputFormat,
DXGI_COLOR_SPACE_TYPE OutputColorSpace);
bool
d3d12_video_processor_create_command_objects(struct d3d12_video_processor *pD3D12Proc);
D3D12_VIDEO_PROCESS_ORIENTATION
d3d12_video_processor_convert_pipe_rotation(enum pipe_video_vpp_orientation orientation);
///
/// d3d12_video_processor functions ends
///
#endif


@@ -62,6 +62,7 @@ if with_gallium_d3d12_video
'd3d12_video_texture_array_dpb_manager.cpp',
'd3d12_video_array_of_textures_dpb_manager.cpp',
'd3d12_video_screen.cpp',
'd3d12_video_proc.cpp',
]
endif