Resize D3D11Texture2D DirectX 11

I would like to resize a D3D11Texture2D to make it smaller. For example, I have a 1920x1080 texture that I would like to scale down to 1280x720.
Just so you know, I'm not drawing at all; I just want to get the byte buffer scaled. Here is my code:
if (mRealTexture == nullptr) {
D3D11_TEXTURE2D_DESC description;
texture2D->GetDesc(&description);
description.BindFlags = 0;
description.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
description.Usage = D3D11_USAGE_STAGING;
description.MiscFlags = 0;
hr = mDevice->CreateTexture2D(&description, NULL, &mRealTexture);
if (FAILED(hr)) {
if (mRealTexture) {
mRealTexture->Release();
mRealTexture = nullptr;
}
return NULL;
}
}
mImmediateContext->CopyResource(mRealTexture, texture2D);
if (mScaledTexture == nullptr) {
D3D11_TEXTURE2D_DESC description;
texture2D->GetDesc(&description);
description.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
description.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
description.Width = 1440;
description.Height = 585;
description.MipLevels = 4;
description.ArraySize = 1;
description.SampleDesc.Count = 1;
description.SampleDesc.Quality = 0;
description.Usage = D3D11_USAGE_DEFAULT;
hr = mDevice->CreateTexture2D(&description, NULL, &mScaledTexture);
if (FAILED(hr)) {
if (mScaledTexture) {
mScaledTexture->Release();
mScaledTexture = nullptr;
}
return NULL;
}
}
I want to copy mRealTexture onto mScaledTexture, map the scaled texture, and get the buffer.
Thanks for the help.

Having thought about this, you are left with only a couple of options.
1 - You create a viewport etc. and render to your target size with a full-screen quad, which I get the feeling you don't want to do.
2 - You roll your own scaling, which isn't too bad, and scale the texture as you copy the data from one buffer to another.
Option 2 isn't too bad: the roughest scaling would be reading points based on the scaling ratio, but a more accurate version would average a number of samples based on a weighted grid (the weights need to be recalculated for each pixel you visit on your target). A rough sketch of the point-sampling version is below.
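A minimal sketch of that point-sampling approach, assuming a 32-bit pixel format (e.g. R8G8B8A8) and a tightly packed destination buffer; the function name and parameters are my own, and srcPitch is the RowPitch returned by Map:
#include <cstdint>
// Nearest-neighbour downscale of a mapped 32-bit-per-pixel staging texture.
// src points at the mapped data, dst at a caller-allocated buffer of
// dstWidth * dstHeight * 4 bytes.
void DownscalePoint(const uint8_t* src, uint32_t srcWidth, uint32_t srcHeight, uint32_t srcPitch,
                    uint8_t* dst, uint32_t dstWidth, uint32_t dstHeight)
{
    for (uint32_t y = 0; y < dstHeight; ++y)
    {
        const uint32_t srcY = y * srcHeight / dstHeight; // vertical scaling ratio
        const uint32_t* srcRow = reinterpret_cast<const uint32_t*>(src + srcY * srcPitch);
        uint32_t* dstRow = reinterpret_cast<uint32_t*>(dst + y * dstWidth * 4);
        for (uint32_t x = 0; x < dstWidth; ++x)
        {
            const uint32_t srcX = x * srcWidth / dstWidth; // horizontal scaling ratio
            dstRow[x] = srcRow[srcX];
        }
    }
}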

Related

CreateTexture2D returns black image

I am trying to make a desktop recorder, but all I get is a black screen, and I have no clue why.
I tried with DX9, but it's the same thing when I use the back buffer; the front buffer method does work and can capture the frames correctly, but it's too slow (33 ms per frame, all because of GetFrontBuffer).
So I decided to try DX11. There are no errors returned, no errors when creating the swap chain and device, everything is fine, and in fact frames are captured (I measure the time and FPS, and something is going on), but they are all black, as if the data isn't coming from the desktop but from somewhere else.
This is the capture method:
if(contains_errors()){return;}
m_swap_chain->GetBuffer(0, __uuidof(ID3D11Resource), (void**)&m_back_buffer_ptr);
return_if_null(m_back_buffer_ptr);
HRESULT hr = m_back_buffer_ptr->QueryInterface(__uuidof(ID3D11Resource), (void**)&m_back_buffer_data);
return_if_failed(hr);
hr = m_swap_chain->GetDevice(__uuidof(ID3D11Device), (void**)&m_device);
return_if_failed(hr);
hr = m_swap_chain->GetDesc(&m_desc);
return_if_failed(hr);
ID3D11Texture2D* texture = nullptr;
hr = m_device->CreateTexture2D(&m_tex_desc, 0, &texture);
return_if_failed(hr);
ID3D11DeviceContext* context = nullptr;
m_device->GetImmediateContext(&context);
return_if_null(context);
context->CopyResource(texture, m_back_buffer_data);
D3D11_MAPPED_SUBRESOURCE map_subres = {0, 0, 0};
hr = context->Map(texture, 0, D3D11_MAP_READ, 0, &map_subres);
return_if_failed(hr);
if(m_current_frame == 0)
{
m_current_frame = new BYTE[map_subres.DepthPitch];
}
memcpy(m_current_frame, map_subres.pData, map_subres.DepthPitch);
texture->Release();
m_device->Release();
This is the texture desc setup
ZeroMemory(&m_tex_desc, sizeof(m_tex_desc));
m_tex_desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
m_tex_desc.Width = m_desc.BufferDesc.Width;
m_tex_desc.Height = m_desc.BufferDesc.Height;
m_tex_desc.MipLevels = 1;
m_tex_desc.ArraySize = 1;
m_tex_desc.SampleDesc.Count = 1;
m_tex_desc.Usage = D3D11_USAGE_STAGING;
m_tex_desc.BindFlags = 0;
m_tex_desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
m_tex_desc.MiscFlags = 0;
This is swapchain desc
m_desc.BufferDesc.Width = 1366;
m_desc.BufferDesc.Height = 768;
m_desc.BufferDesc.RefreshRate.Numerator = 1;
m_desc.BufferDesc.RefreshRate.Denominator = 60;
m_desc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
m_desc.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
m_desc.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
m_desc.SampleDesc.Count = 2;
m_desc.SampleDesc.Quality = 0;
m_desc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
m_desc.BufferCount = 1;
m_desc.OutputWindow = (HWND)m_dx_win->winId();
m_desc.Windowed = true;
m_desc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
m_desc.Flags = 0;
Class members
private:
IDXGISwapChain* m_swap_chain = 0;
ID3D11DeviceContext* m_context = 0;
Dx_Output_Window* m_dx_win = 0;
IDXGIResource* m_back_buffer_ptr = 0;
ID3D11Resource* m_back_buffer_data = 0;
ID3D11Device* m_device = 0;
D3D_FEATURE_LEVEL m_selected_feature;
DXGI_SWAP_CHAIN_DESC m_desc;
D3D11_TEXTURE2D_DESC m_tex_desc = {};
I looked up basically all the resources I could, but I could not find any info on why it runs yet the image is all black. I was thinking maybe there was something up with the display, but no: I took the raw data and displayed the values, and every pixel was exactly 0, which is black.
In the "m_desc.OutputWindow = (HWND)m_dx_win->winId();" line I also tried using GetDesktopWindow(), but it doesn't change anything; in fact I got some warnings instead.

Fill CubeTexture with data

I'm puzzled why this isn't working.
I'm trying to add texture data to each of the cube texture's faces. For some reason, only the first (+X) face works. The MSDN documentation is quite sparse, but it looks like this should do the trick:
// mip-level 0 data
// R8G8B8A8 texture
uint32_t sizeWidth = textureWidth * sizeof(uint8_t) * 4;
if (isCubeTexture)
{
for (uint32_t index = 0; index < gCubemapNumTextures; ++index)
{
const uint32_t subResourceID = D3D11CalcSubresource(0, index, 1);
context->UpdateSubresource(mTexture, subResourceID, NULL, &textureData.at(sizeWidth * textureHeight * index), sizeWidth, 0);
}
}
When debugging and looking at the faces, it's all just black except the first face, which seems to load fine. So obviously I am doing something wrong; how do you properly upload cube texture data to all the faces?
EDIT: the following parameters are used to create the texture:
D3D11_TEXTURE2D_DESC textureDesc;
ZeroMemory(&textureDesc, sizeof(D3D11_TEXTURE2D_DESC));
textureDesc.Width = textureWidth;
textureDesc.Height = textureHeight;
textureDesc.ArraySize = isCubeTexture ? gCubemapNumTextures : 1;
if (isSRGB)
textureDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;
else
textureDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
textureDesc.SampleDesc.Count = 1;
textureDesc.Usage = D3D11_USAGE_DEFAULT;
textureDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
textureDesc.MiscFlags = D3D11_RESOURCE_MISC_GENERATE_MIPS;
if (isCubeTexture)
textureDesc.MiscFlags |= D3D11_RESOURCE_MISC_TEXTURECUBE;
DXCALL(device->CreateTexture2D(&textureDesc, NULL, &mTexture));
Then, after uploading the data, I generate the mip chain like this:
context->GenerateMips(mShaderResourceView);
And again, it works fine but only for the first (+x) face.
You create the texture with "0" mip levels by virtue of zeroing out the texture description. Zero means "full mip chain please", which means more than 1 mip (unless your texture is 1x1).
Your call to D3D11CalcSubresource has a third argument of '1', suggesting only one mip, which appears not to be true. Be sure to pass the correct number of mips to this helper function or it won't calculate the correct subresource index.
You can get the mip count by calling GetDesc() after the texture has been created, as in the sketch below.
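For illustration, a minimal sketch of the corrected upload loop using the question's own variable names (the createdDesc local is mine); it queries the real mip count after creation and feeds it to D3D11CalcSubresource:
D3D11_TEXTURE2D_DESC createdDesc;
mTexture->GetDesc(&createdDesc); // MipLevels now holds the full chain count, not 0
for (uint32_t index = 0; index < gCubemapNumTextures; ++index)
{
    // subresource = mip 0 of array slice `index`, given the real mip count
    const uint32_t subResourceID = D3D11CalcSubresource(0, index, createdDesc.MipLevels);
    context->UpdateSubresource(mTexture, subResourceID, NULL,
                               &textureData.at(sizeWidth * textureHeight * index),
                               sizeWidth, 0);
}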

ID3D11DeviceContext::DrawIndexed() Failed

My program is a DirectX program that draws a container cube with smaller cubes inside it; these smaller cubes fall over time. I hope you understand what I mean.
The program isn't complete yet; it should draw the container only, but it draws nothing. Only the background color is visible. I only included what I think is needed.
These are the routines that initialize the program:
bool Game::init(HINSTANCE hinst,HWND _hw){
Directx11 ::init(hinst , _hw);
return LoadContent();}
Directx11::init()
bool Directx11::init(HINSTANCE hinst,HWND hw){
_hinst=hinst;_hwnd=hw;
RECT rc;
GetClientRect(_hwnd,&rc);
height= rc.bottom - rc.top;
width = rc.right - rc.left;
UINT flags=0;
#ifdef _DEBUG
flags |=D3D11_CREATE_DEVICE_DEBUG;
#endif
HR(D3D11CreateDevice(0,_driverType,0,flags,0,0,D3D11_SDK_VERSION,&d3dDevice,&_featureLevel,&d3dDeviceContext));
if (d3dDevice == 0 || d3dDeviceContext == 0)
return 0;
DXGI_SWAP_CHAIN_DESC sdesc;
ZeroMemory(&sdesc,sizeof(DXGI_SWAP_CHAIN_DESC));
sdesc.Windowed=true;
sdesc.BufferCount=1;
sdesc.BufferDesc.Format=DXGI_FORMAT_R8G8B8A8_UNORM;
sdesc.BufferDesc.Height=height;
sdesc.BufferDesc.Width=width;
sdesc.BufferDesc.Scaling=DXGI_MODE_SCALING_UNSPECIFIED;
sdesc.BufferDesc.ScanlineOrdering=DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
sdesc.OutputWindow=_hwnd;
sdesc.BufferDesc.RefreshRate.Denominator=1;
sdesc.BufferDesc.RefreshRate.Numerator=60;
sdesc.Flags=0;
sdesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
if (m4xMsaaEnable)
{
sdesc.SampleDesc.Count=4;
sdesc.SampleDesc.Quality=m4xMsaaQuality-1;
}
else
{
sdesc.SampleDesc.Count=1;
sdesc.SampleDesc.Quality=0;
}
IDXGIDevice *Device=0;
HR(d3dDevice->QueryInterface(__uuidof(IDXGIDevice),reinterpret_cast <void**> (&Device)));
IDXGIAdapter*Ad=0;
HR(Device->GetParent(__uuidof(IDXGIAdapter),reinterpret_cast <void**> (&Ad)));
IDXGIFactory* fac=0;
HR(Ad->GetParent(__uuidof(IDXGIFactory),reinterpret_cast <void**> (&fac)));
fac->CreateSwapChain(d3dDevice,&sdesc,&swapchain);
ReleaseCOM(Device);
ReleaseCOM(Ad);
ReleaseCOM(fac);
ID3D11Texture2D *back = 0;
HR(swapchain->GetBuffer(0,__uuidof(ID3D11Texture2D),reinterpret_cast <void**> (&back)));
HR(d3dDevice->CreateRenderTargetView(back,0,&RenderTarget));
D3D11_TEXTURE2D_DESC Tdesc;
ZeroMemory(&Tdesc,sizeof(D3D11_TEXTURE2D_DESC));
Tdesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
Tdesc.ArraySize = 1;
Tdesc.Format= DXGI_FORMAT_D24_UNORM_S8_UINT;
Tdesc.Height= height;
Tdesc.Width = width;
Tdesc.Usage = D3D11_USAGE_DEFAULT;
Tdesc.MipLevels=1;
if (m4xMsaaEnable)
{
Tdesc.SampleDesc.Count=4;
Tdesc.SampleDesc.Quality=m4xMsaaQuality-1;
}
else
{
Tdesc.SampleDesc.Count=1;
Tdesc.SampleDesc.Quality=0;
}
HR(d3dDevice->CreateTexture2D(&Tdesc,0,&depthview));
HR(d3dDevice->CreateDepthStencilView(depthview,0,&depth));
d3dDeviceContext->OMSetRenderTargets(1,&RenderTarget,depth);
D3D11_VIEWPORT vp;
vp.TopLeftX=0.0f;
vp.TopLeftY=0.0f;
vp.Width = static_cast <float> (width);
vp.Height= static_cast <float> (height);
vp.MinDepth = 0.0f;
vp.MaxDepth = 1.0f;
d3dDeviceContext -> RSSetViewports(1,&vp);
return true;
}
SetBuild() prepares the matrices inside the container for the smaller cubes. I didn't program it to draw the smaller cubes yet.
And this is the function that draws the scene:
void Game::Render(){
d3dDeviceContext->ClearRenderTargetView(RenderTarget,reinterpret_cast <const float*> (&Colors::LightSteelBlue));
d3dDeviceContext->ClearDepthStencilView(depth,D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL,1.0f,0);
d3dDeviceContext-> IASetInputLayout(_layout);
d3dDeviceContext-> IASetPrimitiveTopology(D3D10_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
d3dDeviceContext->IASetIndexBuffer(indices,DXGI_FORMAT_R32_UINT,0);
UINT strides=sizeof(Vertex),off=0;
d3dDeviceContext->IASetVertexBuffers(0,1,&vertices,&strides,&off);
D3DX11_TECHNIQUE_DESC des;
Tech->GetDesc(&des);
Floor * Lookup; /*is a variable to Lookup inside the matrices structure (Floor Contains XMMATRX Piese[9])*/
std::vector<XMFLOAT4X4> filled; // saves the matrices of the smaller cubes
XMMATRIX V=XMLoadFloat4x4(&View),P = XMLoadFloat4x4(&Proj);
XMMATRIX vp = V * P;XMMATRIX wvp;
for (UINT i = 0; i < des.Passes; i++)
{
d3dDeviceContext->RSSetState(BuildRast);
wvp = XMLoadFloat4x4(&(B.Memory[0].Pieces[0])) * vp; // Loading The Matrix at translation(0,0,0)
HR(ShadeMat->SetMatrix(reinterpret_cast<float*> ( &wvp)));
HR(Tech->GetPassByIndex(i)->Apply(0,d3dDeviceContext));
d3dDeviceContext->DrawIndexed(build_ind_count,build_ind_index,build_vers_index);
d3dDeviceContext->RSSetState(PieseRast);
UINT r1=B.GetSize(),r2=filled.size();
for (UINT j = 0; j < r1; j++)
{
Lookup = &B.Memory[j];
for (UINT r = 0; r < Lookup->filledindeces.size(); r++)
{
filled.push_back(Lookup->Pieces[Lookup->filledindeces[r]]);
}
}
for (UINT j = 0; j < r2; j++)
{
ShadeMat->SetMatrix( reinterpret_cast<const float*> (&filled[i]));
Tech->GetPassByIndex(i)->Apply(0,d3dDeviceContext);
d3dDeviceContext->DrawIndexed(piese_ind_count,piese_ind_index,piese_vers_index);
}
}
HR(swapchain->Present(0,0));}
Thanks in advance.
One bug in your program appears to be that you're using i, the index of the current pass, as an index into the filled vector, when you should apparently be using j.
Another apparent bug is that in the loop where you are supposed to be iterating over the elements of filled, you're not iterating over all of them. The value r2 is set to the size of filled before you append anything to it during that pass. During the first pass this means that nothing will be drawn by this loop. If your technique only has one pass then this means that the second DrawIndexed call in your code will never be executed.
It also appears you should only be adding matrices to filled once, regardless of the number of passes the technique has. You should consider whether your code is actually meant to work with techniques that have multiple passes. A sketch of these fixes is below.
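Not a definitive rewrite, just a sketch of the fixes above reusing the question's variable names: filled is built once before the pass loop, its size is read after it is filled, and the inner loop indexes it with j instead of the pass index i.
std::vector<XMFLOAT4X4> filled;
for (UINT j = 0; j < B.GetSize(); j++)
{
    Floor* lookup = &B.Memory[j];
    for (UINT r = 0; r < lookup->filledindeces.size(); r++)
        filled.push_back(lookup->Pieces[lookup->filledindeces[r]]);
}
for (UINT i = 0; i < des.Passes; i++)
{
    // ... draw the container exactly as before ...
    d3dDeviceContext->RSSetState(PieseRast);
    for (UINT j = 0; j < filled.size(); j++) // size taken after filling
    {
        ShadeMat->SetMatrix(reinterpret_cast<const float*>(&filled[j])); // j, not i
        Tech->GetPassByIndex(i)->Apply(0, d3dDeviceContext);
        d3dDeviceContext->DrawIndexed(piese_ind_count, piese_ind_index, piese_vers_index);
    }
}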

DirectX11 pass current backbuffer as a resource to pixel shader

I'm trying to hook the Direct3D 11 "Present" function and pass the current back buffer and the current Z-buffer (depth stencil) as textures to my pixel shader.
I use the following code in my hooked "Present" function:
ID3D11RenderTargetView* pRT = NULL;
ID3D11DepthStencilView* pDS = NULL;
pContext->OMGetRenderTargets(1, &pRT, &pDS);
if (pRT != NULL)
{
ID3D11Texture2D* pBackBuffer = NULL;
pSwapChain->GetBuffer(0, __uuidof(*pBackBuffer), (LPVOID*)&pBackBuffer);
D3D11_TEXTURE2D_DESC bbDesc;
pBackBuffer->GetDesc(&bbDesc);
ID3D11ShaderResourceView* g_refRes = NULL;
D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
shaderResourceViewDesc.Format = bbDesc.Format;
shaderResourceViewDesc.ViewDimension = D3D_SRV_DIMENSION_UNKNOWN;
shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
shaderResourceViewDesc.Texture2D.MipLevels = bbDesc.MipLevels;
hr = d3d11Device->CreateShaderResourceView(pBackBuffer, &g_shaderResourceViewDesc, &g_refRes);
d3d11DevCon->PSSetShaderResources(0, 1, &g_refRes);
}
But when I call CreateShaderResourceView() I get E_INVALIDARG.
Can anybody help me with how to pass the current back buffer and Z-buffer as textures to a pixel shader?
Thanks a lot.

Depth stencil buffer not working directx11

OK, I've tried everything at this point and I'm really lost.
ID3D11Texture2D* depthStencilTexture;
D3D11_TEXTURE2D_DESC depthTexDesc;
ZeroMemory (&depthTexDesc, sizeof(D3D11_TEXTURE2D_DESC));
depthTexDesc.Width = set->mapSettings["SCREEN_WIDTH"];
depthTexDesc.Height = set->mapSettings["SCREEN_HEIGHT"];
depthTexDesc.MipLevels = 1;
depthTexDesc.ArraySize = 1;
depthTexDesc.Format = DXGI_FORMAT_D32_FLOAT;
depthTexDesc.SampleDesc.Count = 1;
depthTexDesc.SampleDesc.Quality = 0;
depthTexDesc.Usage = D3D11_USAGE_DEFAULT;
depthTexDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
depthTexDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE | D3D11_CPU_ACCESS_READ;
depthTexDesc.MiscFlags = 0;
mDevice->CreateTexture2D(&depthTexDesc, NULL, &depthStencilTexture);
D3D11_DEPTH_STENCIL_DESC dsDesc;
// Depth test parameters
dsDesc.DepthEnable = true;
dsDesc.DepthWriteMask = D3D11_DEPTH_WRITE_MASK_ALL;
dsDesc.DepthFunc = D3D11_COMPARISON_LESS;//LESS
// Stencil test parameters
dsDesc.StencilEnable = false;
dsDesc.StencilReadMask = 0xFF;
dsDesc.StencilWriteMask = 0xFF;
// Stencil operations if pixel is front-facing
dsDesc.FrontFace.StencilFailOp = D3D11_STENCIL_OP_KEEP; //KEEP
dsDesc.FrontFace.StencilDepthFailOp = D3D11_STENCIL_OP_INCR; //INCR
dsDesc.FrontFace.StencilPassOp = D3D11_STENCIL_OP_KEEP; //KEEP
dsDesc.FrontFace.StencilFunc = D3D11_COMPARISON_ALWAYS;
// Stencil operations if pixel is back-facing
dsDesc.BackFace.StencilFailOp = D3D11_STENCIL_OP_KEEP; //KEEP
dsDesc.BackFace.StencilDepthFailOp = D3D11_STENCIL_OP_DECR; //DECR
dsDesc.BackFace.StencilPassOp = D3D11_STENCIL_OP_KEEP; //KEEP
dsDesc.BackFace.StencilFunc = D3D11_COMPARISON_ALWAYS;
// Create depth stencil state
mDevice->CreateDepthStencilState(&dsDesc, &mDepthStencilState);
D3D11_DEPTH_STENCIL_VIEW_DESC depthStencilViewDesc;
ZeroMemory (&depthStencilViewDesc, sizeof(depthStencilViewDesc));
depthStencilViewDesc.Format = depthTexDesc.Format;
depthStencilViewDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
depthStencilViewDesc.Texture2D.MipSlice = 0;
mDevice->CreateDepthStencilView(depthStencilTexture, &depthStencilViewDesc, &mDepthStencilView);
mDeviceContext->OMSetDepthStencilState(mDepthStencilState, 1);
And then afterwards I call:
mDeviceContext->OMSetRenderTargets(1, &mTargetView, mDepthStencilView);
Obviously I clear before every frame:
mDeviceContext->ClearRenderTargetView(mTargetView, D3DXCOLOR(0.0f, 0.0f, 0.0f, 1.0f));
mDeviceContext->ClearDepthStencilView(mDepthStencilView, D3D11_CLEAR_DEPTH, 1.0f, 0 );
And still it just keeps the last pixel drawn, with no testing.
screenshot
PS: I've checked the rasterizer and it is correctly drawing only the front faces.
Any help, anyone?
Check your HRESULTs - the call to CreateTexture2D is almost certainly failing because you have specified CPU_ACCESS flags on a DEFAULT texture. Since you never check any errors or pointers, this just propagates NULL to all your depth objects, effectively disabling depth testing.
You can also catch errors like this by enabling the D3D debug layer: add D3D11_CREATE_DEVICE_DEBUG to the flags on D3D11CreateDevice. If you had done this, you would have seen the following debug spew:
D3D11 ERROR: ID3D11Device::CreateTexture2D: A D3D11_USAGE_DEFAULT
Resource cannot have any CPUAccessFlags set. The following
CPUAccessFlags bits cannot be set in this case: D3D11_CPU_ACCESS_READ
(1), D3D11_CPU_ACCESS_WRITE (1). [ STATE_CREATION ERROR #98:
CREATETEXTURE2D_INVALIDCPUACCESSFLAGS]
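A minimal sketch of the corrected creation call, reusing the question's variable names (how you handle the failure is up to you):
depthTexDesc.Usage = D3D11_USAGE_DEFAULT;
depthTexDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
depthTexDesc.CPUAccessFlags = 0; // a DEFAULT-usage resource cannot be CPU-accessible
HRESULT hr = mDevice->CreateTexture2D(&depthTexDesc, NULL, &depthStencilTexture);
if (FAILED(hr))
{
    // log and bail out here instead of letting a NULL texture propagate
    // into CreateDepthStencilView and silently disable depth testing
}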

Resources