I'm trying to write a skybox shader in DX10 using the following HLSL code:
//////////////////////////////////////////////////////////////////////////
// world matrix for each rendered object
float4x4 g_mWorld;
// single cubemap texture
Texture2D g_tCubeMap;
// basic mirror texture sampler
SamplerState g_sSamplerMirror
{
Filter = MIN_MAG_MIP_POINT;
AddressU = MIRROR;
AddressV = MIRROR;
};
//////////////////////////////////////////////////////////////////////////
// pre-defined vertex formats for vertex input layouts
struct VS_INPUT
{
float3 Position : POSITION;
};
struct PS_INPUT
{
float4 SPosition : SV_POSITION;
float3 UV : TEXCOORD;
};
//////////////////////////////////////////////////////////////////////////
PS_INPUT VS_Default( VS_INPUT Input )
{
PS_INPUT Output = (PS_INPUT)0;
Output.SPosition = float4(Input.Position,1.0f);
Output.UV = normalize( mul( Output.SPosition, g_mWorld ) ).xyz;
return Output;
}
//////////////////////////////////////////////////////////////////////////
float4 PS_Default( PS_INPUT Input ) : SV_TARGET0
{
return float4( texCUBE( g_sSamplerMirror, Input.UV ) );
}
//////////////////////////////////////////////////////////////////////////
technique10 TECH_Default
{
pass
{
SetVertexShader( CompileShader( vs_4_0, VS_Default() ) );
SetPixelShader( CompileShader( ps_4_0, PS_Default() ) );
SetGeometryShader( 0 );
}
}
This gives the error "DX-9 style intrinsics are disabled when not in dx9 compatibility mode." on line 46:
return float4( texCUBE( g_sSamplerMirror, Input.UV ) );
Is there an alternative to texCUBE? How can I fix this without enabling dx9 compatibility mode?
Since you are using Shader Model 4 you should be able to create a TextureCube object and then call the Sample method.
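A minimal sketch of the Shader Model 4 version, reusing the names from the question (note that g_tCubeMap would also need to be declared as TextureCube rather than Texture2D):
TextureCube g_tCubeMap;
float4 PS_Default( PS_INPUT Input ) : SV_TARGET0
{
    // TextureCube.Sample replaces the DX9-style texCUBE intrinsic
    return g_tCubeMap.Sample( g_sSamplerMirror, Input.UV );
}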
I have an application that displays a 3D object using D3D11 and DirectXMath. I also want to display a HUD in the top-left corner, so I thought I would use the DirectXTK spriteBatch/spriteFont to do this. My 3D rendering is fine until I add the following code.
Is DirectXTK using its own shaders and changing some states?
The question is: how do I fix this?
m_spriteBatch->Begin();
const wchar_t* output = L"Hello World";
m_font->DrawString(m_spriteBatch.get(), output, DirectX::XMFLOAT2{ 10, 10 }, Colors::Yellow);
m_spriteBatch->End();
Without spritefont->DrawString
With spritefont->DrawString
Here are my shaders.
Texture2D txDiffuse : register( t0 );
SamplerState samLinear : register(s0);
cbuffer WorldViewProjectionType : register(b0)
{
matrix World;
matrix View;
matrix Projection;
};
cbuffer TransparentBuffer
{
float4 blendAmount;
};
struct VS_INPUT
{
float4 Pos : POSITION;
float2 Tex : TEXCOORD0;
};
struct PS_INPUT
{
float4 Pos : SV_POSITION;
float2 Tex : TEXCOORD0;
};
PS_INPUT VS(VS_INPUT input)
{
PS_INPUT output = (PS_INPUT)0;
output.Pos.w = 1.0f;
output.Pos = mul(input.Pos, World);
output.Pos = mul(output.Pos, View);
output.Pos = mul(output.Pos, Projection);
output.Tex = input.Tex;
return output;
}
float4 PS(PS_INPUT input) : SV_Target
{
float4 color = txDiffuse.Sample(samLinear, input.Tex);
color.a = blendAmount.a;
return color;
}
float4 PSGray(PS_INPUT input) : SV_Target
{
float4 color = txDiffuse.Sample(samLinear, input.Tex);
float fLuminance = 0.299f * color.r + 0.587f * color.g + 0.114f * color.b;
return float4(fLuminance, fLuminance, fLuminance, blendAmount.a);
}
Sweet, I got it working, yippee. After the swapchain->Present I reset all of these:
//*************************************************************************
m_pImmediateContext->IASetInputLayout(m_pVertexLayout);
m_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
TurnOffAlphaBlending(m_pImmediateContext);
// Set the depth stencil state.
m_pImmediateContext->OMSetDepthStencilState(m_pdepthStencilState, 1);
m_pImmediateContext->OMSetRenderTargets(1, &m_pdesignerRenderTargetView, m_pdesignerDepthStencilView);
m_pImmediateContext->RSSetState(m_prasterState);
//*************************************************************************
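For what it's worth, this matches the DirectXTK documentation: SpriteBatch sets its own blend, depth-stencil, rasterizer and sampler states, plus shaders, input layout and primitive topology, and does not restore the previous ones. An alternative to resetting everything by hand is to pass your own states to Begin (a sketch, assuming the state-override overload of SpriteBatch::Begin):
// null parameters fall back to SpriteBatch defaults; the last two override
// the depth-stencil and rasterizer state it would otherwise set
m_spriteBatch->Begin(DirectX::SpriteSortMode_Deferred, nullptr, nullptr,
    m_pdepthStencilState, m_prasterState);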
I was experimenting with a texture as a UAV in a pixel shader by writing some values to it, but I'm not seeing the effect in the next draw call when I bind the same texture again as an SRV.
Example shader:
RWTexture2D<unsigned int> uav;
Texture2D tex : register(t0);
// Vertex Shader
float4 VS( float4 Pos : POSITION ) : SV_POSITION
{
return Pos;
}
// Pixel Shader, draw1 warm up
float4 PS( float4 Pos : SV_POSITION ) : SV_Target
{
return float4( 1.0f, 1.0f, 0.0f, 1.0f ); // Yellow, with Alpha = 1
}
// Pixel Shader, we are writing onto the texture by binding it as an UAV, draw2
float4 PS1( float4 Pos : SV_POSITION ) : SV_Target
{
if((Pos.x %2) && (Pos.y %2))
{
uav[Pos.xy]=0xFF000000; //some color
}
else
{
uav[Pos.xy]=0x00FF0000; //some color
}
return float4( 1.0f, 0.0f, 0.0f, 1.0f );
}
// Pixel Shader, here we are accessing texture as an SRV, draw3
float4 PS2( float4 Pos : SV_POSITION ) : SV_Target
{
float4 x = tex[Pos.xy];
return x;
}
I can provide the app source code if required.
I enabled the debug layer. It was a UAV format mismatch error: in the UAV description I declared R8G8B8A8_UNORM as the format, while the shader accesses the element as UINT.
description: D3D11 ERROR: ID3D11DeviceContext::Draw: The resource return type for component 0 declared in the shader code (UINT) is not compatible with the resource type bound to Unordered Access View slot 1 of the Pixel Shader unit (UNORM). This mismatch is invalid if the shader actually uses the view [ EXECUTION ERROR #2097372: DEVICE_UNORDEREDACCESSVIEW_RETURN_TYPE_MISMATCH]
Source code:
D3D11_UNORDERED_ACCESS_VIEW_DESC UAVdesc;
ZeroMemory( &UAVdesc, sizeof(UAVdesc));
UAVdesc.Format=DXGI_FORMAT_R8G8B8A8_UNORM;
UAVdesc.ViewDimension=D3D11_UAV_DIMENSION_TEXTURE2D;
UAVdesc.Texture2D.MipSlice=0;
g_pd3dDevice->CreateUnorderedAccessView( g_pTexture, &UAVdesc, &g_pUAV);
The texture is created like this:
D3D11_TEXTURE2D_DESC TextureData;
ZeroMemory( &TextureData, sizeof(TextureData) );
TextureData.ArraySize=1;
TextureData.Height=height;
TextureData.Width=width;
TextureData.Format=DXGI_FORMAT_R8G8B8A8_TYPELESS;
TextureData.CPUAccessFlags=0;
TextureData.BindFlags=D3D11_BIND_SHADER_RESOURCE|D3D11_BIND_RENDER_TARGET|D3D11_BIND_UNORDERED_ACCESS;
TextureData.MipLevels=1;
TextureData.MiscFlags=0;
TextureData.SampleDesc.Count=1;
TextureData.SampleDesc.Quality=0;
TextureData.Usage=D3D11_USAGE_DEFAULT;
D3D11_SUBRESOURCE_DATA InitialData;
ZeroMemory( &InitialData, sizeof(InitialData));
InitialData.pSysMem=pData;
InitialData.SysMemPitch=width * sizeof(UINT);
InitialData.SysMemSlicePitch=width * sizeof(UINT) * height;
g_pd3dDevice->CreateTexture2D( &TextureData, &InitialData, &g_pTexture);
Shader code is already given above.
Fix:
D3D11_UNORDERED_ACCESS_VIEW_DESC UAVdesc;
ZeroMemory( &UAVdesc, sizeof(UAVdesc));
UAVdesc.Format=DXGI_FORMAT_R32_UINT; // was DXGI_FORMAT_R8G8B8A8_UNORM
UAVdesc.ViewDimension=D3D11_UAV_DIMENSION_TEXTURE2D;
UAVdesc.Texture2D.MipSlice=0;
g_pd3dDevice->CreateUnorderedAccessView( g_pTexture, &UAVdesc, &g_pUAV);
Confirmed by dumping the texture through a staging resource. Thanks, guys.
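For reference, since the underlying texture is DXGI_FORMAT_R8G8B8A8_TYPELESS, the SRV used in draw3 can still view it as UNORM while the UAV views the same memory as R32_UINT. A sketch of the matching SRV creation (g_pSRV is a hypothetical variable):
D3D11_SHADER_RESOURCE_VIEW_DESC SRVdesc;
ZeroMemory( &SRVdesc, sizeof(SRVdesc));
SRVdesc.Format=DXGI_FORMAT_R8G8B8A8_UNORM;
SRVdesc.ViewDimension=D3D11_SRV_DIMENSION_TEXTURE2D;
SRVdesc.Texture2D.MostDetailedMip=0;
SRVdesc.Texture2D.MipLevels=1;
g_pd3dDevice->CreateShaderResourceView( g_pTexture, &SRVdesc, &g_pSRV );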
I've created a standard Win32 DirectX9 window and I'm rendering to it using a custom effect; however, I have a problem where the vertex colours are not interpolated in the result.
void CRender::Begin()
{
perf.begin();
// Capture device state so it can be restored later.
// We use ID3DXLine::Begin() to fix some bugs that I don't know how to fix.
mpLine->Begin();
// Setup shader
shader.Begin( static_cast<float>(FloatTime()) );
}
void CRender::End()
{
// Reverse order of Begin()
shader.End();
mpLine->End();
}
The problem lies with mpLine->Begin(): without calling it I get a perfectly nice interpolated triangle; with it, the whole triangle has the same colour as the first vertex.
Image for clarification: http://i.imgur.com/vKN4SnE.png
I am using ID3DXLine::Begin() just to set up the device state for me. The reason is that I'm rendering in the context of another program (a game) by hooking its EndScene(). The game may leave the device in an unusable state, causing rendering glitches in my overlay. All those problems go away when using ID3DXLine::Begin(), except that vertex colours aren't interpolated any more.
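(Aside: the standard way to snapshot and restore the full device state in this situation would be a state block; a minimal sketch:)
IDirect3DStateBlock9* pStateBlock = NULL;
dev->CreateStateBlock( D3DSBT_ALL, &pStateBlock ); // snapshots the current device state
// ... set my own state and render the overlay ...
pStateBlock->Apply();   // restore the snapshot
pStateBlock->Release();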
Vertex declaration:
// Create the vertex declaration for use with the shaders.
static const D3DVERTEXELEMENT9 vformat[] =
{
{ 0, 0, D3DDECLTYPE_FLOAT2, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_POSITION, 0 },
{ 0, 8, D3DDECLTYPE_D3DCOLOR, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_COLOR, 0 },
{ 0, 12, D3DDECLTYPE_FLOAT2, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_TEXCOORD, 0 },
D3DDECL_END()
};
HRESULT hr = dev->CreateVertexDeclaration( vformat, &decl );
Effect source:
// Vertex shader input
struct VSIN
{
float2 coord : POSITION;
float4 color : COLOR0;
float2 tex : TEXCOORD0;
};
// Vertex shader output / Pixel shader input
struct VSOUT
{
float4 coord : POSITION;
float4 color : COLOR0;
float2 tex : TEXCOORD0;
float2 pos : TEXCOORD1;
};
uniform float2 screen;
uniform float2x4 project;
float4 vstransform( float2 coord, const float2 shift )
{
float2 final = ( mul( project, float4(coord.x,coord.y,1,1) ) + shift ) * 2 / screen;
return float4( final.x-1, 1-final.y, 0, 1 );
}
VSOUT vsfix( VSIN data )
{
VSOUT vert;
const float2 shift = { -0.5f, -0.5f };
vert.coord = vstransform( data.coord, shift );
vert.color = data.color;
vert.tex = data.tex;
vert.pos = vert.coord.xy;
return vert;
}
float4 diffuse( VSOUT vert ) : COLOR
{
float4 px = vert.color;
return px;
}
technique Diffuse
{
pass p0
{
PixelShader = compile ps_2_0 diffuse();
VertexShader = compile vs_2_0 vsfix();
}
}
Can anyone tell me why I get this error:
The current vertex declaration does not include all the elements required by the current vertex shader. TextureCoordinate0 is missing.
With the standard Vertex Shader, everything is fine.
Here is my Shader File:
float4x4 World;
float4x4 View;
float4x4 Projection;
float4 color;
float seaLevel;
texture myTexture;
float maxHeight = 128;
float height;
sampler2D mySampler = sampler_state
{
Texture = <myTexture>;
MinFilter = Point;
MagFilter = Point;
MipFilter = Point;
AddressU = Clamp;
AddressV = Clamp;
};
struct VertexShaderInput
{
float4 Position : POSITION0;
};
struct VertexShaderOutput
{
float4 Position : POSITION0;
};
struct VS_INPUT
{
float4 position : POSITION;
float4 uv : TEXCOORD0;
};
struct VS_OUTPUT
{
float4 position : POSITION;
float4 uv : TEXCOORD0;
float4 worldPos : TEXCOORD1;
};
VS_OUTPUT Transform(VS_INPUT In)
{
VS_OUTPUT Out = (VS_OUTPUT)0;
float4x4 viewProj = mul(View, Projection);
float4x4 worldViewProj= mul(World, viewProj);
float height = tex2Dlod ( mySampler, float4(In.uv.xy , 0 , 0 ) );
In.position.y = height * maxHeight;
Out.worldPos = mul(In.position, World);
Out.position = mul( In.position , worldViewProj);
Out.uv = In.uv;
return Out;
}
VertexShaderOutput VertexShaderFunction(VertexShaderInput input)
{
VertexShaderOutput output;
float4 worldPosition = mul(input.Position, World);
worldPosition = float4(normalize(worldPosition.xyz) * seaLevel, 1);
float4 viewPosition = mul(worldPosition, View);
output.Position = mul(viewPosition, Projection);
return output;
}
float4 PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
return color;
}
technique Technique1
{
pass Pass1
{
VertexShader = compile vs_3_0 Transform();
PixelShader = compile ps_3_0 PixelShaderFunction();
}
}
The model that you are trying to draw has vertices that do not contain texture coordinates.
Your vertex shader needs a texture coordinate to work, as seen in the struct used by the vertex shader specified in your technique.
struct VS_INPUT
{
float4 position : POSITION;
float4 uv : TEXCOORD0;
};
technique Technique1
{
pass Pass1
{
VertexShader = compile vs_3_0 Transform();
So, you have two choices:
1) Remove "uv" from VS_INPUT (see the sketch after this list)
2) Add the texture coordinate field to the vertices used by your model.
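A sketch of choice 1, assuming the terrain is a regular grid so the UVs can be derived from the position (terrainSize is a hypothetical uniform holding the grid's world-space extent):
float terrainSize; // hypothetical: world-space size of the grid
struct VS_INPUT
{
    float4 position : POSITION;
};
VS_OUTPUT Transform(VS_INPUT In)
{
    VS_OUTPUT Out = (VS_OUTPUT)0;
    float4x4 viewProj = mul(View, Projection);
    float4x4 worldViewProj = mul(World, viewProj);
    // Derive UVs from the grid position instead of reading them from the vertex.
    float4 uv = float4(In.position.xz / terrainSize, 0, 0);
    float height = tex2Dlod(mySampler, uv);
    In.position.y = height * maxHeight;
    Out.worldPos = mul(In.position, World);
    Out.position = mul(In.position, worldViewProj);
    Out.uv = uv;
    return Out;
}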
I solved the same problem by opening the model in 3D Studio Max, adding a "UVW Map" modifier and exporting it back. Not a satisfying solution, but it worked for me.
I am really stuck on this one. My HLSL for rendering point sprites with texture coordinates from a sprite sheet works fine on all ATI cards but not on any NVIDIA cards: on NVIDIA cards the passed texture coordinates map to the whole sprite sheet rather than to a portion of it. Am I missing something unique to ATI cards?
Here is my shader
struct VS_INPUT
{
float4 Position : POSITION;
float4 Color : COLOR;
float4 Texture : TEXCOORD0;
//float1 Psize : PSIZE0;
};
struct VS_OUTPUT
{
float4 Position : POSITION;
float4 Color : COLOR;
float2 Texture : TEXCOORD0;
float2 Texture_zw : TEXCOORD1;
float1 Psize : PSIZE;
};
float4x4 WorldViewProj;
texture Tex <string name = "sprite_coin_test.dds";>;
sampler2D s_2D;
float offset_x=0.0;
float offset_y=0.0;
sampler S0 = sampler_state
{
Texture = (Tex);
MinFilter = ANISOTROPIC; //LINEAR;
MagFilter = ANISOTROPIC; //LINEAR;
MipFilter = LINEAR;
};
VS_OUTPUT vs_main( in VS_INPUT In )
{
VS_OUTPUT Out=(VS_OUTPUT)0; //create an output vertex
Out.Position = mul(In.Position, WorldViewProj); //apply vertex transformation
Out.Texture = In.Texture;
Out.Texture_zw = float2(In.Texture.z, In.Texture.w);
Out.Color = In.Color;
//Out.Psize = In.Psize;
Out.Psize=(Out.Position.z)*10.0;
return Out; //return output vertex
}
float4 PS_Particle_main(float2 vPos: TEXCOORD0, float2 text_zw: TEXCOORD1) : COLOR
{
vPos.x*=offset_x;
vPos.y*=offset_y;
vPos += float2(text_zw[0], text_zw[1]);
return tex2D(s_2D, vPos);
}
technique RenderVS
{
pass p0
{
AlphaBlendEnable = true;
AlphaTestEnable = false;
SrcBlend = SRCALPHA;
DestBlend = INVSRCALPHA;
POINTSPRITEENABLE = true;
POINTSCALEENABLE = true;
POINTSIZE_MIN = 1.0f;
POINTSIZE_MAX = 400.0f;
POINTSCALE_A = 1.0f;
POINTSCALE_B = 1.0f;
POINTSCALE_C = 1.0f;
ZWRITEENABLE = false;
Sampler[0] = (S0);
VertexShader = compile vs_1_1 vs_main();
PixelShader = compile ps_2_0 PS_Particle_main();
}
}
I had the same problem for a while and it cost me a lot of time. I have not found any documentation about this behaviour, but by testing on ATI and NVIDIA devices I found the difference. With point sprites, ATI works fine: it interpolates the texture coordinates properly into TEXCOORD0. NVIDIA does nearly the same, but it writes the point-sprite texture coordinates into every field with a TEXCOORD interpolator, so any information you pass to the pixel shader via texture coordinates gets overwritten. I solved this by using a COLOR interpolator instead of a TEXCOORD interpolator. Very strange, but it works fine for me :) In your case it would be:
struct VS_OUTPUT
{
float4 Position : POSITION;
float4 Color : COLOR0;
float2 Texture : TEXCOORD0;
float2 Texture_zw : COLOR1;
float1 Psize : PSIZE;
};
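The pixel shader input semantic then has to change to match (not part of the original answer, but implied by it):
float4 PS_Particle_main(float2 vPos: TEXCOORD0, float2 text_zw: COLOR1) : COLOR
{
    vPos.x*=offset_x;
    vPos.y*=offset_y;
    vPos += float2(text_zw[0], text_zw[1]);
    return tex2D(s_2D, vPos);
}
One caveat: on ps_2_0-class hardware, COLOR interpolators may be clamped to [0,1] and carry less precision than TEXCOORD, which is acceptable here since the packed sheet offsets already lie in [0,1].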