DirectXTK and 3D Object - directx

I have an application that displays a 3D object using D3D11 and DirectXMath. I also want to display a HUD in the top-left corner, so I thought I would use the DirectXTK SpriteBatch/SpriteFont to do this. My 3D rendering is fine until I add the following code.
Is DirectXTK using its own shaders and changing some states?
The question is: how do I fix this?
m_spriteBatch->Begin();
const wchar_t* output = L"Hello World";
m_font->DrawString(m_spriteBatch.get(), output, DirectX::XMFLOAT2{ 10, 10 }, Colors::Yellow);
m_spriteBatch->End();
Screenshot without spriteFont->DrawString (3D scene renders correctly)
Screenshot with spriteFont->DrawString (3D scene is broken)
Here are my shaders.
Texture2D txDiffuse : register( t0 );
SamplerState samLinear : register(s0);
cbuffer WorldViewProjectionType : register(b0)
{
matrix World;
matrix View;
matrix Projection;
};
cbuffer TransparentBuffer
{
float4 blendAmount;
};
struct VS_INPUT
{
float4 Pos : POSITION;
float2 Tex : TEXCOORD0;
};
struct PS_INPUT
{
float4 Pos : SV_POSITION;
float2 Tex : TEXCOORD0;
};
PS_INPUT VS(VS_INPUT input)
{
PS_INPUT output = (PS_INPUT)0;
output.Pos.w = 1.0f;
output.Pos = mul(input.Pos, World);
output.Pos = mul(output.Pos, View);
output.Pos = mul(output.Pos, Projection);
output.Tex = input.Tex;
return output;
}
float4 PS(PS_INPUT input) : SV_Target
{
float4 color = txDiffuse.Sample(samLinear, input.Tex);
color.a = blendAmount.a;
return color;
}
float4 PSGray(PS_INPUT input) : SV_Target
{
float4 color = txDiffuse.Sample(samLinear, input.Tex);
float fLuminance = 0.299f * color.r + 0.587f * color.g + 0.114f * color.b;
return float4(fLuminance, fLuminance, fLuminance, blendAmount.a);
}

Sweet, I got it working, yippee. After the swap chain's Present() call I reset all of these states:
//*************************************************************************
m_pImmediateContext->IASetInputLayout(m_pVertexLayout);
m_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
TurnOffAlphaBlending(m_pImmediateContext);
// Set the depth stencil state.
m_pImmediateContext->OMSetDepthStencilState(m_pdepthStencilState, 1);
m_pImmediateContext->OMSetRenderTargets(1, &m_pdesignerRenderTargetView, m_pdesignerDepthStencilView);
m_pImmediateContext->RSSetState(m_prasterState);
//*************************************************************************
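For anyone hitting this later: SpriteBatch sets its own blend, sampler, depth-stencil and rasterizer states plus its own shaders and input layout inside Begin()/End(), and it does not put the previous state back, so the 3D pipeline state has to be re-applied before the next 3D draw. Below is a rough sketch of the same reset using DirectXTK's CommonStates helper instead of hand-made state objects; m_states is an assumed std::unique_ptr<DirectX::CommonStates> created at init time (not part of the original code), and the specific states chosen are just examples.
// Assumed: std::unique_ptr<DirectX::CommonStates> m_states = std::make_unique<DirectX::CommonStates>(m_pd3dDevice);
// Re-apply the 3D pipeline state after the 2D HUD pass each frame.
m_pImmediateContext->OMSetBlendState(m_states->Opaque(), nullptr, 0xFFFFFFFF);   // opaque 3D pass
m_pImmediateContext->OMSetDepthStencilState(m_states->DepthDefault(), 1);        // depth test + write
m_pImmediateContext->RSSetState(m_states->CullCounterClockwise());               // default culling
ID3D11SamplerState* sampler = m_states->LinearWrap();
m_pImmediateContext->PSSetSamplers(0, 1, &sampler);                              // SpriteBatch touched sampler slot 0
m_pImmediateContext->IASetInputLayout(m_pVertexLayout);
m_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
SpriteBatch::Begin() also has an overload that takes explicit blend/sampler/depth-stencil/rasterizer state objects, if you would rather tell it up front which states to use.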

Related

Vertex color is not interpolated in the context of ID3DXLine

I've created a standard Win32 DirectX9 window and I'm rendering to it using a custom effect; however, I have a problem where the colours of the vertices are not interpolated in the result.
void CRender::Begin()
{
perf.begin();
// Capture device state so it can be restored later.
// We use ID3DXLine::Begin() to fix some bugs that I don't know how to fix.
mpLine->Begin();
// Setup shader
shader.Begin( static_cast<float>(FloatTime()) );
}
void CRender::End()
{
// Reverse order of Begin()
shader.End();
mpLine->End();
}
The problem lies with mpLine->Begin(): without calling it I get a nicely interpolated triangle; with it, the whole triangle has the same colour as the first vertex.
Image for clarification: http://i.imgur.com/vKN4SnE.png
I am using ID3DXLine::Begin() just to set up the device state for me. The reason is that I'm rendering in the context of another program (a game) by hooking its EndScene(). The game may leave the device in an unusable state, causing rendering glitches in my overlay; all of these problems go away when using ID3DXLine::Begin(), except that vertex colours aren't interpolated any more.
Vertex declaration:
// Create the vertex declaration for use with the shaders.
static const D3DVERTEXELEMENT9 vformat[] =
{
{ 0, 0, D3DDECLTYPE_FLOAT2, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_POSITION, 0 },
{ 0, 8, D3DDECLTYPE_D3DCOLOR, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_COLOR, 0 },
{ 0, 12, D3DDECLTYPE_FLOAT2, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_TEXCOORD, 0 },
D3DDECL_END()
};
HRESULT hr = dev->CreateVertexDeclaration( vformat, &decl );
Effect source:
// Vertex shader input
struct VSIN
{
float2 coord : POSITION;
float4 color : COLOR0;
float2 tex : TEXCOORD0;
};
// Vertex shader output / Pixel shader input
struct VSOUT
{
float4 coord : POSITION;
float4 color : COLOR0;
float2 tex : TEXCOORD0;
float2 pos : TEXCOORD1;
};
uniform float2 screen;
uniform float2x4 project;
float4 vstransform( float2 coord, const float2 shift )
{
float2 final = ( mul( project, float4(coord.x,coord.y,1,1) ) + shift ) * 2 / screen;
return float4( final.x-1, 1-final.y, 0, 1 );
}
VSOUT vsfix( VSIN data )
{
VSOUT vert;
const float2 shift = { -0.5f, -0.5f };
vert.coord = vstransform( data.coord, shift );
vert.color = data.color;
vert.tex = data.tex;
vert.pos = vert.coord.xy;
return vert;
}
float4 diffuse( VSOUT vert ) : COLOR
{
float4 px = vert.color;
return px;
}
technique Diffuse
{
pass p0
{
PixelShader = compile ps_2_0 diffuse();
VertexShader = compile vs_2_0 vsfix();
}
}
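If the goal is only to start from a known-good device state, one alternative to leaning on ID3DXLine::Begin() is to capture the game's state with a D3D9 state block, apply the render states the overlay needs explicitly, and restore everything in End(). This is only a sketch: CRender::Init(), mpStateBlock and the reuse of dev as a member are assumptions, not code from the question. It is also worth checking D3DRS_SHADEMODE: if it ends up as D3DSHADE_FLAT, COLOR-semantic interpolators are not interpolated even with shaders bound, which would produce exactly this single-colour triangle.
// Assumed members: IDirect3DStateBlock9* mpStateBlock; IDirect3DDevice9* dev;
void CRender::Init()   // hypothetical init hook, called once after the device is obtained
{
    dev->CreateStateBlock(D3DSBT_ALL, &mpStateBlock);   // capture-all state block
}
void CRender::Begin()
{
    perf.begin();
    mpStateBlock->Capture();   // snapshot the game's current device state
    mpLine->Begin();           // still usable for its state setup, or set states manually instead
    dev->SetRenderState(D3DRS_SHADEMODE, D3DSHADE_GOURAUD); // make sure colours interpolate
    shader.Begin(static_cast<float>(FloatTime()));
}
void CRender::End()
{
    shader.End();
    mpLine->End();
    mpStateBlock->Apply();     // hand the device back exactly as captured
}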

Fog shader camera

I am having some difficulty with my vertex/fragment fog shader in Unity. The visual result is good, but the problem is that the gradient is based on the camera's position: it moves as the camera moves. I don't know how to fix it.
Here is the shader code.
struct v2f {
float4 pos : SV_POSITION;
float4 grabUV : TEXCOORD0;
float2 uv_depth : TEXCOORD1;
float4 interpolatedRay : TEXCOORD2;
float4 screenPos : TEXCOORD3;
};
v2f vert(appdata_base v) {
v2f o;
o.pos = mul(UNITY_MATRIX_MVP, v.vertex);
o.uv_depth = v.texcoord.xy;
o.grabUV = ComputeGrabScreenPos(o.pos);
half index = v.vertex.z;
o.screenPos = ComputeScreenPos(o.pos);
o.interpolatedRay = mul(UNITY_MATRIX_MV, v.vertex);
return o;
}
sampler2D _GrabTexture;
float4 frag(v2f IN) : COLOR {
float3 uv = UNITY_PROJ_COORD(IN.grabUV);
float dpth = UNITY_SAMPLE_DEPTH(tex2Dproj(_CameraDepthTexture, uv));
dpth = LinearEyeDepth(dpth);
float4 wsPos = (IN.screenPos + dpth * IN.interpolatedRay); // Here is the problem but how to fix it
float fogVert = max(0.0, (wsPos.y - _Depth) * (_DepthScale * 0.1f));
fogVert *= fogVert;
fogVert = (exp (-fogVert));
return fogVert;
}
It seems to be a matrix problem with this line:
o.interpolatedRay = mul(UNITY_MATRIX_MV, v.vertex);
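One common way to make the gradient independent of the camera (only a sketch, not tested against this exact setup) is to build the ray in world space in the vertex shader and reconstruct the world position from the camera origin plus the scaled ray. _WorldSpaceCameraPos is a Unity built-in; _Object2World is the object-to-world matrix in this Unity version (unity_ObjectToWorld in newer ones).
// vertex shader: world-space ray from the camera to the vertex,
// with the vertex's view-space depth stored in .w for later scaling
float4 viewPos = mul(UNITY_MATRIX_MV, v.vertex);
o.interpolatedRay.xyz = mul(_Object2World, v.vertex).xyz - _WorldSpaceCameraPos;
o.interpolatedRay.w = -viewPos.z; // view space looks down -Z
// fragment shader: stretch the ray so it reaches the surface sampled from the depth buffer
float3 wsPos = _WorldSpaceCameraPos + IN.interpolatedRay.xyz * (dpth / IN.interpolatedRay.w);
float fogVert = max(0.0, (wsPos.y - _Depth) * (_DepthScale * 0.1f));
With the position reconstructed this way, the height-based fog term depends only on the world-space Y of the surface, not on where the camera happens to be.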

XNA/HLSL Heightmap from VertexShader

Can anyone tell me why I get this error:
The current vertex declaration does not include all the elements required by the current vertex shader. TextureCoordinate0 is missing.
With the standard Vertex Shader, everything is fine.
Here is my Shader File:
float4x4 World;
float4x4 View;
float4x4 Projection;
float4 color;
float seaLevel;
texture myTexture;
float maxHeight = 128;
float height;
sampler2D mySampler = sampler_state
{
Texture = <myTexture>;
MinFilter = Point;
MagFilter = Point;
MipFilter = Point;
AddressU = Clamp;
AddressV = Clamp;
};
struct VertexShaderInput
{
float4 Position : POSITION0;
};
struct VertexShaderOutput
{
float4 Position : POSITION0;
};
struct VS_INPUT
{
float4 position : POSITION;
float4 uv : TEXCOORD0;
};
struct VS_OUTPUT
{
float4 position : POSITION;
float4 uv : TEXCOORD0;
float4 worldPos : TEXCOORD1;
};
VS_OUTPUT Transform(VS_INPUT In)
{
VS_OUTPUT Out = (VS_OUTPUT)0;
float4x4 viewProj = mul(View, Projection);
float4x4 worldViewProj= mul(World, viewProj);
float height = tex2Dlod ( mySampler, float4(In.uv.xy , 0 , 0 ) );
In.position.y = height * maxHeight;
Out.worldPos = mul(In.position, World);
Out.position = mul( In.position , worldViewProj);
Out.uv = In.uv;
return Out;
}
VertexShaderOutput VertexShaderFunction(VertexShaderInput input)
{
VertexShaderOutput output;
float4 worldPosition = mul(input.Position, World);
worldPosition = float4(normalize(worldPosition.xyz) * seaLevel, 1);
float4 viewPosition = mul(worldPosition, View);
output.Position = mul(viewPosition, Projection);
return output;
}
float4 PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
return color;
}
technique Technique1
{
pass Pass1
{
VertexShader = compile vs_3_0 Transform();
PixelShader = compile ps_3_0 PixelShaderFunction();
}
}
The model that you are trying to draw has vertices that do not contain texture coordinates.
Your vertex shader needs a texture coordinate to work, as seen in the struct that is used by the vertex shader specified in your technique.
struct VS_INPUT
{
float4 position : POSITION;
float4 uv : TEXCOORD0;
};
technique Technique1
{
pass Pass1
{
VertexShader = compile vs_3_0 Transform();
So, you have two choices:
1) Remove "uv" from VS_INPUT (a sketch of this approach follows below)
2) Add the texture coordinate field to the vertex type used by your model.
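For option 1, and only as a hypothetical sketch: if the terrain is a regular grid in the XZ plane you can drop TEXCOORD0 from VS_INPUT and derive the texture coordinate from the vertex position instead. terrainWidth and terrainLength are assumed uniforms for the grid extents; they are not in the original shader.
struct VS_INPUT
{
    float4 position : POSITION;
};
float terrainWidth;   // assumed grid extent along X
float terrainLength;  // assumed grid extent along Z
VS_OUTPUT Transform(VS_INPUT In)
{
    VS_OUTPUT Out = (VS_OUTPUT)0;
    float4x4 viewProj = mul(View, Projection);
    float4x4 worldViewProj = mul(World, viewProj);
    // derive UVs from the grid position so the vertex needs no TEXCOORD0 element
    float2 uv = float2(In.position.x / terrainWidth, In.position.z / terrainLength);
    float height = tex2Dlod(mySampler, float4(uv, 0, 0)).r;
    In.position.y = height * maxHeight;
    Out.worldPos = mul(In.position, World);
    Out.position = mul(In.position, worldViewProj);
    Out.uv = float4(uv, 0, 0);
    return Out;
}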
I solved the same problem by opening the model in 3D Studio Max, adding a "UVW Map" modifier, and exporting it back. Not a satisfying solution, but it worked for me.

HLSL invalid ps_2_0 input semantic POSITION0

I'm attempting to write a phong shader effect for Dx9 in RenderMonkey.
I'm getting a compile error in the pixel shader:
"invalid ps_2_0 input semantic 'POSITION0'"
and I'm not sure how to fix it, although I know it has to be something to do with the POSITION0 semantic in VS_OUTPUT.
I tried changing VS_OUTPUT's Pos semantic to TEXCOORD0, but then the system reports that
vertex shader must minimally write all four components of POSITION
Shaders are supplied below. Any suggestions?
Here's my vertex shader:
struct VS_INPUT
{
float4 Pos : POSITION0;
float3 Normal : NORMAL0;
};
struct VS_OUTPUT
{
float4 Pos : POSITION0;
float3 Normal : TEXCOORD0;
};
VS_OUTPUT vs_main( VS_INPUT Input )
{
VS_OUTPUT Output;
Output.Pos = Input.Pos;
Output.Normal = Input.Normal;
return Output;
}
and my pixel shader:
float4x4 matViewProjection;
// light source
float4 lightPos;
float4 Ambient;
float4 Diffuse;
float4 Specular;
// material reflection properties
float4 Ke;
float4 Ka;
float4 Kd;
float4 Ks;
float nSpecular;
// eye
float4 eyePosition;
struct VS_OUTPUT
{
float4 Pos : POSITION0;
float3 Normal : TEXCOORD0;
};
float4 ps_main( VS_OUTPUT vsOutput ) : COLOR0
{
vsOutput.Pos = mul( vsOutput.Pos, matViewProjection );
float3 ViewDirection = normalize( eyePosition.xyz - vsOutput.Pos.xyz );
float3 LightDirection = normalize( lightPos.xyz - vsOutput.Pos.xyz );
float3 N = normalize( vsOutput.Normal );
float3 R = reflect( -LightDirection, N );
float LdotN = max( 0.0, dot( LightDirection, N ) );
float VdotR = max( 0.0, dot( ViewDirection, R ) );
// find colour components
float4 a = Ka * Ambient;
float4 d = Kd * Diffuse * LdotN;
float4 s = Ks * Specular * pow( VdotR, nSpecular );
float4 FragColour = Ke + a + d + s;
return FragColour;
}
Okay, I found a solution for those interested.
The Vertex Shader should have the following structs defined:
struct VS_INPUT
{
float4 Pos : POSITION0;
float3 Normal : NORMAL0;
};
struct VS_OUTPUT
{
float4 Pos : POSITION0;
float4 PosOut : TEXCOORD0;
float3 Normal : TEXCOORD1;
};
The VS_OUTPUT struct should be different in the pixel shader:
struct VS_OUTPUT
{
float4 PosOut : TEXCOORD0;
float3 Normal : TEXCOORD1;
};
My problem stemmed from the fact that you can't have a POSITION semantic as input to the pixel shader, at least not for ps_2_0.
You are not allowed to read the value of POSITION0 from the vertex shader output in the pixel shader; that attribute is consumed by the rasterizer somewhere between the vertex and pixel stages.
Instead you need to declare another attribute in the vertex shader output, e.g.
float4 newPosition : TEXCOORD1;
assign it the same value as POSITION0, and use this new attribute in the pixel shader function.
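To make that concrete, here is a minimal sketch of the matching vertex shader, with nothing else changed: POSITION0 still feeds the rasterizer, PosOut carries a readable copy of the same value into TEXCOORD0, and the pixel shader then uses vsOutput.PosOut wherever it previously used vsOutput.Pos.
VS_OUTPUT vs_main( VS_INPUT Input )
{
    VS_OUTPUT Output;
    Output.Pos    = Input.Pos;    // consumed by the rasterizer (POSITION0)
    Output.PosOut = Input.Pos;    // duplicate value the pixel shader is allowed to read
    Output.Normal = Input.Normal;
    return Output;
}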

HLSL point sprite texture coordinates work on ATI not NVIDIA

I am really stuck on this one. My HLSL for rendering point sprites with texture coordinates for a sprite sheet works fine on all ATI cards but not on any NVIDIA cards. On NVIDIA cards the passed texture coordinates map to the whole sprite sheet rather than just a portion of it. Am I missing something unique to ATI cards?
Here is my shader
struct VS_INPUT
{
float4 Position : POSITION;
float4 Color : COLOR;
float4 Texture : TEXCOORD0;
//float1 Psize : PSIZE0;
};
struct VS_OUTPUT
{
float4 Position : POSITION;
float4 Color : COLOR;
float2 Texture : TEXCOORD0;
float2 Texture_zw : TEXCOORD1;
float1 Psize : PSIZE;
};
float4x4 WorldViewProj;
texture Tex <string name = "sprite_coin_test.dds";>;
sampler2D s_2D;
float offset_x=0.0;
float offset_y=0.0;
sampler S0 = sampler_state
{
Texture = (Tex);
MinFilter = ANISOTROPIC; //LINEAR;
MagFilter = ANISOTROPIC; //LINEAR;
MipFilter = LINEAR;
};
VS_OUTPUT vs_main( in VS_INPUT In )
{
VS_OUTPUT Out=(VS_OUTPUT)0; //create an output vertex
Out.Position = mul(In.Position, WorldViewProj); //apply vertex transformation
Out.Texture = In.Texture;
Out.Texture_zw = float2(In.Texture.z, In.Texture.w);
Out.Color = In.Color;
//Out.Psize = In.Psize;
Out.Psize=(Out.Position.z)*10.0;
return Out; //return output vertex
}
float4 PS_Particle_main(float2 vPos: TEXCOORD0, float2 text_zw: TEXCOORD1) : COLOR
{
vPos.x*=offset_x;
vPos.y*=offset_y;
vPos += float2(text_zw[0], text_zw[1]);
return tex2D(s_2D, vPos);
}
technique RenderVS
{
pass p0
{
AlphaBlendEnable = true;
AlphaTestEnable = false;
SrcBlend = SRCALPHA;
DestBlend = INVSRCALPHA;
POINTSPRITEENABLE = true;
POINTSCALEENABLE = true;
POINTSIZE_MIN = 1.0f;
POINTSIZE_MAX = 400.0f;
POINTSCALE_A = 1.0f;
POINTSCALE_B = 1.0f;
POINTSCALE_C = 1.0f;
ZWRITEENABLE = false;
Sampler[0] = (S0);
VertexShader = compile vs_1_1 vs_main();
PixelShader = compile ps_2_0 PS_Particle_main();
}
}
I had the same problem for a while and it cost me a lot of time. I have not found any documentation about this issue, but by testing on ATI and NVIDIA devices I found the difference. With point sprites ATI works fine: it interpolates the texture coordinates properly into TEXCOORD0. NVIDIA does nearly the same, but it writes the point-sprite texture coordinates into every field that uses a TEXCOORD interpolator, so any information you pass to the pixel shader through texture coordinates gets overwritten. I solved this by using a COLOR interpolator instead of a TEXCOORD interpolator. Very strange, but it works fine for me :) In your case it would be:
struct VS_OUTPUT
{
float4 Position : POSITION;
float4 Color : COLOR0;
float2 Texture : TEXCOORD0;
float2 Texture_zw : COLOR1;
float1 Psize : PSIZE;
};
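The pixel shader entry point then has to read the sheet offset from COLOR1 instead of TEXCOORD1; a sketch based on the struct above is shown here. One caveat: in ps_2_0 the COLOR interpolators may be clamped to [0,1] and carry less precision than TEXCOORD, which is usually fine for normalized sprite-sheet offsets but worth keeping in mind.
float4 PS_Particle_main(float2 vPos : TEXCOORD0, float2 text_zw : COLOR1) : COLOR
{
    vPos.x *= offset_x;
    vPos.y *= offset_y;
    vPos += float2(text_zw[0], text_zw[1]);
    return tex2D(s_2D, vPos);
}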
