DX11 HLSL Secondary Texture Coordinates Lost - delphi

Been banging my head up against the wall with this for a while. Despite the fact that I THINK I have a proper Vertex Format defined with D3D11_INPUT_ELEMENT_DESC, no matter what I do, I can't seem to read my TEXCOORD1 values from this shader. To test this shader, I put random values into my second set of UV coordinates just to see if they were reaching the shader, but to my dismay, I haven't been able to find these random values anywhere. I have also watched the data go into the mapped memory directly, and I am pretty sure the random values were there when they were mapped.
Here is the Shader code:
// Pixel shader: blends a base image with a reflection texture; both are
// sampled through the same sampler state bound at s0.
sampler ImageSampler: register(s0);
Texture2D <float4> ImageTexture: register(t0);
Texture2D <float4> ReflectionTexture: register(t1);
//Texture2D <float4> ReflectionMap: register(t0);
// Interpolants received from the vertex shader. NOTE(review): these semantics
// (TEXCOORD == TEXCOORD0, TEXCOORD1) must match the vertex shader's OUTPUT
// semantics exactly; the fixed vertex shader quoted later in this post writes
// TEXCOORD2/TEXCOORD3, so the two stages need to be kept in sync.
struct PS_IN
{
float4 InPos: SV_POSITION;
float2 InTex: TEXCOORD;
float2 InRef: TEXCOORD1;
float4 InCol: COLOR0;
};
float4 main(PS_IN input): SV_TARGET
{
float4 res;
float4 mul;   // NOTE(review): shadows the HLSL mul() intrinsic name; legal but confusing
float2 tcRef; // unused
// Base colour tinted by the interpolated vertex colour.
float4 res1 = ImageTexture.Sample(ImageSampler, input.InTex) * input.InCol;
// Reflection sample, offset by the primary UVs.
float4 res2 = ReflectionTexture.Sample(ImageSampler, input.InRef+input.InTex);
// 50/50 average of base and reflection.
mul.r = 0.5;
mul.g = 0.5;
mul.b = 0.5;
mul.a = 0.5;
res = res1 + res2;
res = res * mul;
// Keep the base image's alpha rather than the averaged one.
res.a = res1.a;
// Debug output: visualise the secondary UVs directly in red/blue.
res.r = input.InRef.x;//<-----should be filled with random stuff... not working
res.b = input.InRef.y;//<-----should be filled with random stuff... not working
return res;
}
Here is my D3D11_INPUT_ELEMENT_DESC... (sorry it is in pascal, but I like pascal)
const
// Input layout for the canvas vertex stream (single slot 0).
// Element offsets: POSITION @0 (8 bytes), TEXCOORD0 @8 (8 bytes),
// TEXCOORD1 @16 (8 bytes), COLOR0 @24 (4 bytes) -> 28-byte stride,
// matching TVertexEntry below.
CanvasVertexLayout: array[0..3] of D3D11_INPUT_ELEMENT_DESC =
((SemanticName: 'POSITION';
SemanticIndex: 0;
Format: DXGI_FORMAT_R32G32_FLOAT;      // X, Y as two 32-bit floats
InputSlot: 0;
AlignedByteOffset: 0;
InputSlotClass: D3D11_INPUT_PER_VERTEX_DATA;
InstanceDataStepRate: 0),
(SemanticName: 'TEXCOORD';
SemanticIndex: 0;                      // primary UV set (U, V)
Format: DXGI_FORMAT_R32G32_FLOAT;
InputSlot: 0;
AlignedByteOffset: 8;
InputSlotClass: D3D11_INPUT_PER_VERTEX_DATA;
InstanceDataStepRate: 0),
(SemanticName: 'TEXCOORD';
SemanticIndex: 1;                      // secondary UV set (u2, v2)
Format: DXGI_FORMAT_R32G32_FLOAT;
InputSlot: 0;
AlignedByteOffset: 16;
InputSlotClass: D3D11_INPUT_PER_VERTEX_DATA;
InstanceDataStepRate: 0),
(SemanticName: 'COLOR';
SemanticIndex: 0;                      // packed RGBA, expanded to float4 in the shader
Format: DXGI_FORMAT_R8G8B8A8_UNORM;
InputSlot: 0;
AlignedByteOffset: 24;
InputSlotClass: D3D11_INPUT_PER_VERTEX_DATA;
InstanceDataStepRate: 0)
);
And here's the Vertex Struct
// One canvas vertex; `packed` guarantees no padding, so the layout is exactly
// 28 bytes and lines up with the AlignedByteOffset values in
// CanvasVertexLayout (0 / 8 / 16 / 24).
TVertexEntry = packed record
X, Y: Single;      // POSITION  (offset 0)
U, V: Single;      // TEXCOORD0 (offset 8)
u2,v2:single;      // TEXCOORD1 (offset 16)
Color: LongWord;   // COLOR0, packed RGBA (offset 24)
end;
Since the COLOR semantic follows the TEXCOORD semantics, my best guess is that the problem is with the SHADER and not the pascal code... but since I'm new to this kind of stuff, I'm obviously lost.
Any insight is appreciated.

Answering my own question. Since I'm new to Shaders in general, maybe this will help some other newbs.
I was assuming that all I needed to do was add a second set of UV coordinates to the Vertex Format and add a D3D11_INPUT_ELEMENT_DESC for it. However, there is also a vertex shader involved, more-or-less a passthrough and that vertex shader needs to be aware of the new UV coordinates and let them pass through. I was just making a 2D engine so I didn't think that I'd even have to mess with VertexShaders... go figure. So I modified the vertex shader, and this was the result:
// Vertex shader: 2D pass-through. Positions arrive already in clip-ish 2D
// space and are promoted to float4; all attributes are forwarded unchanged.
// NOTE(review): the outputs use TEXCOORD2/TEXCOORD3 while the pixel shader
// quoted earlier declares its inputs as TEXCOORD(0)/TEXCOORD1 — the semantic
// indices must match between the two stages; confirm against the final PS.
void main(
float2 InPos: POSITION0,
float2 InTex: TEXCOORD0,
float2 InTex2: TEXCOORD1,//<--added
float4 InCol: COLOR0,
out float4 OutPos: SV_POSITION,
out float2 OutTex: TEXCOORD2,
out float2 OutTex2: TEXCOORD3,//<--added
out float4 OutCol: COLOR0)
{
// Promote the 2D position to homogeneous clip space (z=0, w=1).
OutPos = float4(InPos, 0.0, 1.0);
OutTex = InTex;
OutCol = InCol;
OutTex2 = InTex2;//<--added
}

Related

Update texture from vector directx9

I'm trying to render two textures, one for RGB and another one for the alpha channel, and I blend them together with a shader.
The alpha channel texture doesn't overlap properly with the RGB one. It seems to be stretched.
The alpha channel texture changes at every frame and I need to fill it, starting from an array of uint8_t, using the following function:
// Upload the per-frame alpha values into the D3D9 alpha texture, one row at a
// time, honouring the driver-chosen row pitch.
// Assumes 1 byte per texel (e.g. D3DFMT_A8) since exactly `width` bytes are
// copied per row — TODO confirm the texture format matches.
D3DLOCKED_RECT locked_rect;
HRESULT hr = alpha_tex->LockRect(0, &locked_rect, nullptr, 0);
if (!FAILED(hr)) {
ret_code = 0;
BYTE *p_dst = (BYTE *)locked_rect.pBits;
for (uint y = 0; y < height; y++) {
memcpy(p_dst, alpha_array, width);          // copy one row of alpha values
alpha_array += width;                       // advance source by a tightly-packed row
p_dst += locked_rect.Pitch;                 // advance destination by the locked pitch
}
alpha_tex->UnlockRect(0);
}
where the alpha_array is a uint8_t array containing the alpha values.
To render the texture i use the following function:
// Render path: clear, copy the decoded frame into the RGB texture, bind both
// textures to the effect, and draw a textured quad with alpha blending on.
hwctx->d3d9device->Clear(0, 0, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, 0xffeeeeee, 1.0f, 0);
hwctx->d3d9device->BeginScene();
ctx->mFX->SetTechnique(ctx->mhTech);
ctx->texRGB->GetSurfaceLevel(0, &ctx->surfRGB);
// Blit the decoder's frame surface into the RGB texture's top mip.
// NOTE(review): StretchRect scales source to destination; if the surface and
// texture sizes differ this is a likely cause of the reported stretching.
hwctx->d3d9device->StretchRect((IDirect3DSurface9*)s->vdrFrame->data[3], NULL, ctx->surfRGB, NULL, D3DTEXF_LINEAR);
ctx->mFX->SetTexture(ctx->mhTexRGB, ctx->texRGB);
ctx->mFX->SetTexture(ctx->mhTexAlpha, ctx->texAlpha);
// Enable alpha blending.
hwctx->d3d9device->SetRenderState(D3DRS_ALPHABLENDENABLE, true);
hwctx->d3d9device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
hwctx->d3d9device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
UINT numPasses = 0;
ctx->mFX->Begin(&numPasses, 0);
for (UINT i = 0; i < numPasses; ++i){
ctx->mFX->BeginPass(i);
// Two triangles as a fan = one full-screen/quad primitive.
hwctx->d3d9device->DrawPrimitive(D3DPT_TRIANGLEFAN, 0, 2);
ctx->mFX->EndPass();
}
ctx->mFX->End();
hwctx->d3d9device->EndScene();
hwctx->d3d9device->Present(0, 0, 0, 0);
// Disable alpha blending.
hwctx->d3d9device->SetRenderState(D3DRS_ALPHABLENDENABLE, false);
I combine the textures by vertex/pixel shader:
// Effect: combines an RGB texture with a separate alpha texture in the pixel
// shader (rgb from one, alpha from the other).
uniform extern texture gTexRGB;
uniform extern texture gTexAlpha;
sampler TexRGB = sampler_state{
Texture = <gTexRGB>;
AddressU = WRAP;
AddressV = WRAP;
};
sampler TexAlpha = sampler_state{
Texture = <gTexAlpha>;
AddressU = WRAP;
AddressV = WRAP;
};
struct OutputVS{
float4 posH : POSITION0;
float2 tex0 : TEXCOORD0;
};
// NOTE(review): this VS takes no position input and never assigns posH, so
// posH keeps the zero value from the (OutputVS)0 cast — every vertex lands at
// the same clip-space point. A position input forwarded to posH is almost
// certainly needed; confirm against the vertex declaration on the CPU side.
OutputVS TextureBlendingVS(float2 tex0: TEXCOORD0){
// Zero out our output.
OutputVS outVS = (OutputVS)0;
// Pass on texture coordinates to be interpolated in rasterization.
outVS.tex0 = tex0;
// Done--return the output.
return outVS;
}
// Pixel shader: rgb from the RGB texture, alpha from the alpha texture.
float4 TextureBlendingPS(float2 tex0 : TEXCOORD0) : COLOR{
float3 rgb = tex2D(TexRGB, tex0).rgb;
float alpha = tex2D(TexAlpha, tex0).a;
return float4(rgb, alpha);
}
technique DirLightTexTech{
pass P0 {
// Specify the vertex and pixel shader associated with this pass.
vertexShader = compile vs_2_0 TextureBlendingVS();
pixelShader = compile ps_2_0 TextureBlendingPS();
}
}
The size of the textures is the same but during the rendering something goes wrong.
Please help me. :)

Vertex color is not interpolated in the context of ID3DXLine

I've created a standard Win32 DirectX9 window and I'm rendering to it using a custom effect, however I have a problem where the colours of vertices are not interpolated in the result.
// Begins a render batch: starts perf timing, lets ID3DXLine::Begin() set up a
// known-good device state (we render inside a hooked EndScene, so the host
// app's state is unknown), then activates our effect.
void CRender::Begin()
{
perf.begin();
// Capture device state so it can be restored later.
// We use ID3DXLine::Begin() to fix some bugs that I don't know how to fix.
mpLine->Begin();
// Setup shader
shader.Begin( static_cast<float>(FloatTime()) );
}
// Ends the render batch; tears down in the reverse order of Begin() so the
// device state captured by ID3DXLine::Begin() is restored last.
void CRender::End()
{
// Reverse order of Begin()
shader.End();
mpLine->End();
}
The problem here lies with mpLine->Begin(), without calling this I get a perfectly nice interpolated triangle, with it the whole triangle has the same colour as the first vertex.
Image for clarification: http://i.imgur.com/vKN4SnE.png
I am using ID3DXLine::Begin() just to set up the device state for me. The reason I am using it is because I'm rendering in the context of another program (a game) by hooking its EndScene(). The game may leave the device in an unusable state causing rendering glitches in my overlay, all these problems go away when using ID3DXLine::Begin() except vertex colours aren't interpolated any more.
Vertex declaration:
// Create the vertex declaration for use with the shaders.
// Create the vertex declaration for use with the shaders.
// Layout (stream 0): POSITION float2 @0, COLOR d3dcolor @8, TEXCOORD float2
// @12 — implying a 20-byte vertex.
static const D3DVERTEXELEMENT9 vformat[] =
{
{ 0, 0, D3DDECLTYPE_FLOAT2, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_POSITION, 0 },
{ 0, 8, D3DDECLTYPE_D3DCOLOR, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_COLOR, 0 },
{ 0, 12, D3DDECLTYPE_FLOAT2, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_TEXCOORD, 0 },
D3DDECL_END()   // required terminator
};
HRESULT hr = dev->CreateVertexDeclaration( vformat, &decl );
Effect source:
// Vertex shader input
struct VSIN
{
float2 coord : POSITION;
float4 color : COLOR0;
float2 tex : TEXCOORD0;
};
// Vertex shader output / Pixel shader input
struct VSOUT
{
float4 coord : POSITION;
float4 color : COLOR0;
float2 tex : TEXCOORD0;
float2 pos : TEXCOORD1;   // clip-space xy forwarded for per-pixel use
};
uniform float2 screen;      // render-target size in pixels
uniform float2x4 project;   // 2D affine projection supplied by the host
// Maps a 2D point through `project`, applies a pixel shift, and converts from
// pixel space to clip space ([-1,1], y flipped).
float4 vstransform( float2 coord, const float2 shift )
{
float2 final = ( mul( project, float4(coord.x,coord.y,1,1) ) + shift ) * 2 / screen;
return float4( final.x-1, 1-final.y, 0, 1 );
}
VSOUT vsfix( VSIN data )
{
VSOUT vert;
// Half-pixel shift for D3D9's texel/pixel alignment.
const float2 shift = { -0.5f, -0.5f };
vert.coord = vstransform( data.coord, shift );
vert.color = data.color;
vert.tex = data.tex;
vert.pos = vert.coord.xy;
return vert;
}
// Pixel shader: pass the interpolated vertex colour straight through.
// NOTE(review): if this colour arrives un-interpolated (flat), a device
// render state left behind by ID3DXLine::Begin() (e.g. shade mode) is the
// usual suspect — the shader itself does nothing that would flatten it.
float4 diffuse( VSOUT vert ) : COLOR
{
float4 px = vert.color;
return px;
}
technique Diffuse
{
pass p0
{
PixelShader = compile ps_2_0 diffuse();
VertexShader = compile vs_2_0 vsfix();
}
}

How to pass textures to DirectX 9 pixel shader?

I have pixel shader
// fxc.exe tiles.fs /T ps_3_0 /Fotiles.fsc /Fctiles.fsl
// Terrain pixel shader: layers up to 4 tile textures, blending each extra
// layer by its alpha map, then applies a simple ambient/diffuse/specular mix.
struct PSInput
{
float4 Pos : TEXCOORD0;
float3 Normal : TEXCOORD1;
float2 TexcoordUV : TEXCOORD2;
float2 TexcoordST : TEXCOORD3;
};
// NOTE(review): no explicit register() bindings — the compiler assigns the
// sampler registers, so the C++ side must query the actual slots via the
// constant table (GetSamplerIndex) rather than assume s0..s6 in this order.
sampler2D sampler0; //uniform
sampler2D sampler1; //uniform
sampler2D sampler2; //uniform
sampler2D sampler3; //uniform
sampler2D alphamap1;//uniform
sampler2D alphamap2;//uniform
sampler2D alphamap3;//uniform
uniform int tex_count = 0;          // number of extra layers (0..3) in use
uniform float4 color_ambient = float4(0.75, 0.75, 0.75, 1.0);
uniform float4 color_diffuse = float4(0.25, 0.25, 0.25, 1.0);
uniform float4 color_specular = float4(1.0, 1.0, 1.0, 1.0);
uniform float shininess = 77.0f;
uniform float3 light_position = float3(12.0f, 32.0f, 560.0f);
float4 main(PSInput In) : COLOR
{
float3 light_direction = normalize(light_position - (float3)In.Pos);
float3 normal = normalize(In.Normal);
// NOTE(review): a Blinn half-vector is normally light dir + VIEW dir; this
// uses the (clip-space) position instead — verify the intended model.
float3 half_vector = normalize(light_direction + normalize((float3)In.Pos));
float diffuse = max(0.0, dot(normal, light_direction));
float specular = pow(max(0.0, dot(In.Normal, half_vector)), shininess);
// Base layer, then blend each enabled layer by its alpha map.
float4 color = tex2D(sampler0, In.TexcoordUV);
if (tex_count > 0){
float4 temp = tex2D(sampler1, In.TexcoordUV);
float4 amap = tex2D(alphamap1, In.TexcoordST);
color = lerp(color, temp, amap.a);
}
if (tex_count > 1){
float4 temp = tex2D(sampler2, In.TexcoordUV);
float4 amap = tex2D(alphamap2, In.TexcoordST);
color = lerp(color, temp, amap.a);
}
if (tex_count > 2){
float4 temp = tex2D(sampler3, In.TexcoordUV);
float4 amap = tex2D(alphamap3, In.TexcoordST);
color = lerp(color, temp, amap.a);
}
// Lighting: only the texture colour is scaled by ambient; diffuse/specular
// terms are added unmodulated by the texture.
color = color * color_ambient + diffuse * color_diffuse + specular * color_specular;
return color;
}
vertex shader
// fxc.exe tiles.vs /T vs_3_0 /Fotiles.vsc /Fctiles.vsl
// Terrain vertex shader: transforms position by model*view*projection and
// forwards normal + both UV sets to the pixel shader.
struct VSInput
{
float3 Pos : POSITION;
float3 Normal : NORMAL;
float2 TexcoordUV : TEXCOORD0;
float2 TexcoordST : TEXCOORD1;
};
struct PSInput
{
float4 Pos : POSITION;
float3 Normal : TEXCOORD0;
float2 TexcoordUV : TEXCOORD1;
float2 TexcoordST : TEXCOORD2;
};
uniform matrix modelMatrix;
uniform matrix projectionMatrix;
uniform matrix lookAtMatrix;
PSInput main(VSInput In)
{
PSInput Out = (PSInput) 0;
//projectionMatrix * lookAtMatrix * modelMatrix;
// Row-vector convention: v * M, so the product is model * view * projection.
matrix MVP = mul(modelMatrix, lookAtMatrix);
MVP = mul(MVP, projectionMatrix);
// Rotate the normal by the model matrix only (no translation).
// NOTE(review): correct only for uniform scaling; otherwise the
// inverse-transpose is required.
Out.Normal = mul(In.Normal, (float3x3)modelMatrix);
Out.Pos = mul(float4(In.Pos, 1.0), MVP);
Out.TexcoordUV = In.TexcoordUV;
Out.TexcoordST = In.TexcoordST;
return Out;
}
same works under OpenGL + GLSL except mix replaced by lerp (I hope its correct).
By example from http://www.two-kings.de/tutorials/dxgraphics/dxgraphics18.html I passing textures with:
// Bind the tile textures to stages 0..3 and their alpha maps to stages 4..6,
// updating the tex_count shader constant as layers are added.
// NOTE(review): this assumes the compiler placed the samplers at those exact
// stages; the sampler index should be queried from the constant table instead.
ps_pConstantTable.SetInt(m_pD3DDevice, texCountHandle, 0);
for i := 0 to texCount - 1 do begin
tBlp := texture_buf[cx, cy][i];
if tBlp = nil then
break;   // no more layers for this tile
m_pD3DDevice.SetTexture(i, tBlp.itex);
ps_pConstantTable.SetInt(m_pD3DDevice, texCountHandle, i);
if i > 0 then begin
// this time, use blending:
m_pD3DDevice.SetTexture(i + 3, AlphaMaps[cx, cy][i]);   // alpha maps at stages 4..6
end;
end;
so ordinal textures have indices 0-3 and alpha 4-6 (max texCount 4).
The problem is: I can see the mesh (terrain) but it is solid black. Do I need to set something else (without shaders it was also black until I assigned materials and light)? Can I pass textures like that? Can I do this with sampler2D as a uniform (and how)?
Edit: example with sources, shaders, several used textures and alphamaps, vertex data with normals at filebeam http://fbe.am/nm4 added. As small as possible. Also contains DXErr9ab.dll to log errors.
To use texture in pixel shader, you may following below steps
Create texture in your C/C++ file by D3DXCreateTextureFromFile or other functions.
// Step 1: create the texture object from an image file on disk.
if( FAILED( D3DXCreateTextureFromFile( g_pd3dDevice, "FaceTexture.jpg",
&g_pTexture ) ) )
return E_FAIL;
Declare a D3DXHANDLE and associate it with the texture in your shader file.(you should compile your effect file before this step, effects_ here is a pointer to ID3DXEffect)
// Step 2: after compiling the effect, cache a handle to its "FaceTexture" parameter.
texture_handle = effects->GetParameterByName(0, "FaceTexture");
Set the texture in render function
// Step 3: bind the created texture to the cached effect parameter before drawing.
effects_->SetTexture(texture_handle, g_pTexture);
Declare a texture in your shader file
// Step 4: the matching texture-typed parameter in the .fx file.
texture FaceTexture;
Declare a sampler in your shader file
// Face texture sampler
// Step 5: bind the texture parameter to a sampler with trilinear filtering.
sampler FaceTextureSampler = sampler_state
{
Texture = <FaceTexture>;
MipFilter = LINEAR;
MinFilter = LINEAR;
MagFilter = LINEAR;
};
Do sampling in your pixel shader function
// Step 6: sample the texture in the pixel shader.
// Fixed: the D3D9 HLSL used by ID3DXEffect has no Texture.Sample() method
// (that is D3D10+ syntax); under ps_2_0 sampling is done with the tex2D()
// intrinsic against the sampler, which already references the texture via
// its sampler_state block.
float4 BasicPS(OutputVS outputVS) : COLOR
{
float4 Output;
Output = tex2D(FaceTextureSampler, outputVS.texUV);
return Output;
}
If you have DirectX SDK installed, I recommend you to take a look at the sample "BasicHLSL" which has a very basic introduction of Vertex shader and Pixel shader(including texture).

Direct3D11, some part of the model always in front of the others, probably about depth

I'm a new D3D programmer.
When I tried to render a model, I got a strange problem!
As you can see in the picture, some parts of the model are always in front of the others.
the model vertex only contains the following data
// Per-vertex data as posted: 3 floats position, 3 floats colour, 2 floats UV
// (32 bytes per vertex).
{
float x, y, z;
float r, g, b;
float u, v;
}
I tried to render it in opengl and webgl ( http://nalol.azurewebsites.net/ ), it works well. but in D3D11, I got this strange problem.
I tried Google and find something about depth, but i don't know how to deal with it.
the following are some part of my code:
HLSL file
// Vertex shader output / rasteriser interpolants.
struct vout
{
float4 position : SV_POSITION;
float3 normal : NORMAL;
float2 texcoord : TEXCOORD;
};
// Pass-through vertex shader: no transform, position promoted to float4.
vout vshader(float3 position : POSITION, float3 normals : NORMAL, float2 texcoords : TEXCOORD)
{
vout output;
output.position = float4(position, 1);
output.normal = normals ;
output.texcoord = texcoords;
return output;
}
Texture2D shaderTexture;
SamplerState SampleType;
// Pixel shader: plain textured output (normals unused).
// NOTE(review): this signature re-declares POSITION/NORMAL/TEXCOORD instead
// of taking the vout struct (whose position is SV_POSITION) — the PS input
// signature should match the VS output; confirm it links as intended. The
// float3 return also leaves the render target's alpha undefined.
float3 pshader(float3 position : POSITION, float3 normals : NORMAL, float2 texcoords : TEXCOORD) : SV_TARGET
{
return shaderTexture.Sample(SampleType, texcoords);
}
vertex struct
// One .skn model vertex, 52 bytes:
//   position      @ 0  (12 bytes)
//   bone_index    @ 12 ( 4 bytes)
//   bone_weights  @ 16 (16 bytes)
//   normals       @ 32 (12 bytes)  <- matches input layout offset 32
//   texcoords     @ 44 ( 8 bytes)  <- matches input layout offset 44
struct lol_skn_vertex {
float position[3];
char bone_index[4]; // for bones and animation, not used here
float bone_weights[4]; // for bones and animation, not used here
float normals[3];
float texcoords[2];
};
input layout object
// Input layout matching lol_skn_vertex (position @0, normals @32, texcoords @44).
// Fixed: POSITION and NORMAL are float3 in both the vertex struct and the
// shader, so they need DXGI_FORMAT_R32G32B32_FLOAT; DXGI_FORMAT_R32G32_FLOAT
// only feeds x and y and leaves z at 0 — exactly the depth bug described in
// the self-answer below.
D3D11_INPUT_ELEMENT_DESC ied[] =
{
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 44, D3D11_INPUT_PER_VERTEX_DATA, 0},
};
render function
void RenderFrame(void)
{
FLOAT ColorRGBA[4] = {0.0f, 0.2f, 0.4f, 1.0f};
d3d11_device_context->ClearRenderTargetView(d3d11_view_rt_backbuffer, ColorRGBA);
d3d11_device_context->ClearDepthStencilView(d3d11_view_ds,D3D11_CLEAR_DEPTH|D3D11_CLEAR_STENCIL,1.f,0);
update();
UINT stride = sizeof(lol_skn_vertex);
UINT offset = 0;
d3d11_device_context->IASetVertexBuffers(0, 1, &vertex_buffer, &stride, &offset);
d3d11_device_context->IASetIndexBuffer(index_buffer, DXGI_FORMAT_R16_UINT, 0);
d3d11_device_context->IASetPrimitiveTopology(D3D10_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
d3d11_device_context->DrawIndexed(skn.num_indices, 0, 0);
// switch the back buffer and the front buffer
dxgi_swapchain->Present(0, 0);
}
buffer update function
// Re-uploads the model each frame, rotating it about Y on the CPU by
// transforming the vertex data in place inside the mapped buffer.
void update() {
// copy the vertices into the buffer
D3D11_MAPPED_SUBRESOURCE ms;
// NOTE(review): the 2nd and 4th parameters are UINTs (subresource index and
// map flags) — NULL works but 0 is the correct value to pass.
d3d11_device_context->Map(vertex_buffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms); // map the buffer
memcpy(ms.pData, skn_vertex_buffer, sizeof(lol_skn_vertex) * skn.num_vertices); // copy the data
// unmap the buffer
// Derive a rotation angle from wall-clock seconds + milliseconds.
SYSTEMTIME SystemTime;
GetSystemTime(&SystemTime);
float angle = (float)SystemTime.wMilliseconds/1000+SystemTime.wSecond;
D3DXMATRIX x;
D3DXMatrixRotationY(&x, angle);
// Treats the first 16 bytes of each 52-byte vertex as a vec4: xyz is the
// position and "w" is the bone_index bytes reinterpreted as a float. A pure
// Y rotation maps w to w unchanged, so those bytes are written back intact —
// fragile but currently harmless. TODO: confirm if bone data is ever used.
D3DXVec4TransformArray((D3DXVECTOR4 *)ms.pData, sizeof(lol_skn_vertex), (D3DXVECTOR4 *)ms.pData, sizeof(lol_skn_vertex), &x, skn.num_vertices);
// use D3DXVECTOR4 for Transform
d3d11_device_context->Unmap(vertex_buffer, NULL);
}
At last I solved the problem.
I made 2 very stupid mistakes.
First: in the "input layout object", I used DXGI_FORMAT_R32G32_FLOAT for position, which only contains x and y, so the shader always got 0 for z.
Second: my model data is not normalized (it ranges from -50 to 50), so I had used D3D11_RASTERIZER_DESC to disable DepthClip and forgot about it.
Fixing these 2 problems made everything work.
And great thank to Gnietschow :)

HLSL point sprite texture coordinates work on ATI not NVIDIA

I am really stuck on this one. My HLSL for rendering point sprites with texture coordinates for a sprite sheet works fine on all ATI cards but not on any NVIDIA cards. On NVIDIA cards the passed texture coordinates map to the whole sprite sheet rather than a portion of it. Strange but it works fine on ATI cards. Am I missing something unique to ATI cards?
Here is my shader
// Point-sprite effect: the VS forwards a sub-rectangle of a sprite sheet in
// TEXCOORD0/TEXCOORD1; the PS rescales the hardware-generated sprite UVs into
// that sub-rectangle. xy = per-sprite UV, zw = sheet offset for this sprite.
struct VS_INPUT
{
float4 Position : POSITION;
float4 Color : COLOR;
float4 Texture : TEXCOORD0;
//float1 Psize : PSIZE0;
};
// NOTE(review): with point sprites, NVIDIA hardware overwrites ALL
// TEXCOORDn interpolators with the generated sprite coordinates (see the
// answer below) — data passed here in TEXCOORD1 survives only on ATI.
struct VS_OUTPUT
{
float4 Position : POSITION;
float4 Color : COLOR;
float2 Texture : TEXCOORD0;
float2 Texture_zw : TEXCOORD1;
float1 Psize : PSIZE;
};
float4x4 WorldViewProj;
texture Tex <string name = "sprite_coin_test.dds";>;
sampler2D s_2D;   // receives stage-0 state via Sampler[0] = (S0) in the pass
float offset_x=0.0;   // sub-rectangle scale within the sprite sheet
float offset_y=0.0;
sampler S0 = sampler_state
{
Texture = (Tex);
MinFilter = ANISOTROPIC; //LINEAR;
MagFilter = ANISOTROPIC; //LINEAR;
MipFilter = LINEAR;
};
VS_OUTPUT vs_main( in VS_INPUT In )
{
VS_OUTPUT Out=(VS_OUTPUT)0; //create an output vertex
Out.Position = mul(In.Position, WorldViewProj); //apply vertex transformation
Out.Texture = In.Texture;
// Carry the sheet offset separately so the PS can rebuild the final UV.
Out.Texture_zw = float2(In.Texture.z, In.Texture.w);
Out.Color = In.Color;
//Out.Psize = In.Psize;
// Scale point size with depth (hand-tuned factor).
Out.Psize=(Out.Position.z)*10.0;
return Out; //return output vertex
}
// vPos: hardware-generated sprite coords (0..1 across the point sprite),
// rescaled into the sheet sub-rectangle and shifted by the sprite's offset.
float4 PS_Particle_main(float2 vPos: TEXCOORD0, float2 text_zw: TEXCOORD1) : COLOR
{
vPos.x*=offset_x;
vPos.y*=offset_y;
vPos += float2(text_zw[0], text_zw[1]);
return tex2D(s_2D, vPos);
}
technique RenderVS
{
pass p0
{
AlphaBlendEnable = true;
AlphaTestEnable = false;
SrcBlend = SRCALPHA;
DestBlend = INVSRCALPHA;
POINTSPRITEENABLE = true;
POINTSCALEENABLE = true;
POINTSIZE_MIN = 1.0f;
POINTSIZE_MAX = 400.0f;
POINTSCALE_A = 1.0f;
POINTSCALE_B = 1.0f;
POINTSCALE_C = 1.0f;
ZWRITEENABLE = false;
Sampler[0] = (S0);
VertexShader = compile vs_1_1 vs_main();
PixelShader = compile ps_2_0 PS_Particle_main();
}
}
I had the same problem for a while and it cost me a lot of time. I have not found any documentation about this problem, but by testing on ATI and NVIDIA devices I found the difference. With point sprites, ATI works fine: it interpolates the texture coordinates properly into TEXCOORD0. NVIDIA does nearly the same, but it writes the generated texture coordinates into ALL fields with a TEXCOORD interpolator, so any information you pass via texture coordinates to the pixel shader gets overwritten. I solved this by using a COLOR interpolator instead of a TEXCOORD interpolator. Very strange, but it works fine for me :) In your case it would be:
// Workaround: route the sheet offset through a COLOR interpolator instead of
// TEXCOORD1, because NVIDIA point sprites overwrite every TEXCOORD
// interpolator with the generated sprite coordinates; COLOR interpolators
// are left intact on both vendors.
struct VS_OUTPUT
{
float4 Position : POSITION;
float4 Color : COLOR0;
float2 Texture : TEXCOORD0;
float2 Texture_zw : COLOR1;
float1 Psize : PSIZE;
};

Resources