This is a beginner question about using HLSL shaders in MonoGame. I have a texture with multiple areas of different colors, and I want to highlight the borders of those areas.
Preparing the sprite batch:
batch.Begin( SpriteSortMode.Deferred,
BlendState.AlphaBlend,
SamplerState.LinearWrap,
null, null, bordersEffect,
viewer.Camera.GetViewMatrix());
In the bordersEffect I am loading my .fx file with the following:
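(The snippet below omits the standard MonoGame effect-template preamble that defines PS_SHADERMODEL; assuming the stock template, it looks like this:)
#if OPENGL
    #define SV_POSITION POSITION
    #define VS_SHADERMODEL vs_3_0
    #define PS_SHADERMODEL ps_3_0
#else
    #define VS_SHADERMODEL vs_4_0_level_9_1
    #define PS_SHADERMODEL ps_4_0_level_9_1
#endif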
Texture2D SpriteTexture;
float2 SpriteTextureSize;
sampler2D SpriteTextureSampler = sampler_state
{
Texture = <SpriteTexture>;
};
struct VertexShaderOutput
{
float4 Position : SV_POSITION;
float4 Color : COLOR0;
float2 TextureCoordinates : TEXCOORD0;
};
float4 MainPS(VertexShaderOutput input) : COLOR
{
float offx = 1.0 / SpriteTextureSize[0];
float offy = 1.0 / SpriteTextureSize[1];
float4 pixelColor = tex2D(SpriteTextureSampler, input.TextureCoordinates);
float4 topLeft = tex2D(SpriteTextureSampler, input.TextureCoordinates + float2(-offx, -offy));
float4 top = tex2D(SpriteTextureSampler, input.TextureCoordinates + float2(0, -offy));
float4 topRight = tex2D(SpriteTextureSampler, input.TextureCoordinates + float2(offx, -offy));
float4 left = tex2D(SpriteTextureSampler, input.TextureCoordinates + float2(-offx, 0));
float4 right = tex2D(SpriteTextureSampler, input.TextureCoordinates + float2(offx, 0));
float4 bottomLeft = tex2D(SpriteTextureSampler, input.TextureCoordinates + float2(-offx, offy));
float4 bottom = tex2D(SpriteTextureSampler, input.TextureCoordinates + float2(0, offy));
float4 bottomRight = tex2D(SpriteTextureSampler, input.TextureCoordinates + float2(offx, offy));
if (any(pixelColor != topLeft) || any(pixelColor != top) || any(pixelColor != topRight) ||
any(pixelColor != left) || any(pixelColor != right) ||
any(pixelColor != bottomLeft) || any(pixelColor != bottom) || any(pixelColor != bottomRight))
{
// this is a border pixel
return pixelColor + float4(0.1, 0.1, 0.1, 0);
}
return pixelColor * input.Color;
}
technique SpriteDrawing
{
pass P0
{
PixelShader = compile PS_SHADERMODEL MainPS();
}
};
This highlights border pixels, but when I zoom the camera in, the edge between the color areas is blended while the edge between the highlight and the color itself is not.
edges not blended
What am I missing?
I'm currently trying to add multiple point lights to my game. What I have done appears to be mostly working, except for a small problem with blending light falloff. Here are two images to show you what's happening. In the first one, light falloff is commented out. Both point lights appear correctly.
And here's the second image, where I have light falloff enabled. You will see that only light #2 is "mostly" visible. There are traces of light #1, but for the most part, light #1 appears to be overridden by light #2's falloff. In other words, each consecutive light's falloff overrides the light from previous lights.
Does anyone know how to add falloff for multiple point lights? I'm sure I'm doing something slightly wrong, and that's why the lights are not properly accumulated.
Here's my shader:
struct Vertex
{
float4 pos : POSITION;
float2 tex : TEXTURE;
float3 norm : NORMAL;
};
struct PixelShaderArgs
{
float4 pos : SV_POSITION;
float2 col : TEXTURE;
float3 norm : NORMAL;
float3 worldPos : POSITION;
};
struct PointLightShaderArgs
{
float3 pos;
float radius;
float intensity;
float3 padding;
float4 ambient;
float4 diffuse;
};
Texture2D ShaderTexture : register(t0);
SamplerState Sampler : register(s0);
float4x4 localMatrix : register(b0);
cbuffer ShaderDataBuffer : register(b1)
{
float2 TextureResolution;
};
cbuffer cbPerFrame : register(b3)
{
PointLightShaderArgs light[8];
};
cbuffer WorldPositionBuffer : register(b4)
{
float4x4 World;
};
PixelShaderArgs VertexShaderMain(Vertex vertex)
{
PixelShaderArgs output;
output.pos = mul(vertex.pos, localMatrix);
output.col = vertex.tex;
output.norm = mul(vertex.norm, World);
output.worldPos = mul(vertex.pos, World);
return output;
}
int2 convertUVToPixel(float u, float v)
{
int width = TextureResolution.x;
int height = TextureResolution.y;
int xCoordinate = floor(u * width);
int yCoordinate = floor(v * height);
return int2(xCoordinate % width, yCoordinate % height);
}
float Falloff(float distance, float radius)
{
return clamp(1.0f - (distance / radius), 0.0, 1.0);
}
#define ATTENUATION_CONSTANT 1.0f // constant term
#define ATTENUATION_LINEAR 0.0f // linear term (disabled)
#define ATTENUATION_QUADRATIC 0.0f // quadratic term (disabled)
float4 PixelShaderMain(PixelShaderArgs pixelShaderArgs) : SV_Target
{
float u = pixelShaderArgs.col.x;
float v = pixelShaderArgs.col.y;
// Lighting
float3 fragColor = float3(0.0f, 0.0f, 0.0f);
float4 diffuse = ShaderTexture.Load(int3(convertUVToPixel(u, v), 0));
for (int i = 0; i < 2; i++)
{
float3 ambient = diffuse * light[i].ambient;
pixelShaderArgs.norm = normalize(pixelShaderArgs.norm);
float3 lightToPixelVec = light[i].pos - pixelShaderArgs.worldPos;
float distance = length(lightToPixelVec);
float luminosity = dot(lightToPixelVec / distance, pixelShaderArgs.norm);
float intensity = 1.00f;
if (luminosity > 0.0f)
{
// Do lighting attenuation
fragColor += luminosity * diffuse * light[i].diffuse;
fragColor /= ATTENUATION_CONSTANT + (ATTENUATION_LINEAR * distance) + (ATTENUATION_QUADRATIC * (distance * distance));
fragColor *= light[i].intensity; // multiply the final result by the intensity.
fragColor *= Falloff(distance, light[i].radius); // This is what's causing the problem!!
//fragColor = saturate(fragColor + ambient);
}
}
return float4(fragColor, diffuse.a);
}
I figured this out. The solution was to move the falloff calculation up and inline it in the accumulation line:
fragColor += luminosity * diffuse * light[i].diffuse * Falloff(distance, light[i].radius);
This results in the correct falloff blending, shown in this picture:
and another picture showing three overlapped point lights:
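To see why the original version behaved that way, here's a small worked example (the contribution and falloff numbers are hypothetical, not taken from the scene). Suppose both lights contribute 0.5 before falloff, light 0 has a falloff of 1.0 at this pixel, and light 1 has a falloff of 0.1:
// Original: Falloff multiplies the whole accumulator.
// i = 0: fragColor = (0.0 + 0.5) * 1.0 = 0.5
// i = 1: fragColor = (0.5 + 0.5) * 0.1 = 0.1   <-- light 0 is almost erased
// Fixed: Falloff weights only the current light's term.
// i = 0: fragColor = 0.0 + 0.5 * 1.0 = 0.5
// i = 1: fragColor = 0.5 + 0.5 * 0.1 = 0.55    <-- both lights survive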
Here's the updated shader (a lot has changed from the first version because I'm posting this answer late):
struct Vertex
{
float4 pos : POSITION;
float2 tex : TEXTURE;
float3 norm : NORMAL;
};
struct PixelShaderArgs
{
float4 pos : SV_POSITION;
float2 col : TEXTURE;
float3 norm : NORMAL;
float3 worldPos : POSITION;
};
struct PointLightShaderArgs
{
float3 pos;
float radius;
float intensity;
float3 padding;
float4 ambient;
float4 diffuse;
};
Texture2D ShaderTexture : register(t0);
SamplerState Sampler : register(s0);
float4x4 localMatrix : register(b0);
cbuffer ShaderDataBuffer : register(b1)
{
float2 TextureResolution;
};
cbuffer cbPerFrame : register(b3)
{
PointLightShaderArgs light[32];
};
cbuffer WorldPositionBuffer : register(b4)
{
float4x4 World;
};
PixelShaderArgs VertexShaderMain(Vertex vertex)
{
PixelShaderArgs output;
output.pos = mul(vertex.pos, localMatrix);
output.col = vertex.tex;
output.norm = mul(vertex.norm, World);
output.worldPos = mul(vertex.pos, World);
return output;
}
int2 convertUVToPixel(float u, float v)
{
int width = TextureResolution.x;
int height = TextureResolution.y;
int xCoordinate = floor(u * width);
int yCoordinate = floor(v * height);
return int2(xCoordinate % width, yCoordinate % height);
}
float Falloff(float distance, float radius)
{
return clamp(1.0f - (distance / radius), 0.0, 1.0);
}
#define ATTENUATION_CONSTANT 1.0f // constant term
#define ATTENUATION_LINEAR 0.0f // linear term (disabled)
#define ATTENUATION_QUADRATIC 0.0f // quadratic term (disabled)
float4 PixelShaderMain(PixelShaderArgs pixelShaderArgs) : SV_Target
{
float u = pixelShaderArgs.col.x;
float v = pixelShaderArgs.col.y;
// Lighting
float3 fragColor = float3(0.0f, 0.0f, 0.0f);
float4 diffuse = ShaderTexture.Load(int3(convertUVToPixel(u, v), 0));
for (int i = 0; i < 32; i++)
{
float3 ambient = diffuse * light[i].ambient;
pixelShaderArgs.norm = normalize(pixelShaderArgs.norm);
float3 lightToPixelVec = light[i].pos - pixelShaderArgs.worldPos;
float distance = length(lightToPixelVec);
float luminosity = dot(lightToPixelVec / distance, pixelShaderArgs.norm);
float intensity = 1.00f;
if (luminosity > 0.0f)
{
// Do lighting attenuation
fragColor += luminosity * diffuse * light[i].diffuse * Falloff(distance,light[i].radius);
fragColor /= ATTENUATION_CONSTANT + (ATTENUATION_LINEAR * distance) + (ATTENUATION_QUADRATIC * (distance * distance));
fragColor *= light[i].intensity; // multiply the final result by the intensity.
}
fragColor = saturate(fragColor + ambient);
}
return float4(fragColor, diffuse.a);
}
I'm trying to create deferred screen-space decal rendering in Metal by following this article, though I can't seem to figure it out...
These are the bounds of the decal...
Actual result...
Potential issue
So apparently it doesn't think that the decal is intersecting the mesh. I'm sampling the depth value correctly, but when calculating the actual position of the pixel in 3D space, something doesn't add up.
Code
vertex VertexOut vertex_decal(
const VertexIn in [[ stage_in ]],
constant DecalVertexUniforms &uniforms [[ buffer(2) ]]
) {
VertexOut out;
out.position = uniforms.projectionMatrix * uniforms.viewMatrix * uniforms.modelMatrix * in.position;
out.viewPosition = (uniforms.viewMatrix * uniforms.modelMatrix * in.position).xyz;
out.normal = uniforms.normalMatrix * in.normal;
out.uv = in.uv;
return out;
}
fragment float4 fragment_decal(
const VertexOut in [[ stage_in ]],
constant DecalFragmentUniforms &uniforms [[ buffer(3) ]],
depth2d<float, access::sample> depthTexture [[ texture(0) ]]
) {
constexpr sampler textureSampler (mag_filter::nearest, min_filter::nearest);
float2 resolution = float2(
depthTexture.get_width(),
depthTexture.get_height()
);
float2 textureCoordinate = in.position.xy / resolution;
float depth = depthTexture.sample(textureSampler, textureCoordinate);
float3 viewRay = in.viewPosition * (uniforms.farClipPlane / in.viewPosition.z);
float3 viewPosition = viewRay * depth;
float3 worldPosition = (uniforms.inverseViewMatrix * float4(viewPosition, 1)).xyz;
float3 objectPosition = (uniforms.inverseModelMatrix * float4(worldPosition, 1)).xyz;
float distX = 0.5 - abs(objectPosition.x);
float distY = 0.5 - abs(objectPosition.y);
float distZ = 0.5 - abs(objectPosition.z);
if(distX > 0 && distY > 0 && distZ > 0) {
return float4(1, 0, 0, 0.5);
} else {
discard_fragment();
}
}
EDIT:
Made a bit of progress: now it at least renders something. It clips the decal box correctly once it's outside of a mesh, but the parts on the mesh are still not completely correct. To be exact, it also renders the sides of the box that overlap the mesh under the decal (you can see it in the image below, as the red there is a bit darker).
And to add more detail: the depthTexture is passed from the previous pass, so it only contains the icosphere, and the decal cube shader doesn't write to the depthTexture, it just reads from it.
The depth stencil state is defined as...
let stencilDescriptor = MTLDepthStencilDescriptor()
stencilDescriptor.depthCompareFunction = .less
stencilDescriptor.isDepthWriteEnabled = false
and the render pipeline is defined as...
let renderPipelineDescriptor = MTLRenderPipelineDescriptor()
renderPipelineDescriptor.vertexDescriptor = vertexDescriptor
renderPipelineDescriptor.vertexFunction = vertexLibrary.makeFunction(name: "vertex_decal")
renderPipelineDescriptor.fragmentFunction = fragmentLibrary.makeFunction(name: "fragment_decal")
if let colorAttachment = renderPipelineDescriptor.colorAttachments[0] {
colorAttachment.pixelFormat = .bgra8Unorm
colorAttachment.isBlendingEnabled = true
colorAttachment.rgbBlendOperation = .add
colorAttachment.sourceRGBBlendFactor = .sourceAlpha
colorAttachment.destinationRGBBlendFactor = .oneMinusSourceAlpha
}
renderPipelineDescriptor.colorAttachments[1].pixelFormat = .bgra8Unorm
renderPipelineDescriptor.depthAttachmentPixelFormat = .depth32Float
So the current issue is that it only discards pixels that are outside the mesh it's being projected onto, instead of all pixels that are "above" the surface of the icosphere.
New Shader Code
fragment float4 fragment_decal(
const VertexOut in [[ stage_in ]],
constant DecalFragmentUniforms &uniforms [[ buffer(3) ]],
depth2d<float, access::sample> depthTexture [[ texture(0) ]]
) {
constexpr sampler textureSampler (mag_filter::nearest, min_filter::nearest);
float2 resolution = float2(
depthTexture.get_width(),
depthTexture.get_height()
);
float2 textureCoordinate = in.position.xy / resolution;
float depth = depthTexture.sample(textureSampler, textureCoordinate);
float3 screenPosition = float3(textureCoordinate * 2 - 1, depth);
float4 viewPosition = uniforms.inverseProjectionMatrix * float4(screenPosition, 1);
float4 worldPosition = uniforms.inverseViewMatrix * viewPosition;
float3 objectPosition = (uniforms.inverseModelMatrix * worldPosition).xyz;
if(abs(worldPosition.x) > 0.5 || abs(worldPosition.y) > 0.5 || abs(worldPosition.z) > 0.5) {
discard_fragment();
} else {
return float4(1, 0, 0, 0.5);
}
}
I finally managed to get it to work properly. The issues the previous shader had were:
- a flipped Y axis on screenPosition, and
- a missing perspective divide by w on objectPosition (which yields localPosition).
So the final shader code is...
fragment float4 fragment_decal(
const VertexOut in [[ stage_in ]],
constant DecalFragmentUniforms &uniforms [[ buffer(3) ]],
depth2d<float, access::sample> depthTexture [[ texture(0) ]],
texture2d<float, access::sample> colorTexture [[ texture(1) ]]
) {
constexpr sampler depthSampler (mag_filter::linear, min_filter::linear);
float2 resolution = float2(
depthTexture.get_width(),
depthTexture.get_height()
);
float2 depthCoordinate = in.position.xy / resolution;
float depth = depthTexture.sample(depthSampler, depthCoordinate);
float3 screenPosition = float3((depthCoordinate.x * 2 - 1), -(depthCoordinate.y * 2 - 1), depth);
float4 viewPosition = uniforms.inverseProjectionMatrix * float4(screenPosition, 1);
float4 worldPosition = uniforms.inverseViewMatrix * viewPosition;
float4 objectPosition = uniforms.inverseModelMatrix * worldPosition;
float3 localPosition = objectPosition.xyz / objectPosition.w;
if(abs(localPosition.x) > 0.5 || abs(localPosition.y) > 0.5 || abs(localPosition.z) > 0.5) {
discard_fragment();
} else {
float2 textureCoordinate = localPosition.xy + 0.5;
float4 color = colorTexture.sample(depthSampler, textureCoordinate);
return float4(color.rgb, 1);
}
}
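Both fixes follow from the conventions involved: Metal's NDC space has +Y up while texture coordinates have +Y down, hence the sign flip on the y component of screenPosition; and multiplying by inverseProjectionMatrix produces a homogeneous coordinate, so the result must be divided by its w component. Doing that divide after the inverse view and model transforms (objectPosition.xyz / objectPosition.w above) is still valid because those affine transforms leave w unchanged.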
The final results look like this (red pixels are kept, blue pixels are discarded)...
I need to center the image on a 2D texture when adjusting fit/fill of the texture in the view, but I can't get the UV coordinates right.
Original image
When I adjust fill, it shows the first part of the image, not the center:
Fill image
When I adjust fit, it doesn't get the correct center:
Fit image
float2 adjustPos(float2 size,
float2 uv) {
uv.x /= size.x;
uv.y /= size.y;
uv.y = 1.0f - uv.y;
return uv;
}
float2 scaleTexture(texture2d<float, access::sample> tex2d,
float2 size,
float2 uv,
int mode) {
int width = tex2d.get_width();
int height = tex2d.get_height();
float widthRatio = size.x/width;
float heightRatio = size.y/height;
float2 pos;
if (mode == 0) { // Aspect Fit
int2 newSize = int2(width*widthRatio, height*widthRatio);
pos = adjustPos(float2(newSize), uv);
float y = (uv.y/size.y) / 2.0;
y = y-pos.y;
y = 1.0f-y;
pos.y = y;
} else if (mode == 1) { // Aspect Fill
int2 newSize = int2(width*heightRatio, height*heightRatio);
pos = adjustPos(float2(newSize), uv);
if (newSize.x != size.x) {
pos.x = 0.5f + ((pos.x - 0.5f) * (1.0f - (heightRatio/100)));
}
} else {
float scale = min(widthRatio, heightRatio);
int2 newSize = int2(width*scale, height*scale);
pos = adjustPos(float2(newSize), uv);
}
return pos;
}
You can use this shader and customize it to your requirements. This solution lets you fit or fill a texture within a given size.
The following Metal shader takes a texture, an output size, and the expected content mode, and returns a fit/fill image within the given size.
fragment float4 fragment_aspect_fitfill(
VertexOut vertexIn [[stage_in]],
texture2d<float, access::sample> sourceTexture [[texture(0)]],
sampler sourceSampler [[sampler(0)]],
constant float2 &size [[ buffer(0) ]],
constant float &contentMode [[ buffer(1) ]])
{
float2 uv = vertexIn.textureCoordinate;
// Calculate aspect ratios for both the texture and the expected output
float textureAspect = (float)sourceTexture.get_width() / (float)sourceTexture.get_height();
float frameAspect = (float)size.x / (float)size.y;
float scaleX = 1, scaleY = 1;
float textureFrameRatio = textureAspect / frameAspect;
bool portraitTexture = textureAspect < 1;
bool portraitFrame = frameAspect < 1;
// Content mode 0 is for aspect Fill, 1 is for Aspect Fit
if(contentMode == 0.0) {
if(portraitFrame)
scaleX = 1.f / textureFrameRatio;
else
scaleY = textureFrameRatio;
} else if(contentMode == 1.0) {
if(portraitFrame)
scaleY = textureFrameRatio;
else
scaleX = 1.f / textureFrameRatio;
}
float2 textureScale = float2(scaleX, scaleY);
float2 vTexCoordinate = textureScale * (uv - 0.5) + 0.5;
return sourceTexture.sample(sourceSampler, vTexCoordinate);
}
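The centering itself comes from the last two lines: textureScale * (uv - 0.5) + 0.5 scales the UVs around the texture center rather than around the origin. As a quick sanity check (plain arithmetic, not from the original post): with a scale of 2, uv = 0 maps to -0.5 and uv = 1 maps to 1.5, so the whole image lands letterboxed in the middle of the frame; with a scale of 0.5, uv spans 0.25 to 0.75, so the central region of the texture is cropped to fill the frame.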
Tip: this MSL uses some structs from MetalPetal.
I'm using Frank Luna's book to learn DirectX 10, but I'm a little confused by some of the lighting I'm getting. I've got a couple of objects, a directional light, and a point light that I can move around the scene. My problem is that when I move the point light around, the light moves but gets darker the farther it gets from the origin. At the origin it has intense white light. Why is this, and how can I get it working properly? Thanks.
Here's the code for the point light:
float3 PointLight(SurfaceInfo v, Light L, float3 eyePos)
{
float3 litColor = float3(0.0f, 0.0f, 0.0f);
// The vector from the surface to the light.
float3 lightVec = L.pos - v.pos;
// The distance from surface to light.
float d = length(lightVec);
if( d > L.range )
return float3(0.0f, 0.0f, 0.0f);
// Normalize the light vector.
lightVec /= d;
// Add the ambient light term.
litColor += v.diffuse * L.ambient;
// Add diffuse and specular term, provided the surface is in
// the line of sight of the light.
float diffuseFactor = dot(lightVec, v.normal);
[branch]
if( diffuseFactor > 0.0f )
{
float specPower = max(v.spec.a, 1.0f);
float3 toEye = normalize(eyePos - v.pos);
float3 R = reflect(-lightVec, v.normal);
float specFactor = pow(max(dot(R, toEye), 0.0f), specPower);
// diffuse and specular terms
litColor += diffuseFactor * v.diffuse * L.diffuse;
litColor += specFactor * v.spec * L.spec;
}
// attenuate
return litColor / dot(L.att, float3(1.0f, d, d*d));
}
The Effect file:
#include "lighthelper.fx"
#define MaxLights 2
cbuffer cbPerFrame
{
uniform extern Light gLight[MaxLights];
int gLightType;
float3 gEyePosW;
};
bool gSpecularEnabled;
cbuffer cbPerObject
{
float4x4 gWorld;
float4x4 gWVP;
float4x4 gTexMtx;
};
// Nonnumeric values cannot be added to a cbuffer.
Texture2D gDiffuseMap;
Texture2D gSpecMap;
SamplerState gTriLinearSam
{
Filter = MIN_MAG_MIP_LINEAR;
AddressU=Mirror;
AddressV=Mirror;
};
struct VS_IN
{
float3 posL : POSITION;
float3 normalL : NORMAL;
float2 texC : TEXCOORD;
float4 diffuse : DIFFUSE;
float4 spec : SPECULAR;
};
struct VS_OUT
{
float4 posH : SV_POSITION;
float3 posW : POSITION;
float3 normalW : NORMAL;
float2 texC : TEXCOORD;
float4 diffuse : DIFFUSE;
float4 spec : SPECULAR;
};
VS_OUT VS(VS_IN vIn)
{
VS_OUT vOut;
// Transform to world space.
vOut.posW = mul(float4(vIn.posL, 1.0f), gWorld);
vOut.normalW = mul(float4(vIn.normalL, 0.0f), gWorld);
// Transform to homogeneous clip space.
vOut.posH = mul(float4(vIn.posL, 1.0f), gWVP);
// Output vertex attributes for interpolation across triangle.
vOut.texC = mul(float4(vIn.texC, 0.0f, 1.0f), gTexMtx);
vOut.diffuse = vIn.diffuse;
vOut.spec = vIn.spec;
return vOut;
}
float4 PS(VS_OUT pIn) : SV_Target
{
// Get materials from texture maps.
float4 diffuse = gDiffuseMap.Sample( gTriLinearSam, pIn.texC );
float4 spec = gSpecMap.Sample( gTriLinearSam, pIn.texC );
// Map [0,1] --> [0,256]
spec.a *= 256.0f;
// Interpolating normal can make it not be of unit length so normalize it.
float3 normalW = normalize(pIn.normalW);
// Compute the lit color for this pixel.
SurfaceInfo v = {pIn.posW, normalW, diffuse, spec};
float3 litColor = float3(0.0f, 0.0f, 0.0f); // must start at zero, since it is only ever accumulated below
for(int i = 0; i < MaxLights; ++i)
{
if( i==0) // Parallel
{
//litColor += ParallelLight(v, gLight[i], gEyePosW);
}
else // Point
{
litColor += PointLight(v, gLight[i], gEyePosW);
}
}
return float4(litColor, diffuse.a);
}
technique10 TexTech
{
pass P0
{
SetVertexShader( CompileShader( vs_4_0, VS() ) );
SetGeometryShader( NULL );
SetPixelShader( CompileShader( ps_4_0, PS() ) );
}
}
And the Light defines:
myLight[1].ambient = D3DXCOLOR(0.4f, 0.8f, 0.4f, 1.0f);
myLight[1].diffuse = D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f);
myLight[1].specular = D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f);
myLight[1].att.x = 0.0f;
myLight[1].att.y = 0.1f;
myLight[1].att.z = 0.0f;
myLight[1].range = 50.0f;
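For reference, plugging these att values into the last line of PointLight gives a denominator of dot(float3(0.0f, 0.1f, 0.0f), float3(1.0f, d, d*d)) = 0.1 * d, so the function returns litColor / (0.1 * d) = 10 * litColor / d: surfaces closer than 10 units to the light are amplified, and everything farther away dims in proportion to the distance.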
I created the Projector with:
Matrix.CreateLookAt(position, direction, Vector3.Up);
Matrix.CreatePerspectiveFieldOfView(MathHelper.ToRadians(45), 1, 1, 2);
I pass the product of these matrices to the shader (called View in the shader), then in the shader I do:
float4 proj(float3 Position)
{
float4 texCoord = mul(float4(Position, 1.0), View);
texCoord.x = ( (texCoord.x / texCoord.w)/2) + 0.5;
texCoord.y = (-(texCoord.y / texCoord.w)/2) + 0.5;
return tex2D(shape, texCoord.xy);
}
The texture's UVW addressing mode is Clamp. I use this in the light stage of deferred shading. Resulting image (the red arrow is the correct direction):
What should I do to make it go only in correct direction?
SOLVED:
The problem was back-projection, which was simply solved:
float4 proj(float3 Position)
{
float4 texCoord = mul(float4(Position, 1.0), View);
if(texCoord.z < 0)
return 0;
texCoord.x = ( (texCoord.x / texCoord.w)/2) + 0.5;
texCoord.y = (-(texCoord.y / texCoord.w)/2) + 0.5;
return tex2D(shape, texCoord.xy);
}
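(Why this works: for points behind the projector, both the z and w components of texCoord are negative, and the two negative signs cancel in the divide, so texCoord.xy still lands inside [0, 1] and the texture shows up mirrored on the wrong side. Rejecting negative z in projector space culls that mirrored half.)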