I'm trying to modify the GPUImage2 CrosshairGenerator to replace the default crosshairs with a GL texture made from a UIImage; my image is a 128x128 PNG file.
I'm using GPUImage2's PictureInput to convert the UIImage to a GL texture, and it returns a PictureInput instance without any issues or warnings.
Next, I modify the CrosshairVertexShader to add support for the new texture:
uniform sampler2D inputImageTexture;
uniform float crosshairWidth;

attribute vec4 position;

varying vec2 centerLocation;
varying float pointSpacing;

void main() {
    gl_Position = vec4(((position.xy * 2.0) - 1.0), 0.0, 1.0);
    gl_PointSize = crosshairWidth + 1.0;
    pointSpacing = 1.0 / crosshairWidth;
    centerLocation = vec2(
        pointSpacing * ceil(crosshairWidth / 2.0),
        pointSpacing * ceil(crosshairWidth / 2.0));
}
Then I pass it to the modified CrosshairFragmentShader and (presumably) render it on screen:
uniform sampler2D inputImageTexture;
uniform lowp vec3 crosshairColor;

varying highp vec2 centerLocation;
varying highp float pointSpacing;

void main() {
    highp vec4 tex = texture2D(inputImageTexture, centerLocation);
    gl_FragColor = vec4(tex.r, tex.g, tex.b, tex.a);
}
Here is the code of the modified CrosshairGenerator:
import UIKit

public class CrosshairGenerator: ImageGenerator {

    public var crosshairWidth:Float = 5.0 { didSet { uniformSettings["crosshairWidth"] = crosshairWidth } }
    public var crosshairColor:Color = Color.green { didSet { uniformSettings["crosshairColor"] = crosshairColor } }
    public var crosshairImage: PictureInput = PictureInput(imageName:"monohrome.png") {
        didSet {
            uniformSettings["inputImageTexture"] = crosshairImage // Here I pass the texture to the shader?
            crosshairImage.processImage()
        }
    }

    let crosshairShader:ShaderProgram
    var uniformSettings = ShaderUniformSettings()

    public override init(size:Size) {
        crosshairShader = crashOnShaderCompileFailure("CrosshairGenerator"){try sharedImageProcessingContext.programForVertexShader(vertexShader, fragmentShader: fragmentShader)}
        super.init(size:size)

        ({crosshairWidth = 5.0})()
        ({crosshairColor = Color.green})()
        ({crosshairImage = PictureInput(imageName:"monohrome.png")})()
    }

    public func renderCrosshairs(_ positions:[Position]) {
        imageFramebuffer.activateFramebufferForRendering()
        imageFramebuffer.timingStyle = .stillImage
#if GL
        glEnable(GLenum(GL_POINT_SPRITE))
        glEnable(GLenum(GL_VERTEX_PROGRAM_POINT_SIZE))
#else
        glEnable(GLenum(GL_POINT_SPRITE_OES))
#endif
        crosshairShader.use()
        uniformSettings.restoreShaderSettings(crosshairShader)
        clearFramebufferWithColor(Color.transparent)

        guard let positionAttribute = crosshairShader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") }

        let convertedPositions = positions.flatMap{$0.toGLArray()}
        glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, convertedPositions)
        glDrawArrays(GLenum(GL_POINTS), 0, GLsizei(positions.count))

        notifyTargets()
    }
}
The initialization of the corner detector remains the same:
FilterOperation(
    filter:{HarrisCornerDetector()},
    listName:"Harris corner detector",
    titleName:"Harris Corner Detector",
    sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20),
    sliderUpdateCallback: {(filter, sliderValue) in
        filter.threshold = sliderValue
    },
    filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
        let castFilter = filter as! HarrisCornerDetector
        // TODO: Get this more dynamically sized
#if os(iOS)
        let crosshairGenerator = CrosshairGenerator(size:Size(width:480, height:640))
#else
        let crosshairGenerator = CrosshairGenerator(size:Size(width:1280, height:720))
#endif
        crosshairGenerator.crosshairWidth = 30.0

        castFilter.cornersDetectedCallback = { corners in
            crosshairGenerator.renderCrosshairs(corners)
        }

        camera --> castFilter

        let blendFilter = AlphaBlend()
        camera --> blendFilter --> outputView
        crosshairGenerator --> blendFilter

        return blendFilter
    })
)
The code compiles and runs, but the texture looks empty (just colored squares) when rendered on screen.
If I comment out the line uniformSettings["inputImageTexture"] = crosshairImage, the result remains the same, which makes me think the texture isn't being passed to the shader at all.
What am I missing here?
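For comparison, here is roughly what binding a texture to a sampler2D uniform involves at the raw GLES level; a minimal sketch, assuming a valid GL texture ID obtained from the PictureInput's framebuffer (bindInputTexture, textureID, and textureUnit are illustrative names, not GPUImage2 API):

import OpenGLES

// Sketch only: a sampler2D uniform receives a texture *unit* index via
// glUniform1i, with the texture object bound to that unit beforehand.
// Storing an object in a uniform-settings dictionary does not perform this.
func bindInputTexture(_ textureID: GLuint, program: GLuint) {
    let textureUnit: GLint = 1                         // assumed-free texture unit
    glActiveTexture(GLenum(GL_TEXTURE0 + textureUnit)) // select unit 1
    glBindTexture(GLenum(GL_TEXTURE_2D), textureID)    // attach the texture to it
    let location = glGetUniformLocation(program, "inputImageTexture")
    glUniform1i(location, textureUnit)                 // point the sampler at unit 1
}

Something along these lines would have to run while the crosshair shader program is active; as far as I can tell, ShaderUniformSettings only restores scalar, color, and matrix uniforms, so assigning a PictureInput to it would not trigger such a binding.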
Hello developer friends!
I'm facing a baffling problem with the behavior of a custom Core Image filter written in Metal.
The task is not very difficult: take a LUT file in PNG format and overlay it on an image with a given intensity. iOS has built-in filters, but they don't support LUT sizes larger than 64 or control over the overlay intensity, and they aren't great performance-wise; I also need to use a LUT of size 64 or more.
Finding the coordinates of a point in the filter kernel seemed straightforward enough, but for some reason some colors in the output image are distorted and the whole image is noticeably darker.
Thank you for any help.
Here is the filter core code:
#include <metal_stdlib>
using namespace metal;
#include <CoreImage/CoreImage.h>

extern "C" {
    namespace coreimage {
        float4 commitLUT64(sampler image, sampler lut, float intensity) {
            float4 color = image.sample(image.coord());
            color = clamp(color, float4(0.0f), float4(1.0f));

            float red = color.r * 63.0f;
            float green = color.g * 63.0f;
            float blue = color.b * 63.0f;

            float x = red / 512.0f;
            float y = green / 512.0f;
            float blueY = floor(blue / 8.0f) * 0.125f;
            float blueX = 0.125f * ceil(blue - 8.0f * floor(blue / 8.0f)) / 512.0f;

            float4 newColor = lut.sample(float2(x + blueX, y + blueY));
            return mix(color, float4(newColor.r, newColor.g, newColor.b, color.a), intensity);
        }
    }
}
Here is the ColorLookUp class, inherited from CIFilter:
class ColorLookUp: CIFilter {

    var inputImage: CIImage?
    var inputLUT: CIImage?
    var inputIntensity: CGFloat = 1.0

    static var kernel: CIKernel = {
        guard let url = Bundle.main.url(forResource: "ColorCube.ci", withExtension: "metallib"),
              let data = try? Data(contentsOf: url)
        else { fatalError("Unable to load metallib") }

        guard let kernel = try? CIKernel(functionName: "commitLUT64", fromMetalLibraryData: data)
        else { fatalError("Unable to create color kernel") }

        return kernel
    }()

    override var outputImage: CIImage? {
        guard let image = inputImage, let lut = inputLUT else { return inputImage }
        return ColorLookUp.kernel.apply(
            extent: image.extent,
            roiCallback: { (index, dest) -> CGRect in if index == 0 { return dest } else { return lut.extent } },
            arguments: [image, lut, inputIntensity])
    }
}
Here is the project repository with additional materials: https://github.com/VKostin8311/MetalKernelsTestApp.git
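For context, the filter would be driven like any other CIFilter subclass; a minimal usage sketch (sourceImage and the LUT asset name are placeholders):

let filter = ColorLookUp()
filter.inputImage = CIImage(image: sourceImage)            // UIImage to be graded
filter.inputLUT = CIImage(image: UIImage(named: "lut64")!) // 512x512 PNG holding the 64-point LUT (8x8 tiles of 64x64)
filter.inputIntensity = 1.0
let graded = filter.outputImage                            // render through a CIContext as usual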
I have the shader below where I define a sampler (constexpr sampler textureSampler (mag_filter::linear,min_filter::linear);).
using namespace metal;

struct ProjectedVertex {
    float4 position [[position]];
    float2 textureCoord;
};

fragment float4 fragmentShader(const ProjectedVertex in [[stage_in]],
                               const texture2d<float> colorTexture [[texture(0)]],
                               constant float4 &opacity [[buffer(1)]]) {
    constexpr sampler textureSampler(mag_filter::linear, min_filter::linear);
    const float4 colorSample = colorTexture.sample(textureSampler, in.textureCoord);
    return colorSample * opacity[0];
}
Now I would like to avoid hard-coding this sampler inside my shader. I found MTLSamplerState, but I don't know how to use it.
To create a sampler, first create a MTLSamplerDescriptor object and configure the descriptor’s properties. Then call the newSamplerStateWithDescriptor: method on the MTLDevice object that will use this sampler. After you create the sampler, you can release the descriptor or reconfigure its properties to create other samplers.
// Create default sampler state
MTLSamplerDescriptor *samplerDesc = [MTLSamplerDescriptor new];
samplerDesc.rAddressMode = MTLSamplerAddressModeRepeat;
samplerDesc.sAddressMode = MTLSamplerAddressModeRepeat;
samplerDesc.tAddressMode = MTLSamplerAddressModeRepeat;
samplerDesc.minFilter = MTLSamplerMinMagFilterLinear;
samplerDesc.magFilter = MTLSamplerMinMagFilterLinear;
samplerDesc.mipFilter = MTLSamplerMipFilterNotMipmapped;
id<MTLSamplerState> ss = [device newSamplerStateWithDescriptor:samplerDesc];
Then set the sampler state for the fragment function:
id<MTLRenderCommandEncoder> encoder = [commandBuffer renderCommandEncoderWithDescriptor: passDescriptor];
...
[encoder setFragmentSamplerState: ss atIndex:0];
Accessing from the shader:
fragment float4 albedoMainFragment(ImageColor in [[stage_in]],
texture2d<float> diffuseTexture [[texture(0)]],
sampler smp [[sampler(0)]]) {
float4 color = diffuseTexture.sample(smp, in.texCoord);
return color;
}
How to create a SamplerState
First, declare an MTLSamplerDescriptor and configure properties such as the address modes, magFilter, and minFilter.
Second, call the makeSamplerState method on an MTLDevice (in most cases the default device).
You can use the code below; I hope it helps.
private static func buildSamplerState() -> MTLSamplerState? {
    let descriptor = MTLSamplerDescriptor()
    descriptor.sAddressMode = .repeat // .clampToEdge, .mirrorRepeat, .clampToZero
    descriptor.tAddressMode = .repeat // .clampToEdge, .mirrorRepeat, .clampToZero
    descriptor.magFilter = .linear // .nearest
    descriptor.minFilter = .linear // .nearest
    let samplerState = MTLCreateSystemDefaultDevice()?.makeSamplerState(descriptor: descriptor)
    return samplerState
}
How to use it
...
let samplerState = buildSamplerState()
...
// call `makeRenderCommandEncoder` to create commandEncoder from commandBuffer
let commandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
commandEncoder.setFragmentSamplerState(samplerState, index: 0)
In your fragment shader:
fragment float4 exampleShader(VertexIO inputFragment [[stage_in]],
                              sampler textureSampler [[sampler(0)]],
                              texture2d<float> inputTexture [[texture(0)]])
{
    float2 position = inputFragment.textureCoord;
    // ...
    return inputTexture.sample(textureSampler, position);
}
attribute vec2 test;
attribute vec2 position;

void main() {
    vTexCoord = position;
    vec2 gg = test;
    .....
}
What does the location returned by getAttribLocation stand for?
I used to believe it was the index of the attribute in the source code, but no: it always returns 0 for position and 1 for test.
getAttribLocation gets the location of the attribute. Just because your GPU/driver returns 0 for position and 1 for test doesn't mean that all drivers will.
Also, while debugging, it's common to comment out parts of a shader. If an attribute is not used, the driver may optimize it away. In your case, if you were to comment out whatever lines use position, it's likely test would get location 0. If you weren't looking up the locations and assumed test was always at location 1, your code would fail.
On the other hand, you can set the location before you call linkProgram by calling bindAttribLocation. For example:
gl.bindAttribLocation(program, 10, "position");
gl.bindAttribLocation(program, 5, "test");
gl.linkProgram(program);
In which case you don't have to look up the locations.
var vs = `
attribute float position;
attribute float test;

void main() {
  gl_Position = vec4(position, test, 0, 1);
}
`;
var fs = `
void main() {
  gl_FragColor = vec4(0, 1, 0, 1);
}
`;

function createShader(gl, type, source) {
  var s = gl.createShader(type);
  gl.shaderSource(s, source);
  gl.compileShader(s);
  if (!gl.getShaderParameter(s, gl.COMPILE_STATUS)) {
    console.log(gl.getShaderInfoLog(s));
  }
  return s;
}

var gl = document.createElement("canvas").getContext("webgl");
var prg = gl.createProgram();
gl.attachShader(prg, createShader(gl, gl.VERTEX_SHADER, vs));
gl.attachShader(prg, createShader(gl, gl.FRAGMENT_SHADER, fs));
gl.bindAttribLocation(prg, 5, "position");
gl.bindAttribLocation(prg, 10, "test");
gl.linkProgram(prg);
if (!gl.getProgramParameter(prg, gl.LINK_STATUS)) {
  console.log(gl.getProgramInfoLog(prg));
}
console.log("test location:", gl.getAttribLocation(prg, "test"));
console.log("position location:", gl.getAttribLocation(prg, "position"));
I took a web demo and tried to create a JavaScript floorplan object.
It fails with the following error:
vertexAttribPointer: must have valid GL_ARRAY_BUFFER binding
I have tried to create an MWE (or, more accurately, an MNonEW), but the demo code includes two libraries. I will leave those out; if the error is not obvious and you need working code, I can paste those in as well. A link to the library files is provided below.
MWE.html
<!doctype html>
<html>
<head>
  <title>WebGL Demo</title>
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
  <link rel="stylesheet" href="../webgl.css" type="text/css">
  <script src="../sylvester.js" type="text/javascript"></script>
  <script src="../glUtils.js" type="text/javascript"></script>
  <script src="mwe.js" type="text/javascript"></script>

  <script id="shader-fs" type="x-shader/x-fragment">
    varying highp vec2 vTextureCoord;
    uniform sampler2D uSampler;
    void main(void) {
      gl_FragColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
    }
  </script>

  <!-- Vertex shader program -->
  <script id="shader-vs" type="x-shader/x-vertex">
    attribute vec3 aVertexPosition;
    attribute vec2 aTextureCoord;
    uniform mat4 uMVMatrix;
    uniform mat4 uPMatrix;
    varying highp vec2 vTextureCoord;
    void main(void) {
      gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
      vTextureCoord = aTextureCoord;
    }
  </script>
</head>

<body onload="start()">
  <canvas id="glcanvas" width="640" height="480">
    Your browser doesn't appear to support the <code>&lt;canvas&gt;</code> element.
  </canvas>
</body>
</html>
MWE.js
var canvas;
var gl;
var mvMatrix;
var shaderProgram;
var vertexPositionAttribute;
var textureCoordAttribute;
var perspectiveMatrix;
var floorplan;
var mvMatrixStack = [];

function mvPushMatrix(m) {
  if (m) {
    mvMatrixStack.push(m.dup());
    mvMatrix = m.dup();
  } else {
    mvMatrixStack.push(mvMatrix.dup());
  }
}

function mvPopMatrix() {
  if (!mvMatrixStack.length) {
    throw("Can't pop from an empty matrix stack.");
  }
  mvMatrix = mvMatrixStack.pop();
  return mvMatrix;
}

function mvRotate(angle, v) {
  var inRadians = angle * Math.PI / 180.0;
  var m = Matrix.Rotation(inRadians, $V([v[0], v[1], v[2]])).ensure4x4();
  multMatrix(m);
}

function start() {
  canvas = document.getElementById("glcanvas");
  initWebGL(canvas);  // Initialize the GL context
  if (gl) {
    gl.clearColor(0.0, 0.0, 0.0, 1.0);  // Clear to black, fully opaque
    gl.clearDepth(1.0);                 // Clear everything
    gl.enable(gl.DEPTH_TEST);           // Enable depth testing
    gl.depthFunc(gl.LEQUAL);            // Near things obscure far things
    initShaders();
    initBuffers();
    setInterval(drawScene, 15);
  }
}

function initWebGL() {
  gl = null;
  try {
    gl = canvas.getContext("experimental-webgl");
  }
  catch(e) {
  }
  // If we don't have a GL context, give up now
  if (!gl) {
    alert("Unable to initialize WebGL. Your browser may not support it.");
  }
}

function initBuffers() {
  var floorcoords = [
    [0, 0, 10, 0, 10, 10, 0, 10],
    [-5, 0, -5, 5, -10, -5, -10, 0]
  ];
  var wallColor = [1, 0, 0, 0];
  floorplan = new Floorplan(floorcoords, 8, wallColor);
}

function createVertexBuffer(vertices) {
  var b = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, b);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
  return b;
}

function Floorplan(coords, height, color) {
  var walls = [];
  var colors = [];
  var index = [0, 1, 2, 0, 2, 3];
  var indices = [];
  for (var i = 0; i < coords.length; i++) {
    for (var j = 0; j < coords[i].length; j += 4) {
      walls.push(coords[j], coords[j+1], 0);
      walls.push(coords[j+2], coords[j+3], 0);
      walls.push(coords[j+2], coords[j+3], height);
      walls.push(coords[j], coords[j+1], height);
      indices = indices.concat(index);
      for (var j = 0; j < 6; j++)
        colors = colors.concat(color);
    }
  }
  this.colorBuf = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, this.colorBuf);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW);
  this.walls = createVertexBuffer(walls);
  this.indices = gl.createBuffer();
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indices);
  gl.bufferData(gl.ELEMENT_ARRAY_BUFFER,
                new Uint16Array(indices), gl.STATIC_DRAW);
  console.log(indices.length + "," + colors.length);
}

Floorplan.prototype.draw = function() {
  gl.bindBuffer(gl.ARRAY_BUFFER, this.walls);
  gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
  gl.bindBuffer(gl.ARRAY_BUFFER, this.colors);
  gl.vertexAttribPointer(this.colorBuf, 4, gl.FLOAT, false, 0, 0);
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indices);
  setMatrixUniforms();
  gl.drawElements(gl.TRIANGLES, 24, gl.UNSIGNED_SHORT, 0);
}

var z = -6;

function drawScene() {
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
  perspectiveMatrix = makePerspective(45, 640.0/480.0, 0.1, 100.0);
  loadIdentity();
  mvTranslate([0.0, 0.0, z]);
  floorplan.draw();
}

function initShaders() {
  var fragmentShader = getShader(gl, "shader-fs");
  var vertexShader = getShader(gl, "shader-vs");
  // Create the shader program
  shaderProgram = gl.createProgram();
  gl.attachShader(shaderProgram, vertexShader);
  gl.attachShader(shaderProgram, fragmentShader);
  gl.linkProgram(shaderProgram);
  // If creating the shader program failed, alert
  if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
    alert("Unable to initialize the shader program.");
  }
  gl.useProgram(shaderProgram);
  vertexPositionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
  gl.enableVertexAttribArray(vertexPositionAttribute);
  textureCoordAttribute = gl.getAttribLocation(shaderProgram, "aTextureCoord");
  gl.enableVertexAttribArray(textureCoordAttribute);
}

//
// getShader
//
// Loads a shader program by scouring the current document,
// looking for a script with the specified ID.
//
function getShader(gl, id) {
  var shaderScript = document.getElementById(id);
  // Didn't find an element with the specified ID; abort.
  if (!shaderScript) {
    return null;
  }
  // Walk through the source element's children, building the
  // shader source string.
  var theSource = "";
  var currentChild = shaderScript.firstChild;
  while (currentChild) {
    if (currentChild.nodeType == 3) {
      theSource += currentChild.textContent;
    }
    currentChild = currentChild.nextSibling;
  }
  // Now figure out what type of shader script we have,
  // based on its MIME type.
  var shader;
  if (shaderScript.type == "x-shader/x-fragment") {
    shader = gl.createShader(gl.FRAGMENT_SHADER);
  } else if (shaderScript.type == "x-shader/x-vertex") {
    shader = gl.createShader(gl.VERTEX_SHADER);
  } else {
    return null;  // Unknown shader type
  }
  gl.shaderSource(shader, theSource);  // Send the source to the shader object
  gl.compileShader(shader);            // Compile the shader program
  // See if it compiled successfully
  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
    return null;
  }
  return shader;
}

//
// Matrix utility functions
//
function loadIdentity() {
  mvMatrix = Matrix.I(4);
}

function multMatrix(m) {
  mvMatrix = mvMatrix.x(m);
}

function mvTranslate(v) {
  multMatrix(Matrix.Translation($V([v[0], v[1], v[2]])).ensure4x4());
}

function setMatrixUniforms() {
  var pUniform = gl.getUniformLocation(shaderProgram, "uPMatrix");
  gl.uniformMatrix4fv(pUniform, false, new Float32Array(perspectiveMatrix.flatten()));
  var mvUniform = gl.getUniformLocation(shaderProgram, "uMVMatrix");
  gl.uniformMatrix4fv(mvUniform, false, new Float32Array(mvMatrix.flatten()));
}
The libraries used in this demo code are from:
https://github.com/mdn/webgl-examples/tree/gh-pages/tutorial
gl.vertexAttribPointer() takes as its first argument the index of a vertex attribute defined in your shader.
For instance, your first call to gl.vertexAttribPointer() goes something like this:
// In initShaders()...
vertexPositionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
gl.enableVertexAttribArray(vertexPositionAttribute);
// vertexPositionAttribute now contains an index to the attribute
// "aVertexPosition" in the vertex shader
// ...
// Later, in Floorplan.draw():
gl.bindBuffer(gl.ARRAY_BUFFER, this.walls); // Data from this.walls is bound to gl.ARRAY_BUFFER
gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
// The current gl.ARRAY_BUFFER is bound to "aVertexPosition"
But in your second call:
gl.vertexAttribPointer(this.colorBuf, 4, gl.FLOAT, false, 0, 0);
The problem here is that this.colorBuf isn't an index to a vertex attribute. If I had to guess, I'd say your code initially had a per-vertex color attribute which has been discarded in favor of texture coordinates.
So probably just get rid of that colorBuf stuff, and bind some data to aTextureCoord, and you may be in business.
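For illustration, swapping the color buffer for a texture-coordinate buffer might look roughly like this (a sketch; texCoords is a placeholder array with one (s, t) pair per wall vertex):

// In the Floorplan constructor: create and fill a texture-coordinate buffer.
this.texCoordBuf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.texCoordBuf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texCoords), gl.STATIC_DRAW);

// In Floorplan.draw(): bind it and point the aTextureCoord attribute at it.
gl.bindBuffer(gl.ARRAY_BUFFER, this.texCoordBuf);
gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);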
Without looking at your code, the specific error means you never called gl.bindBuffer(gl.ARRAY_BUFFER, someBuffer) before calling gl.vertexAttribPointer.
gl.vertexAttribPointer copies the reference to the last buffer you bound to gl.ARRAY_BUFFER into the attribute's buffer reference.
See this answer:
What is the logic of binding buffers in webgl?
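In other words, the pattern is always bind first, then point; a minimal sketch (buffer, data, and attribLoc are placeholders):

var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);        // make this the current ARRAY_BUFFER
gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);
gl.vertexAttribPointer(attribLoc, 3, gl.FLOAT, false, 0, 0);  // records the current binding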
I am trying to draw and remove geometrical shapes on a render target that displays an image.
During each frame render, a new image is rendered to the render target by updating the resource via texture mapping, which works perfectly as expected.
Now I'm trying to draw a new geometrical shape, filled with a solid color, on top of the render target; this should only happen while rendering every 10th frame.
However, I'm currently stuck as to how I should approach this.
I'm using DirectX 11 on a Windows 7 PC with C# (SlimDX or SharpDX for DirectX).
Any suggestion would be great.
Thanks.
Code:
In the rendering loop I added the code below to draw the overlay, which is a triangle in my case.
var device = this.Device;
var context = device.ImmediateContext;
var effectsFileResource = Properties.Resources.ShapeEffect;
ShaderBytecode shaderByteCode = ShaderBytecode.Compile(effectsFileResource, "fx_5_0", ShaderFlags.EnableStrictness | ShaderFlags.Debug, EffectFlags.None);
var effect = new Effect(device, shaderByteCode);
// create triangle vertex data, making sure to rewind the stream afterward
var verticesTriangle = new DataStream(VertexPositionColor.SizeInBytes * 3, true, true);
verticesTriangle.Write(new VertexPositionColor(new Vector3(0.0f, 0.5f, 0.5f), new Color4(1.0f, 0.0f, 0.0f, 1.0f)));
verticesTriangle.Write(new VertexPositionColor(new Vector3(0.5f, -0.5f, 0.5f), new Color4(0.0f, 1.0f, 0.0f, 1.0f)));
verticesTriangle.Write(new VertexPositionColor(new Vector3(-0.5f, -0.5f, 0.5f), new Color4(0.0f, 0.0f, 1.0f, 1.0f)));
verticesTriangle.Position = 0;
// create the triangle vertex layout and buffer
var layoutColor = new InputLayout(device, effect.GetTechniqueByName("Color").GetPassByIndex(0).Description.Signature, VertexPositionColor.inputElements);
var vertexBufferColor = new SharpDX.Direct3D11.Buffer(device, verticesTriangle, (int)verticesTriangle.Length, ResourceUsage.Default, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
verticesTriangle.Close();
var srv = new ShaderResourceView(device, this.RenderTarget);
effect.GetVariableByName("g_Overlay").AsShaderResource().SetResource(srv);
// Think of the shared textureD3D10 as an overlay.
// The overlay needs to show the 2d content but let the underlying triangle (or whatever)
// show thru, which is accomplished by blending.
var bsd = new BlendStateDescription();
bsd.RenderTarget[0].IsBlendEnabled = true;
bsd.RenderTarget[0].SourceBlend = BlendOption.SourceColor;
bsd.RenderTarget[0].DestinationBlend = BlendOption.BlendFactor;
bsd.RenderTarget[0].BlendOperation = BlendOperation.Add;
bsd.RenderTarget[0].SourceAlphaBlend = BlendOption.One;
bsd.RenderTarget[0].DestinationAlphaBlend = BlendOption.Zero;
bsd.RenderTarget[0].AlphaBlendOperation = BlendOperation.Add;
bsd.RenderTarget[0].RenderTargetWriteMask = ColorWriteMaskFlags.All;
var blendStateTransparent = new BlendState(device, bsd);
context.InputAssembler.InputLayout = layoutColor;
context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleStrip;
context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertexBufferColor, VertexPositionColor.SizeInBytes, 0));
context.OutputMerger.BlendState = blendStateTransparent;
var currentTechnique = effect.GetTechniqueByName("Color");
for (var pass = 0; pass < currentTechnique.Description.PassCount; ++pass)
{
    using (var effectPass = currentTechnique.GetPassByIndex(pass))
    {
        System.Diagnostics.Debug.Assert(effectPass.IsValid, "Invalid EffectPass");
        effectPass.Apply(context);
    }
    context.Draw(3, 0);
}
srv.Dispose();
Also below is the shader file for the effect:
Texture2D g_Overlay;

SamplerState g_samLinear
{
    Filter = MIN_MAG_MIP_LINEAR;
    AddressU = CLAMP;
    AddressV = CLAMP;
};

// ------------------------------------------------------
// A shader that accepts Position and Color
// ------------------------------------------------------
struct ColorVS_IN
{
    float4 pos : POSITION;
    float4 col : COLOR;
};

struct ColorPS_IN
{
    float4 pos : SV_POSITION;
    float4 col : COLOR;
};

ColorPS_IN ColorVS(ColorVS_IN input)
{
    ColorPS_IN output = (ColorPS_IN)0;
    output.pos = input.pos;
    output.col = input.col;
    return output;
}

float4 ColorPS(ColorPS_IN input) : SV_Target
{
    return input.col;
}

// ------------------------------------------------------
// A shader that accepts Position and Texture
// Used as an overlay
// ------------------------------------------------------
struct OverlayVS_IN
{
    float4 pos : POSITION;
    float2 tex : TEXCOORD0;
};

struct OverlayPS_IN
{
    float4 pos : SV_POSITION;
    float2 tex : TEXCOORD0;
};

OverlayPS_IN OverlayVS(OverlayVS_IN input)
{
    OverlayPS_IN output = (OverlayPS_IN)0;
    output.pos = input.pos;
    output.tex = input.tex;
    return output;
}

float4 OverlayPS(OverlayPS_IN input) : SV_Target
{
    float4 color = g_Overlay.Sample(g_samLinear, input.tex);
    return color;
}

// ------------------------------------------------------
// Techniques
// ------------------------------------------------------
technique11 Color
{
    pass P0
    {
        SetGeometryShader(0);
        SetVertexShader(CompileShader(vs_4_0, ColorVS()));
        SetPixelShader(CompileShader(ps_4_0, ColorPS()));
    }
}

technique11 Overlay
{
    pass P0
    {
        SetGeometryShader(0);
        SetVertexShader(CompileShader(vs_4_0, OverlayVS()));
        SetPixelShader(CompileShader(ps_4_0, OverlayPS()));
    }
}
The above code has been taken from: SharedResources using SharpDX