OpenGL ES: selective blending - iOS

I'm using GPUImage's corner detector to extract corners from camera-captured frames. I'd like to render sparkles at the corner coordinates. Once I have the corner coordinates, I pass them to my sparkle generator (pretty similar to GPUImage's crosshair generator):
public func renderSparkles(_ positions: [Position]) {
    imageFramebuffer.activateFramebufferForRendering()
    imageFramebuffer.timingStyle = .stillImage
    glEnable(GLenum(GL_POINT_SPRITE_OES))
    sparklesShader.use()
    uniformSettings.restoreShaderSettings(sparklesShader)
    clearFramebufferWithColor(Color.transparent)
    guard let positionAttribute = sparklesShader.attributeIndex("position") else {
        fatalError("A position attribute was missing from the shader program during rendering.")
    }
    let convertedPositions = positions.flatMap { $0.toGLArray() }
    glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, convertedPositions)
    glDrawArrays(GLenum(GL_POINTS), 0, GLsizei(positions.count))
    notifyTargets()
}
In the generator's fragment shader I draw a sparkle at each corner coordinate:
uniform lowp vec3 crosshairColor;
varying highp vec2 centerLocation;

void main()
{
    lowp vec2 distanceFromCenter = abs(centerLocation - gl_PointCoord.xy);
    lowp float r = length(distanceFromCenter) * 0.042;
    lowp float a = atan(distanceFromCenter.y, distanceFromCenter.x);
    lowp float f = abs(cos(a * 2.0)) * 0.100 * -0.322 / 0.224;
    lowp float b = abs(cos(a * 2.0)) * 0.078 * -0.882 / 17.088;
    lowp float c = abs(cos(a * 2.0)) * 0.030 * -0.178 * 0.688;
    lowp vec4 color = vec4(1.0 - smoothstep(f, b + 0.04, r)) + vec4(1.0 - smoothstep(b, c + 0.02, r / 0.644)) + vec4(1.0 - smoothstep(b, c + 0.011, r / 0.764));
    gl_FragColor = color;
}
Here is an example of the output I get.
How can I blend the sparkles?

In case someone else finds it helpful, here is the sparkle-rendering method based on Rabbid76's suggestion:
public func renderSparkles(_ positions: [Position]) {
    imageFramebuffer.activateFramebufferForRendering()
    imageFramebuffer.timingStyle = .stillImage
    glEnable(GLenum(GL_POINT_SPRITE_OES))
    glEnable(GLenum(GL_BLEND))
    glBlendFunc(GLenum(GL_SRC_ALPHA), GLenum(GL_ONE_MINUS_CONSTANT_ALPHA))
    sparklesShader.use()
    uniformSettings.restoreShaderSettings(sparklesShader)
    clearFramebufferWithColor(Color.transparent)
    guard let positionAttribute = sparklesShader.attributeIndex("position") else {
        fatalError("A position attribute was missing from the shader program during rendering.")
    }
    let convertedPositions = positions.flatMap { $0.toGLArray() }
    glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, convertedPositions)
    glDrawArrays(GLenum(GL_POINTS), 0, GLsizei(positions.count))
    notifyTargets()
}
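One note on why this works: with the source factor set to GL_SRC_ALPHA, the sparkle shader's alpha output controls how strongly each fragment covers the background, and the fragment shader from the question writes the same value into all four channels, so the alpha already falls off to zero away from the sparkle shape. Below is a minimal sketch of the final assignment with the alpha spelled out explicitly; using the otherwise unused crosshairColor uniform as a tint is my assumption, not part of the original filter.

    // Sketch: same sparkle shape math as the shader above, with an explicit alpha
    // so the GL_SRC_ALPHA blend factor fades the sparkle into the background.
    lowp float intensity = clamp((1.0 - smoothstep(f, b + 0.04, r))
                               + (1.0 - smoothstep(b, c + 0.02, r / 0.644))
                               + (1.0 - smoothstep(b, c + 0.011, r / 0.764)), 0.0, 1.0);
    gl_FragColor = vec4(crosshairColor * intensity, intensity);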

Related

How to blend with an image mask?

How can I apply such a mask to get an effect such as bokeh? I need to blur the edge of the mask and apply it to the image texture. How do I do that?
Vertex shader:
attribute vec4 a_Position;

void main()
{
    gl_Position = a_Position;
}
Fragment shader:
precision lowp float;

uniform sampler2D u_Sampler; // textureSampler
uniform sampler2D u_Mask;    // maskSampler
uniform vec3 iResolution;

vec4 blur(sampler2D source, vec2 size, vec2 uv) {
    vec4 C = vec4(0.0);
    float width = 1.0 / size.x;
    float height = 1.0 / size.y;
    float divisor = 0.0;
    for (float x = -25.0; x <= 25.0; x++)
    {
        C += texture2D(source, uv + vec2(x * width, 0.0));
        C += texture2D(source, uv + vec2(0.0, x * height));
        divisor++;
    }
    C *= 0.5;
    return vec4(C.r / divisor, C.g / divisor, C.b / divisor, 1.0);
}

void main()
{
    vec2 uv = gl_FragCoord.xy / iResolution.xy;
    vec4 videoColor = texture2D(u_Sampler, uv);
    vec4 maskColor = texture2D(u_Mask, uv);
    gl_FragColor = blur(u_Sampler, iResolution.xy, uv);
}
To apply the mask, replace the last line of main() with a mix of the blurred and original colors, weighted by the mask:

    vec4 blurColor = blur(u_Sampler, iResolution.xy, uv);
    gl_FragColor = mix(blurColor, videoColor, maskColor.r);
But FYI, it's not common to blur in one pass like you have. It's more common to blur in one direction (horizontally), then blur the result of that vertically, and then mix the blurred texture with the video texture using the mask.
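As a rough sketch of that two-pass idea (this is not the code from the question, and the uniform names are made up): run the source texture through the shader below once with u_Direction = (1.0, 0.0), run the result through it again with u_Direction = (0.0, 1.0), then mix the blurred result with the video texture using the mask exactly as above.

precision mediump float;

uniform sampler2D u_Source;  // texture to blur (hypothetical name)
uniform vec2 u_Resolution;   // texture size in pixels
uniform vec2 u_Direction;    // (1.0, 0.0) for the horizontal pass, (0.0, 1.0) for the vertical pass

void main() {
    vec2 uv = gl_FragCoord.xy / u_Resolution;
    vec2 texel = u_Direction / u_Resolution;
    vec4 sum = vec4(0.0);
    float weightSum = 0.0;
    // Simple box blur along one axis; Gaussian weights would look smoother.
    for (float i = -25.0; i <= 25.0; i++) {
        sum += texture2D(u_Source, uv + i * texel);
        weightSum += 1.0;
    }
    gl_FragColor = vec4((sum / weightSum).rgb, 1.0);
}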

Transparent objects with visible textures (set alpha channel)

I have to show transparent objects with a texture that has an alpha channel.
Using OpenGL ES 2.0 and mtl2opengl.pl I could show the textured object on my iPhone, but the alpha channel didn't work.
This is almost the same as the mtl2opengl.pl sample. How should I change the code?
in ViewController.m:
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
    // Clear Buffers
    glClearColor(1.0, 1.0, 1.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Set View Matrices
    [self updateViewMatrices];
    glUniformMatrix4fv(_uniforms.uProjectionMatrix, 1, 0, _projectionMatrix.m);
    glUniformMatrix4fv(_uniforms.uModelViewMatrix, 1, 0, _modelViewMatrix.m);
    glUniformMatrix3fv(_uniforms.uNormalMatrix, 1, 0, _normalMatrix.m);

    // Attach Texture
    glUniform1i(_uniforms.uTexture, 0);

    // Set View Mode
    glUniform1i(_uniforms.uMode, self.viewMode.selectedSegmentIndex);

    // Enable Attributes
    glEnableVertexAttribArray(_attributes.aVertex);
    glEnableVertexAttribArray(_attributes.aNormal);
    glEnableVertexAttribArray(_attributes.aTexture);

    // Load OBJ Data
    glVertexAttribPointer(_attributes.aVertex, 3, GL_FLOAT, GL_FALSE, 0, buildOBJVerts);
    glVertexAttribPointer(_attributes.aNormal, 3, GL_FLOAT, GL_FALSE, 0, buildOBJNormals);
    glVertexAttribPointer(_attributes.aTexture, 2, GL_FLOAT, GL_FALSE, 0, buildOBJTexCoords);

    // Load MTL Data
    for (int i = 0; i < buildMTLNumMaterials; i++)
    {
        glUniform3f(_uniforms.uAmbient, buildMTLAmbient[i][0], buildMTLAmbient[i][1], buildMTLAmbient[i][2]);
        glUniform3f(_uniforms.uDiffuse, buildMTLDiffuse[i][0], buildMTLDiffuse[i][1], buildMTLDiffuse[i][2]);
        glUniform3f(_uniforms.uSpecular, buildMTLSpecular[i][0], buildMTLSpecular[i][1], buildMTLSpecular[i][2]);
        glUniform1f(_uniforms.uExponent, buildMTLExponent[i]);

        // Draw scene by material group
        glDrawArrays(GL_TRIANGLES, buildMTLFirst[i], buildMTLCount[i]);
    }

    // Disable Attributes
    glDisableVertexAttribArray(_attributes.aVertex);
    glDisableVertexAttribArray(_attributes.aNormal);
    glDisableVertexAttribArray(_attributes.aTexture);
}
Shader.fsh:
// FRAGMENT SHADER
static const char* ShaderF = STRINGIFY
(
    // Input from Vertex Shader
    varying mediump vec3 vNormal;
    varying mediump vec2 vTexture;

    // MTL Data
    uniform lowp vec3 uAmbient;
    uniform lowp vec3 uDiffuse;
    uniform lowp vec3 uSpecular;
    uniform highp float uExponent;

    uniform lowp int uMode;
    uniform lowp vec3 uColor;
    uniform sampler2D uTexture;

    lowp vec3 materialDefault(highp float df, highp float sf)
    {
        lowp vec3 diffuse = vec3(0.0, 1.0, 0.0);
        highp float exponent = 1.0;
        sf = pow(sf, exponent);
        return (df * diffuse);
    }

    lowp vec3 materialMTL(highp float df, highp float sf)
    {
        sf = pow(sf, uExponent);
        return (df * uDiffuse);
    }

    lowp vec3 modelColor(void)
    {
        highp vec3 N = normalize(vNormal);
        highp vec3 L = vec3(1.0, 1.0, 0.5);
        highp vec3 E = vec3(0.0, 0.0, 1.0);
        highp vec3 H = normalize(L + E);
        highp float df = max(0.0, dot(N, L));
        highp float sf = max(0.0, dot(N, H));

        // Default
        if (uMode == 0)
            return materialDefault(df, sf);
        // Texture
        else if (uMode == 1)
            return (materialDefault(df, sf) * vec3(texture2D(uTexture, vTexture)));
        // Material
        else if (uMode == 2)
            return materialMTL(df, sf);
    }

    void main(void)
    {
        lowp vec3 color = modelColor();
        gl_FragColor = vec4(color, 1.0);
    }
);
It's obvious: learn the language from a book, use vec4 everywhere (materialDefault, materialMTL, modelColor), and activate alpha blending.
example:
lowp vec4 materialMTL(highp float df, highp float sf)
{
    sf = pow(sf, uExponent);
    return vec4(df * uDiffuse, 1.0);
}
In lowp vec4 modelColor(void):
return (materialDefault(df, sf) * texture2D(uTexture, vTexture));
activate alpha blending:
glEnable (GL_BLEND);
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
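Putting the pieces together, here is a sketch (not the full original shader) of how those vec4 changes could flow through modelColor() and main(), assuming materialDefault() is also changed to return a lowp vec4:

lowp vec4 modelColor(void)
{
    highp vec3 N = normalize(vNormal);
    highp vec3 L = vec3(1.0, 1.0, 0.5);
    highp vec3 E = vec3(0.0, 0.0, 1.0);
    highp vec3 H = normalize(L + E);
    highp float df = max(0.0, dot(N, L));
    highp float sf = max(0.0, dot(N, H));

    if (uMode == 0)            // Default
        return materialDefault(df, sf);
    else if (uMode == 1)       // Texture: texture2D returns vec4, so its alpha survives
        return materialDefault(df, sf) * texture2D(uTexture, vTexture);
    else                       // Material
        return materialMTL(df, sf);
}

void main(void)
{
    // The texture's alpha now reaches the blend stage instead of being forced to 1.0
    gl_FragColor = modelColor();
}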
What's the real problem? Add
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
before rendering transparent objects.

Drawing a grid in a WebGL fragment shader

I'm working on porting a ZUI from SVG over to WebGL for a few reasons, and I'd like to render a grid using a fragment shader.
Here's the basic effect I'm going for https://dl.dropboxusercontent.com/u/412963/steel/restel_2.mp4
I'd like to have a triangle that has thin, 1px lines every 10 units, and a thicker 2px line every 100 units (the units here being arbitrary but consistent with world-space, not screen-space).
Here's what I have so far, without the secondary thicker lines like in the video (note that this is literally a copy from my open buffer, and obviously isn't right):
Vertex Shader:
attribute vec3 aVertexPosition;

uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;

varying float vX;
varying float vY;

void main(void) {
    vX = aVertexPosition.x;
    vY = aVertexPosition.y;
    gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
}
Fragment Shader:
precision mediump float;

uniform vec2 resolution;
uniform float uZoomFactor;

varying float vX;
varying float vY;

void main(void) {
    float distance = gl_FragCoord.z / gl_FragCoord.w;
    float fuzz = 1.0 / distance;
    float minorLineFreq;
    if (distance > 10.0) {
        minorLineFreq = 1.0;
    } else if (distance > 5.0) {
        minorLineFreq = 1.0;
    } else {
        minorLineFreq = 0.10;
    }
    float xd = mod(vX, minorLineFreq) * 88.1;
    float yd = mod(vY, minorLineFreq) * 88.1;
    if (xd < fuzz) {
        gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);
    } else if (yd < fuzz) {
        gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);
    } else {
        gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
    }
}
It produces approximately the right image at a certain distance (but notice the banding effect where there are 2px lines instead of 1px):
Grid with banding
Zoomed in grid with unwanted thicker lines
So, how can I get a consistent grid, with 1px thick lines at every distance, all inside of a WebGL fragment shader?
I believe I've found an acceptable solution.
Using the following vertices (drawn in a triangle strip):
[  1.0  1.0  0.0
  -1.0  1.0  0.0
   1.0 -1.0  0.0
  -1.0 -1.0  0.0 ]
Vertex shader:
attribute vec4 aVertexPosition;

void main(void) {
    gl_Position = aVertexPosition;
}
Fragment Shader:
precision mediump float;

uniform float vpw;   // Width, in pixels
uniform float vph;   // Height, in pixels
uniform vec2 offset; // e.g. [-0.023500000000000434, 0.9794000000000017], currently the same as the x/y offset in the mvMatrix
uniform vec2 pitch;  // e.g. [50, 50]

void main() {
    float lX = gl_FragCoord.x / vpw;
    float lY = gl_FragCoord.y / vph;

    float scaleFactor = 10000.0;

    float offX = (scaleFactor * offset[0]) + gl_FragCoord.x;
    float offY = (scaleFactor * offset[1]) + (1.0 - gl_FragCoord.y);

    if (int(mod(offX, pitch[0])) == 0 ||
        int(mod(offY, pitch[1])) == 0) {
        gl_FragColor = vec4(0.0, 0.0, 0.0, 0.5);
    } else {
        gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
    }
}
Gives results (depending on the pitch and offset) like:
gl_FragCoord is already scaled to the render target resolution. So you can simply:
precision mediump float;

vec4 color = vec4(1.);
vec2 pitch = vec2(50., 50.);

void main() {
    if (mod(gl_FragCoord.x, pitch[0]) < 1. ||
        mod(gl_FragCoord.y, pitch[1]) < 1.) {
        gl_FragColor = color;
    } else {
        gl_FragColor = vec4(0.);
    }
}
https://glslsandbox.com/e#74754.0
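The original question also wanted a thicker line every 100 units in addition to the thin ones every 10. Here is a sketch extending the same pitch test to two line weights; it works in screen-space pixels like the shader above, the uniform names and values are assumptions, and the offset/scaleFactor trick from the accepted answer would still be needed to tie it to world space.

precision mediump float;

uniform vec2 minorPitch;   // e.g. (10.0, 10.0) pixels between thin lines
uniform vec2 majorPitch;   // e.g. (100.0, 100.0) pixels between thick lines

void main() {
    vec2 p = gl_FragCoord.xy;

    // 2px-wide test for the major grid, 1px-wide test for the minor grid
    bool onMajor = mod(p.x, majorPitch.x) < 2.0 || mod(p.y, majorPitch.y) < 2.0;
    bool onMinor = mod(p.x, minorPitch.x) < 1.0 || mod(p.y, minorPitch.y) < 1.0;

    if (onMajor) {
        gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);      // thick line
    } else if (onMinor) {
        gl_FragColor = vec4(0.35, 0.35, 0.35, 1.0);   // thin line
    } else {
        gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);      // background
    }
}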

GPUImage: create a custom filter that changes selected colors

Using the amazing GPUImage framework, I'm trying to create a custom filter with a custom fragment shader that is passed some color vectors as uniforms and processes each fragment, substituting a chosen color with one from the uniforms. I made that work using Quartz, but since I'm taking my first steps in the OpenGL world with this framework, I'd like to give GPU processing a try.
The fragment shader I made seems to work fine, but there is a problem in the output. I'm posting just a sample for debugging purposes:
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;

bool compareVectors(lowp vec3 x, lowp vec3 y) {
    bool result;
    if (x.r != y.r) {
        return result = false;
    }
    if (x.b != y.b) {
        return result = false;
    }
    if (x.g != y.g) {
        return result = false;
    }
    return result = true;
}

void main()
{
    lowp vec3 tc = vec3(0.0, 0.0, 0.0);
    lowp vec4 pixcol = texture2D(inputImageTexture, textureCoordinate).rgba;

    lowp vec3 sampleColors[3];
    lowp vec3 newColors[3];

    sampleColors[0] = vec3(0.2, 0.2, 0.2);
    sampleColors[1] = vec3(0.0, 0.0, 0.0);
    sampleColors[2] = vec3(1.0, 1.0, 1.0);

    newColors[0] = vec3(0.4, 0.4, 1.0);
    newColors[1] = vec3(0.3, 0.4, 1.0);
    newColors[2] = vec3(0.6, 0.7, 0.5);

    if (pixcol.a >= 0.2) {
        if (compareVectors(sampleColors[0], pixcol.rgb))
            tc = newColors[0];
        else if (compareVectors(sampleColors[1], pixcol.rgb))
            tc = newColors[1];
        else if (compareVectors(sampleColors[2], pixcol.rgb))
            tc = newColors[2];
        else
            tc = pixcol.rgb;
    }
    else
        tc = pixcol.rgb;

    gl_FragColor = vec4(tc.rgb, pixcol.a);
}
The resulting image has a lot of artifacts. It seems pixellated on screen and not well formed when written to disk. Here are some screenshots.
The first image is the starting image, the second is a screenshot of the filtered image on the iPhone screen, and the third is the filtered image written to disk.
Digging into that, I remembered that a texel and a pixel aren't the same thing, so probably I'm not mapping them correctly. I'd like to have a 1:1 position ratio, and that is probably not happening. How can I achieve that? Thanks, Andrea
As stated in the comment by Brad, the solution was simply to compare the texel color against a range of values. This is due to float precision (I feel stupid writing it now; it was pretty obvious). The starting image used fixed, controlled colors, but since the original image is sampled, the values probably don't exactly match the starting image.
Here is the correct fragment shader:
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;

bool compareVectors(lowp vec3 sample, lowp vec3 texel) {
    bool result;
    if (abs(texel.r - sample.r) > 0.1) {
        return result = false;
    }
    if (abs(texel.g - sample.g) > 0.1) {
        return result = false;
    }
    if (abs(texel.b - sample.b) > 0.1) {
        return result = false;
    }
    return result = true;
}

void main()
{
    lowp vec3 tc = vec3(0.0, 0.0, 0.0);
    lowp vec4 pixcol = texture2D(inputImageTexture, textureCoordinate).rgba;

    lowp vec3 sampleColors[3];
    lowp vec3 newColors[3];

    sampleColors[0] = vec3(0.5, 0.5, 0.5);
    sampleColors[1] = vec3(0.0, 0.0, 0.0);
    sampleColors[2] = vec3(1.0, 1.0, 1.0);

    newColors[0] = vec3(0.4, 0.4, 1.0);
    newColors[1] = vec3(0.3, 0.4, 1.0);
    newColors[2] = vec3(0.6, 0.7, 0.5);

    if (pixcol.a >= 0.2) {
        if (compareVectors(sampleColors[0], pixcol.rgb))
            tc = newColors[0];
        else if (compareVectors(sampleColors[1], pixcol.rgb))
            tc = newColors[1];
        else if (compareVectors(sampleColors[2], pixcol.rgb))
            tc = newColors[2];
        else
            tc = pixcol.rgb;
    }
    else
        tc = pixcol.rgb;

    gl_FragColor = vec4(tc.rgb, pixcol.a);
}
I'd like to thank Brad, who found the answer. Hope this helps.
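Since the stated goal was to pass the color vectors as uniforms rather than hard-coding them, here is a sketch of the same range comparison driven from uniforms. The uniform names and the tolerance value are assumptions, not part of the posted filter, and they would still need to be bound from the host-side GPUImage code.

varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;

uniform lowp vec3 sampleColors[3];  // colors to look for (set from the host side)
uniform lowp vec3 newColors[3];     // replacement colors (set from the host side)
uniform lowp float tolerance;       // per-channel match range, e.g. 0.1

bool compareVectors(lowp vec3 target, lowp vec3 texel) {
    // true when every channel is within the tolerance of the target color
    return all(lessThan(abs(texel - target), vec3(tolerance)));
}

void main()
{
    lowp vec4 pixcol = texture2D(inputImageTexture, textureCoordinate);
    lowp vec3 tc = pixcol.rgb;

    if (pixcol.a >= 0.2) {
        for (int i = 0; i < 3; i++) {
            if (compareVectors(sampleColors[i], pixcol.rgb)) {
                tc = newColors[i];
                break;
            }
        }
    }

    gl_FragColor = vec4(tc, pixcol.a);
}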

How do I draw a filled circle with OpenGL ES on iPhone?

How do I draw a filled circle with OpenGL on iPhone?
I've found many solutions, but none of them work, probably because there are many ways to do it. What's the method with the shortest code?
For a truly smooth circle, you're going to want a custom fragment shader. For example, the following vertex shader:
attribute vec4 position;
attribute vec4 inputTextureCoordinate;

varying vec2 textureCoordinate;

void main()
{
    gl_Position = position;
    textureCoordinate = inputTextureCoordinate.xy;
}
and fragment shader:
varying highp vec2 textureCoordinate;

const highp vec2 center = vec2(0.5, 0.5);
const highp float radius = 0.5;

void main()
{
    highp float distanceFromCenter = distance(center, textureCoordinate);
    lowp float checkForPresenceWithinCircle = step(distanceFromCenter, radius);
    gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0) * checkForPresenceWithinCircle;
}
will draw a smooth red circle within a square that you draw to the screen. You'll need to supply vertices for your square to the position attribute and coordinates that range from 0.0 to 1.0 in X and Y to the inputTextureCoordinate attribute, but this will draw a circle that's as sharp as your viewport's resolution allows and do so very quickly.
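If the hard step() edge shows aliasing at your viewport size, a common variation is to feather the edge over a small band with smoothstep. Here is a sketch of the same fragment shader with a softened rim; the 0.005 feather width is an arbitrary choice, tune it to taste.

varying highp vec2 textureCoordinate;

const highp vec2 center = vec2(0.5, 0.5);
const highp float radius = 0.5;

void main()
{
    highp float distanceFromCenter = distance(center, textureCoordinate);
    // 1.0 well inside the circle, 0.0 outside, blended across a 0.005-wide band at the rim
    lowp float coverage = 1.0 - smoothstep(radius - 0.005, radius, distanceFromCenter);
    gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0) * coverage;
}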
One way would be to use GL_POINTS:
glPointSize(radius);
glBegin(GL_POINTS);
glVertex2f(x,y);
glEnd();
Another alternative would be to use GL_TRIANGLE_FAN:
radius = 1.0;
glBegin(GL_TRIANGLE_FAN);
glVertex2f(x, y);
for (int angle = 1; angle <= 360; angle = angle + 1)
    glVertex2f(x + sind(angle) * radius, y + cosd(angle) * radius);
glEnd();
To get the vertices of a circle:
float[] verts = MakeCircle2d(1, 100, 0, 0);

public static float[] MakeCircle2d(float rad, int points, float x, float y) // x, y offsets
{
    float[] verts = new float[points * 2 + 2];
    boolean first = true;
    float fx = 0;
    float fy = 0;
    int c = 0;
    for (int i = 0; i < points; i++)
    {
        float fi = 2 * Trig.PI * i / points;
        float xa = rad * Trig.sin(fi + Trig.PI) + x;
        float ya = rad * Trig.cos(fi + Trig.PI) + y;
        if (first)
        {
            first = false;
            fx = xa;
            fy = ya;
        }
        verts[c] = xa;
        verts[c + 1] = ya;
        c += 2;
    }
    verts[c] = fx;
    verts[c + 1] = fy;
    return verts;
}
Draw it as GL10.GL_LINES if you want an empty circle:
gl.glDrawArrays(GL10.GL_LINES, 0, verts.length / 2);
Or draw it as GL10.GL_TRIANGLE_FAN if you want a filled one:
gl.glDrawArrays(GL10.GL_TRIANGLE_FAN, 0, verts.length / 2);
It's Java, but it is really easy to convert to C++/Objective-C.
Here is a super fast way using shaders... Just make a quad with a vertex buffer and set the UVs from -1 to 1 for each corner of the quad.
The vertex buffer in floats should look like this:
NOTE: this needs an index buffer too.
var verts = new float[20]
{
    -1, -1, 0, -1, -1,
    -1,  1, 0, -1,  1,
     1,  1, 0,  1,  1,
     1, -1, 0,  1, -1,
};
#VS
attribute vec3 Position0;
attribute vec2 Texcoord0;

varying vec4 Position_VSPS;
varying vec2 Texcoord_VSPS;

uniform vec2 Location;

void main()
{
    vec3 loc = vec3(Position0.xy + Location, Position0.z);
    gl_Position = Position_VSPS = vec4(loc, 1);
    Texcoord_VSPS = loc.xy;
}
#END

#PS
varying vec4 Position_VSPS;
varying vec2 Texcoord_VSPS;

uniform vec2 Location;

void main()
{
    float dis = distance(Location, Texcoord_VSPS);
    if (.1 - dis < 0.0) discard;
    gl_FragData[0] = vec4(0, 0, 1, 1);
}
#END
