Transparent objects with visible textures (alpha channel set) - iOS

I need to display transparent objects whose texture has an alpha channel.
Using OpenGL ES 2.0 and mtl2opengl.pl I can show the object with its texture on my iPhone, but the alpha channel has no effect.
The code is almost the same as the mtl2opengl.pl sample. How should I change it?
In ViewController.m:
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect{
// Clear Buffers
glClearColor(1.0, 1.0, 1.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Set View Matrices
[self updateViewMatrices];
glUniformMatrix4fv(_uniforms.uProjectionMatrix, 1, 0, _projectionMatrix.m);
glUniformMatrix4fv(_uniforms.uModelViewMatrix, 1, 0, _modelViewMatrix.m);
glUniformMatrix3fv(_uniforms.uNormalMatrix, 1, 0, _normalMatrix.m);
// Attach Texture
glUniform1i(_uniforms.uTexture, 0);
// Set View Mode
glUniform1i(_uniforms.uMode, self.viewMode.selectedSegmentIndex);
// Enable Attributes
glEnableVertexAttribArray(_attributes.aVertex);
glEnableVertexAttribArray(_attributes.aNormal);
glEnableVertexAttribArray(_attributes.aTexture);
// Load OBJ Data
glVertexAttribPointer(_attributes.aVertex, 3, GL_FLOAT, GL_FALSE, 0, buildOBJVerts);
glVertexAttribPointer(_attributes.aNormal, 3, GL_FLOAT, GL_FALSE, 0, buildOBJNormals);
glVertexAttribPointer(_attributes.aTexture, 2, GL_FLOAT, GL_FALSE, 0, buildOBJTexCoords);
// Load MTL Data
for(int i=0; i<buildMTLNumMaterials; i++)
{
glUniform3f(_uniforms.uAmbient, buildMTLAmbient[i][0], buildMTLAmbient[i][1], buildMTLAmbient[i][2]);
glUniform3f(_uniforms.uDiffuse, buildMTLDiffuse[i][0], buildMTLDiffuse[i][1], buildMTLDiffuse[i][2]);
glUniform3f(_uniforms.uSpecular, buildMTLSpecular[i][0], buildMTLSpecular[i][1], buildMTLSpecular[i][2]);
glUniform1f(_uniforms.uExponent, buildMTLExponent[i]);
// Draw scene by material group
glDrawArrays(GL_TRIANGLES, buildMTLFirst[i], buildMTLCount[i]);
}
// Disable Attributes
glDisableVertexAttribArray(_attributes.aVertex);
glDisableVertexAttribArray(_attributes.aNormal);
glDisableVertexAttribArray(_attributes.aTexture);
}
Shader.fsh:
// FRAGMENT SHADER
static const char* ShaderF = STRINGIFY
(
// Input from Vertex Shader
varying mediump vec3 vNormal;
varying mediump vec2 vTexture;
// MTL Data
uniform lowp vec3 uAmbient;
uniform lowp vec3 uDiffuse;
uniform lowp vec3 uSpecular;
uniform highp float uExponent;
uniform lowp int uMode;
uniform lowp vec3 uColor;
uniform sampler2D uTexture;
lowp vec3 materialDefault(highp float df, highp float sf)
{
lowp vec3 diffuse = vec3(0.0,1.0,0.0);
highp float exponent = 1.0;
sf = pow(sf, exponent);
return (df * diffuse);
}
lowp vec3 materialMTL(highp float df, highp float sf)
{
sf = pow(sf, uExponent);
return (df * uDiffuse);
}
lowp vec3 modelColor(void)
{
highp vec3 N = normalize(vNormal);
highp vec3 L = vec3(1.0, 1.0, 0.5);
highp vec3 E = vec3(0.0, 0.0, 1.0);
highp vec3 H = normalize(L + E);
highp float df = max(0.0, dot(N, L));
highp float sf = max(0.0, dot(N, H));
// Default
if(uMode == 0)
return materialDefault(df, sf);
// Texture
else if(uMode == 1)
return (materialDefault(df, sf) * vec3(texture2D(uTexture, vTexture)));
// Material
else if(uMode == 2)
return materialMTL(df, sf);
}
void main(void)
{
lowp vec3 color = modelColor();
gl_FragColor = vec4(color, 1.0);
}
);

It's obvious: learn the language from a book. Use vec4 everywhere (materialDefault, materialMTL, modelColor) and activate alpha blending.
Example:
lowp vec4 materialMTL(highp float df, highp float sf)
{
sf = pow(sf, uExponent);
return vec4(df * uDiffuse, 1.0);
}
In lowp vec4 modelColor(void):
return (materialDefault(df, sf) * texture2D(uTexture, vTexture));
Activate alpha blending:
glEnable (GL_BLEND);
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
What's the real problem?
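For clarity, here is a minimal sketch of the colour path once everything returns vec4, assuming the rest of the posted shader stays as it is (only the default and texture branches are shown; the material branch changes the same way):
varying mediump vec3 vNormal;
varying mediump vec2 vTexture;
uniform lowp int uMode;
uniform sampler2D uTexture;
lowp vec4 materialDefault(highp float df, highp float sf)
{
    lowp vec3 diffuse = vec3(0.0, 1.0, 0.0);
    return vec4(df * diffuse, 1.0); // opaque base colour
}
lowp vec4 modelColor(void)
{
    highp vec3 N = normalize(vNormal);
    highp vec3 L = vec3(1.0, 1.0, 0.5);
    highp vec3 E = vec3(0.0, 0.0, 1.0);
    highp vec3 H = normalize(L + E);
    highp float df = max(0.0, dot(N, L));
    highp float sf = max(0.0, dot(N, H));
    if (uMode == 1)
        // texture2D returns a vec4, so the texel's alpha survives the multiply
        return materialDefault(df, sf) * texture2D(uTexture, vTexture);
    return materialDefault(df, sf);
}
void main(void)
{
    gl_FragColor = modelColor(); // do not force the alpha back to 1.0 here
}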

Add
glEnable (GL_BLEND);
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
before rendering transparent objects.
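In the question's glkView:drawInRect: that means enabling blending right after clearing the buffers and before any transparent geometry is drawn; drawing transparent objects back to front is still up to the app. A sketch:
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
    // Clear buffers
    glClearColor(1.0, 1.0, 1.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Enable alpha blending before any transparent geometry is drawn
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    // ... set the matrices, bind the texture, enable the attributes and draw as before ...
}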

Related

WebGL - How to make specular light not change size

I am trying to implement specular lighting (coming from the front), but the highlight keeps changing size in an unnatural way. How do I fix this?
I hardcoded viewerPos for testing. I'm using the halfway-vector "shortcut" so I have to calculate fewer things, as explained here: https://webglfundamentals.org/webgl/lessons/webgl-3d-lighting-point.html
Video with my lighting implemented: https://streamable.com/j95bz7
// Vertex shader program
const vsSource = `
attribute vec4 aVertexPosition;
attribute vec3 aVertexNormal;
attribute vec2 aTextureCoord;
uniform mat4 uNormalMatrix;
uniform mat4 uModelViewMatrix;
uniform mat4 uProjectionMatrix;
uniform highp vec3 uViewPos;
varying highp vec2 vTextureCoord;
varying highp vec4 vNormal;
varying highp mat4 vModelViewMatrix;
varying highp vec3 vPos;
void main(void) {
gl_Position = uProjectionMatrix * uModelViewMatrix * aVertexPosition;
vTextureCoord = aTextureCoord; // Texture
vModelViewMatrix = uModelViewMatrix;
vPos = (uModelViewMatrix * aVertexPosition).xyz;
vNormal = uNormalMatrix * vec4(aVertexNormal, 1.0);
}
`;
// Fragment shader program
const fsSource = `
varying highp vec2 vTextureCoord;
varying highp vec4 vNormal;
varying highp mat4 vModelViewMatrix;
varying highp vec3 vPos;
uniform sampler2D uSampler;
void main(void) {
// Apply lighting effect
highp vec4 texelColor = texture2D(uSampler, vTextureCoord);
// Ambient light
highp vec3 ambientLight = 0.3 * vec3(1.0, 1.0, 1.0);
// Diffuse light
highp vec3 directionalLightColor = vec3(1, 1, 1);
highp vec3 directionalVector = vec3(0.0, 0.0, 1.0);
highp float directional = max(dot(vNormal.xyz, normalize(directionalVector)), 0.0);
// Specular light
highp vec3 viewerPos = vec3(0, 0, -6); // NOTE: pass this in to the shader, don't hardcode
highp vec3 surfaceToLightDirection = (-1.0 * directionalVector);
highp vec3 surfaceToViewDirection = (vPos - viewerPos);
highp vec3 halfVector = normalize(surfaceToLightDirection + surfaceToViewDirection);
highp float specular = max(dot(vNormal.xyz, halfVector), 0.0);
highp vec3 vLighting = ambientLight;// + (directionalLightColor * directional);
gl_FragColor = vec4(texelColor.rgb * vLighting + (specular * 0.5), texelColor.a);
}
`;
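For reference, the half-vector form described in the linked webglfundamentals article uses normalized direction vectors that both point away from the surface and raises the result to a shininess power. A minimal GLSL sketch reusing the question's variables (the shininess constant is only illustrative, not something from the question):
highp vec3 N = normalize(vNormal.xyz);
highp vec3 surfaceToLight = normalize(directionalVector);  // direction from the surface towards the light
highp vec3 surfaceToView = normalize(viewerPos - vPos);    // direction from the surface towards the camera
highp vec3 halfVector = normalize(surfaceToLight + surfaceToView);
const highp float shininess = 32.0;                        // illustrative exponent
highp float specular = pow(max(dot(N, halfVector), 0.0), shininess);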

How to blend an image mask?

How can I apply such a mask to get an effect like bokeh? I need to blur the edges of the mask and apply it to the image texture. How do I do that?
Vertex shader:
attribute vec4 a_Position;
void main()
{
gl_Position = a_Position;
}
Fragment shader:
precision lowp float;
uniform sampler2D u_Sampler; // textureSampler
uniform sampler2D u_Mask; // maskSampler
uniform vec3 iResolution;
vec4 blur(sampler2D source, vec2 size, vec2 uv) {
vec4 C = vec4(0.0);
float width = 1.0 / size.x;
float height = 1.0 / size.y;
float divisor = 0.0;
for (float x = -25.0; x <= 25.0; x++)
{
C += texture2D(source, uv + vec2(x * width, 0.0));
C += texture2D(source, uv + vec2(0.0, x * height));
divisor++;
}
C*=0.5;
return vec4(C.r / divisor, C.g / divisor, C.b / divisor, 1.0);
}
void main()
{
vec2 uv = gl_FragCoord.xy / iResolution.xy;
vec4 videoColor = texture2D(u_Sampler, uv);
vec4 maskColor = texture2D(u_Mask, uv);
gl_FragColor = blur(u_Sampler, iResolution.xy, uv);
}
Instead of writing the blur directly, mix the blurred and original colours with the mask:
vec4 blurColor = blur(u_Sampler, iResolution.xy, uv);
gl_FragColor = mix(blurColor, videoColor, maskColor.r);
But FYI, it's not common to blur in one pass like you are doing. It's more common to blur in one direction (horizontally), then blur the result of that vertically, and then mix the results: the blurred texture, the video texture, and the mask.
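A minimal sketch of one such directional blur pass (the u_Direction uniform is an assumption: render once with (1, 0) into an intermediate texture, run the same shader on that result with (0, 1), then mix with the mask as above):
precision mediump float;
uniform sampler2D u_Source;   // texture to blur (the video, or the result of the first pass)
uniform vec2 u_Resolution;
uniform vec2 u_Direction;     // (1.0, 0.0) for the horizontal pass, (0.0, 1.0) for the vertical pass
void main()
{
    vec2 uv = gl_FragCoord.xy / u_Resolution;
    vec2 texel = u_Direction / u_Resolution;
    vec4 C = vec4(0.0);
    for (float x = -25.0; x <= 25.0; x++)
    {
        C += texture2D(u_Source, uv + x * texel);
    }
    gl_FragColor = vec4(C.rgb / 51.0, 1.0); // 51 taps: -25 .. +25
}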

OpenGL ES vertex shader clarification

I have a vertex shader written as follows:
attribute vec4 position;
varying vec3 colorFactor;
void main()
{
colorFactor = vec3(1.0, 0.0, 0.0);
gl_Position = vec4(-1.0 + (position.x * 0.0078125), 0.0, 0.0, 1.0);
gl_PointSize = 1.0;
}
and in the code, vertices are submitted and drawn as follows:
glBlendEquation(GL_FUNC_ADD);
glBlendFunc(GL_ONE, GL_ONE);
glEnable(GL_BLEND);
glVertexAttribPointer(positionAttribute, 4, GL_UNSIGNED_BYTE, 0, 15*4, vertexSamplingCoordinates);
glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / 16);
The variable vertexSamplingCoordinates points to the base address of the pixel buffer.
My question is: for each vertex, what is the value of position and how is it computed? I know each component of gl_Position has to be between -1 and 1, but in each invocation of the vertex shader, what value does position have?
EDIT: Here is the fragment shader as well:
const lowp float scalingFactor = 1.0 / 256.0;
varying lowp vec3 colorFactor;
void main()
{
gl_FragColor = vec4(colorFactor * scalingFactor , 1.0);
}
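For what it's worth, with that glVertexAttribPointer call (size 4, type GL_UNSIGNED_BYTE, normalized off, stride 15*4 = 60 bytes) the value the shader sees for vertex i can be pictured with the conceptual C sketch below (not actual driver code): four bytes are read at byte offset i * 60 and converted straight to floats in the range 0..255, which is why the vertex shader rescales with -1.0 + position.x * 0.0078125 (1/128). Only gl_Position has to end up in the -1..1 range; the position attribute itself does not.
#include <stddef.h>

typedef struct { float x, y, z, w; } vec4f;

/* Conceptual fetch for:
   glVertexAttribPointer(positionAttribute, 4, GL_UNSIGNED_BYTE, 0, 15*4, vertexSamplingCoordinates); */
static vec4f fetch_position(const unsigned char *vertexSamplingCoordinates, size_t i)
{
    const unsigned char *p = vertexSamplingCoordinates + i * 60;  /* stride = 15 * 4 bytes */
    vec4f position;
    position.x = (float)p[0];  /* normalized == GL_FALSE, so each component is 0.0 .. 255.0 */
    position.y = (float)p[1];
    position.z = (float)p[2];
    position.w = (float)p[3];
    return position;
}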

basic arithmetic in iOS GLSL fragment shader

Here is my fragment shader program, which runs under iOS 5.1.1 on an iPhone 3G.
#ifdef GL_ES
precision lowp float;
#endif
varying vec2 v_texCoord;
void main()
{
float offset = sin(v_texCoord.x * 10.0);
// offset = offset * 1.0; // (!!!)
gl_FragColor = vec4(offset, 0.0, 0.0, 1.0);
}
Which produces a nice sine wave:
Note the line marked with (!!!). I assume that multiplying a float value by 1.0 should change nothing. (The same shader run under Windows works exactly as expected.) So I uncomment the line and get this:
WTF?!
How to reproduce:
Create an iOS Game project from the standard template in Xcode. If you run the project you will see two floating cubes, one red and one blue.
Change the contents of Shader.vsh to the following (I've just added one varying parameter, v_pos):
attribute vec4 position;
attribute vec3 normal;
varying lowp vec4 colorVarying;
varying lowp vec4 v_pos;
uniform mat4 modelViewProjectionMatrix;
uniform mat3 normalMatrix;
void main()
{
vec3 eyeNormal = normalize(normalMatrix * normal);
vec3 lightPosition = vec3(0.0, 0.0, 1.0);
vec4 diffuseColor = vec4(0.4, 0.4, 1.0, 1.0);
float nDotVP = max(0.0, dot(eyeNormal, normalize(lightPosition)));
colorVarying = diffuseColor * nDotVP;
gl_Position = modelViewProjectionMatrix * position;
v_pos = gl_Position;
}
Change Shader.fsh to the following:
varying lowp vec4 colorVarying;
varying lowp vec4 v_pos;
void main()
{
gl_FragColor = colorVarying;
lowp float a = sin(v_pos.x * 10.0);
// a = a * 1.0; // (!!!)
gl_FragColor = vec4(a, 0.0, 0.0, 1.0);
}
Run the program on a device and see the cool bars on the blue cube:
Uncomment the line (a = a * 1.0;) marked with (!!!) and run again:
Well, let me answer my own question.
The problem is the lowp precision qualifier used in the shader.
After changing it to mediump, the shader started to work as expected.
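For completeness, a sketch of Shader.fsh with that fix applied (only the precision of v_pos and of the intermediate value changes; lowp is only guaranteed to cover roughly the range (-2, 2), which v_pos.x * 10.0 easily exceeds; the matching declaration in Shader.vsh can be raised to mediump as well):
varying lowp vec4 colorVarying;
varying mediump vec4 v_pos;                // was lowp
void main()
{
    gl_FragColor = colorVarying;
    mediump float a = sin(v_pos.x * 10.0); // intermediate math now happens in mediump
    a = a * 1.0;                           // now harmless
    gl_FragColor = vec4(a, 0.0, 0.0, 1.0);
}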

iPad 1 vs iPad 2: different render results with OpenGL

When I run my app on an iPad 1 I see a different result than when I run the same app on an iPad 2. I'm interested in the book size in the middle of the screen.
iPad 1 result
iPad 2 result
My code is below.
- (void)drawGallery{
glDepthFunc(GL_LESS);
glEnable(GL_DEPTH_TEST);
ResourceManager *rm = [ResourceManager sharedInstance];
GLuint currProgram = [rm getProgram:PROGRAM_BASIC_LIGHTNING];
glUseProgram(currProgram);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glViewport(0, 0, 768, 1024 + 2 * GALLERY_OFFSET_Y);
GLfloat modelviewProj[16];
for (Sheet *sh in self.interfaceManager.gallery.sheets){
if ([self.interfaceManager.gallery shouldDrawSheet:sh]){
[self MakePerspectiveMatrix:modelviewProj
OriginX:0.0
OriginY:0.0
Width:SCREEN_WIDTH
Height:SCREEN_HEIGHT
Rotation:sh.rotation
TranslationX:sh.translationX
TranslationZ:sh.translationZ
ScaleX:1.0
ScaleY:SCREEN_HEIGHT/(SCREEN_HEIGHT + 2 * GALLERY_OFFSET_Y)];
vertexDataTextured *model;
if (sh.type == SHEET_TYPE_LEFT){
model = (vertexDataTextured *)[rm getModel:MODEL_SHEET_LEFT];
} else {
model = (vertexDataTextured *)[rm getModel:MODEL_SHEET_RIGHT];
}
// update uniform values
glUniformMatrix4fv(basic_lighting_uniforms[BASIC_LIGHTING_UNIFORM_MODEL_VIEW_PROJECTION_MATRIX], 1, GL_FALSE, modelviewProj);
glUniform1f(basic_lighting_uniforms[BASIC_LIGHTING_UNIFORM_ROTATION], degreeToRadians(-sh.rotation));
glActiveTexture(GL_TEXTURE0);
GLuint texture = [rm getTexture:TEXTURE_COLORING_PICTURE Index1:self.currentBook.number Index2:sh.number];
glBindTexture (GL_TEXTURE_2D, texture);
glUniform1i(basic_lighting_uniforms[BASIC_LIGHTING_UNIFORM_TEXTURE], 0);
glVertexAttribPointer(BASIC_LIGHTING_ATTRIB_VERTEX, 3, GL_FLOAT, GL_FALSE, sizeof(vertexDataTextured), &model[0].vertex);
glEnableVertexAttribArray(BASIC_LIGHTING_ATTRIB_VERTEX);
glVertexAttribPointer(BASIC_LIGHTING_ATTRIB_TEX_COORDS, 2, GL_FLOAT, GL_FALSE, sizeof(vertexDataTextured), &model[0].texCoord);
glEnableVertexAttribArray(BASIC_LIGHTING_ATTRIB_TEX_COORDS);
glDrawArrays(GL_TRIANGLES, 0, 6);
if (![self validateProgram:currProgram]) {
NSLog(#"Failed to validate program: (%d)", currProgram);
}
}
}
}
Vertex Shader:
uniform mat4 modelViewProjectionMatrix;
uniform float rotation;
attribute lowp vec3 position;
attribute lowp vec2 texCoords;
varying lowp vec2 fTexCoords;
varying lowp vec4 computed_color;
void main()
{
fTexCoords = texCoords;
vec4 postmp = vec4(position.xyz, 1.0);
vec4 newposition = modelViewProjectionMatrix * postmp;
gl_Position = newposition;
vec3 ambient_color = vec3(0.5, 0.5, 0.5);
vec3 norm = vec3(sin(rotation), 0.0, cos(rotation));
vec3 light_position1 = vec3(15.0, 0.0, 20.0);
vec3 light_direction1 = normalize( vec3 (0.2, 0.0, 1.0) );
vec4 light_diffuse_color1 = vec4 (0.4, 0.4, 0.4, 1.0);
float ndotl1 = max(0.0, dot(light_direction1, norm));
vec4 temp_color1 = ndotl1 * light_diffuse_color1 * 15.0 / (length(light_position1 - newposition.xyz));
vec3 light_position2 = vec3(-30.0, 0.0, 25.0);
vec3 light_direction2 = normalize( vec3(-0.2, 0.0, 1.0) );
vec4 light_diffuse_color2 = vec4 (0.5, 0.5, 0.5, 1.0);
float ndotl2 = max(0.0, dot(light_direction2, norm));
vec4 temp_color2 = ndotl2 * light_diffuse_color2 * 15.0 / (length(light_position2 - newposition.xyz));
computed_color = vec4(temp_color1.rgb + temp_color2.rgb + ambient_color, 1.0);
}
Fragment shader:
precision lowp float;
uniform sampler2D texture;
varying vec2 fTexCoords;
varying lowp vec4 computed_color;
void main()
{
gl_FragColor = texture2D(texture, fTexCoords) * computed_color;
}
The problem was the lowp precision used for position. Thanks to Brad Larson.
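Concretely, that means raising the precision of the position attribute in the vertex shader; lowp is only guaranteed to cover roughly the range (-2, 2) with coarse steps, and one device's GPU evidently honours lowp while the other promotes it. A minimal sketch of the changed declarations (the body of main() stays exactly as posted):
uniform mat4 modelViewProjectionMatrix;
uniform float rotation;
attribute highp vec3 position;   // was lowp; use highp (or at least mediump) for vertex positions
attribute lowp vec2 texCoords;
varying lowp vec2 fTexCoords;
varying lowp vec4 computed_color;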
