So I can't get my shader to render with color. My shader works when I don't set the color using the attribute Color.
My vertex struct is:
typedef struct
{
GLKVector3 Position; // Position
GLKVector4 Color;    // 32-bit color
GLKVector3 Normal;   // For lighting
GLKVector2 TexCoord; // For texturing
} Vertex;
I have set the colors for all vertices to [1, 0, 0, 1].
My vertex shader is this:
attribute vec3 Position;
attribute vec4 Color;
attribute vec3 Normal;
attribute vec2 TexCoord;
uniform mat4 ModelViewMatrix;
uniform mat4 ProjectionMatrix;
varying vec4 DestinationColor;
void main(void)
{
gl_Position = ProjectionMatrix * ModelViewMatrix * vec4(Position, 1.0);
DestinationColor = Color;
}
And my Fragment Shader is this:
precision mediump float;
varying lowp vec4 DestinationColor;
void main (void)
{
gl_FragColor = DestinationColor;
}
And it displays nothing.
It doesn't even work if I change the fragment shader to say gl_FragColor = vec4(1, 0, 0, 1); unless I comment out the line in the vertex shader that sets DestinationColor.
Please help; I have been sitting on this for a while now.
I found the answer to this problem, but I can't access my old account (bobjamin), so I am using this new one.
The solution was fairly simple.
Firstly, I should mention that drhass' suggestion did help, in that it allowed me to set a static color from the vertex shader and it would display. However, the problem was that the name Color must be a reserved keyword, and it was causing problems.
The answer was to change the attribute Color to SourceColor, and everything worked fine!
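For reference, a minimal sketch of the renamed vertex shader (note that any matching glGetAttribLocation or glBindAttribLocation call in the app code has to be updated to the new name as well):
attribute vec3 Position;
attribute vec4 SourceColor; // renamed from 'Color'
uniform mat4 ModelViewMatrix;
uniform mat4 ProjectionMatrix;
varying vec4 DestinationColor;
void main(void)
{
gl_Position = ProjectionMatrix * ModelViewMatrix * vec4(Position, 1.0);
DestinationColor = SourceColor;
}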
I have a vertex shader which works fine on Windows with OpenGL. I want to use the same shader on an iPad which supports OpenGL ES2.0.
Compilation of the shader fails with:
Invalid storage qualifiers 'out' in global variable context
From what I have read, the 'out' keyword requires GLSL 1.50, which the iPad won't support. Is there an equivalent keyword to 'out' that I can use to pass the color into my fragment shader?
attribute vec4 vPosition;
attribute vec4 vColor;
uniform mat4 MVP;
out vec4 pass_Color;
void main()
{
gl_Position = MVP * vPosition;
pass_Color = vColor;
}
I use this vertex shader to create gradient blends, so I'm assigning a color to each vertex of a triangle, and then the fragment shader interpolates the color between the vertices. That's why I'm not passing a single color directly into the fragment shader.
Solved! In the GLSL ES 1.0 that I'm using, I need to use 'varying' instead of 'in' and 'out'. Here's the working vertex shader:
attribute vec4 vPosition;
attribute vec4 vColor;
uniform mat4 MVP;
varying vec4 pass_Color;
void main()
{
gl_Position = MVP * vPosition;
pass_Color = vColor;
}
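For completeness, the matching fragment shader just declares the same varying (a minimal sketch):
precision mediump float;
varying vec4 pass_Color;
void main()
{
gl_FragColor = pass_Color;
}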
I have been trying to program a basic WebGL spotlight shader, but no matter how hard I try, I cannot get the spotlight's position to be relative to the world. The code that I am currently using is below. I have tried almost every coordinate frame I can to get this working, but no matter what I do, I only get partially correct results.
For example, if I switch to world coordinates the spotlight's position will be correct, but it will only reflect off one object; and if I use view space the light will work, but its position is relative to the camera.
In its current state, the spotlight seems to be relative to each object's frame. (Not sure why.) Any help in solving this issue is greatly appreciated.
Vertex Shader:
attribute vec4 vPosition;
attribute vec4 vNormal;
attribute vec2 vTexCoord;
varying vec3 L, E, N, D;
varying vec2 fTexCoord;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform mat4 NormalMatrix;
uniform mat4 View;
uniform vec4 lightPosition;
uniform vec4 lightDirection;
uniform vec3 Eye;
void main(){
L = (modelViewMatrix * lightPosition).xyz; // Light position in eye coordinates
E = (modelViewMatrix * vPosition).xyz; // Vertex position in eye coordinates
// Normal in eye coordinates: transpose(inverse(modelViewMatrix)) * vNormal
N = (NormalMatrix * vNormal).xyz;
D = lightDirection.xyz; // Light direction
fTexCoord=vTexCoord;
gl_Position = projectionMatrix * modelViewMatrix * vPosition;
}
Fragment Shader:
precision mediump float;
uniform vec4 lDiffuseColor;
uniform vec4 lSpecular;
uniform vec4 lAmbientColor;
uniform float lShininess;
varying vec3 L, E, N, D;
const float lExponent=2.0;
const float lCutoff=0.867;
vec3 lWeight=vec3(0,0,0);
void main(){
vec3 vtoLS=normalize(L - E);//Vector to light source from vertex.
float Ks=pow(max(dot(normalize(N),vtoLS),0.0),lShininess);
vec3 specular=Ks* lSpecular.xyz;
float diffuseWeight=max(dot(normalize(N), -vtoLS),0.0);
vec3 diffuse=diffuseWeight * lDiffuseColor.xyz;
if(diffuseWeight >0.0){
float lEffect= dot(normalize(D),normalize(-vtoLS));
if(lEffect > lCutoff){
lEffect= pow(lEffect,Ks);
vec3 reflection= normalize(reflect(-vtoLS,normalize(N)));
vec3 vEye=-normalize(E);
float rdotv=max(dot(reflection,vEye),0.0);
float specularWeight=pow(rdotv,lShininess);
lWeight= (lEffect * diffuse.xyz + lEffect * specular.xyz) + vec3(0.5,0,0);
}
}
lWeight+=lAmbientColor.xyz;
gl_FragColor=vec4(lWeight.rgb,1);
}
Current Output: http://sta.sh/012uh5hwwlse
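For what it's worth, one common arrangement (only a sketch, under the assumption that lightPosition and lightDirection are given in world coordinates) is to transform the light by the view matrix alone, so that it picks up the camera transform but not each object's model transform:
L = (View * lightPosition).xyz; // light position in eye space, independent of the model matrix
E = (modelViewMatrix * vPosition).xyz; // vertex position in eye space
D = normalize((View * vec4(lightDirection.xyz, 0.0)).xyz); // spot direction rotated into eye space (w = 0: no translation)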
When I render my content into an FBO with a texture bound to it and then render that texture to a fullscreen quad using a basic shader, the performance drops ridiculously.
For example:
Rendering to the screen directly (with the basic shader):
And when rendering to a texture first, then drawing that texture with a fullscreen quad (same basic shader; normally this would be something like blur or bloom):
Does anyone have an idea how to speed this up? The current performance is not usable. Also, I'm using GLKit for the basic OpenGL stuff.
You need to use precision qualifiers where they matter:
lowp - for colors, texture coordinates, normals, etc.
highp - for matrices and vertices/positions
See the quick reference card; the ranges of the precision qualifiers are listed on page 3 under "Qualifiers".
// BasicShader.vsh
precision mediump float;
attribute highp vec2 position;
attribute lowp vec2 texCoord;
attribute lowp vec4 color;
varying lowp vec2 textureCoord;
varying lowp vec4 textureColor;
uniform highp mat4 projectionMat;
uniform highp mat4 worldMat;
void main() {
highp mat4 worldProj = projectionMat * worldMat; // projection applied last (standard column-major convention)
gl_Position = worldProj * vec4(position, 0.0, 1.0);
textureCoord = texCoord;
textureColor = color;
}
// BasicShader.fsh
precision mediump float;
varying lowp vec2 textureCoord;
varying lowp vec4 textureColor;
uniform sampler2D sampler;
void main() {
lowp vec4 Color = texture2D(sampler, textureCoord);
gl_FragColor = Color * textureColor;
}
This is very likely caused by poorly performing OpenGL ES API calls.
You should attach a real device and do an OpenGL ES frame capture. (It really needs a real device; the frame-capture option won't be available with a simulator.)
The frame capture will flag memory and other warnings, along with suggestions for fixing them, alongside each API call. Step through these and fix each one. The performance should improve considerably.
Here are a couple of references to get this done:
Debugging openGL ES frame
Xcode tools overview
I am still getting used to OpenGL with shaders; I used OpenGL ES 1.0 before, but it's time to update my knowledge! Now I have a problem with the simple shaders I'm looking at, and I have searched for two days straight with no luck of a solution.
The problem is this: I render some cubes with a VBO in the form of (Vx, Vy, Vz, NormalX, NormalY, NormalZ, ColorR, ColorG, ColorB, ColorA). This works nicely when I render it without the shader, but I have to use the shader for translation and stuff (I know it can be done without, but bear with me). Here is my vertex shader, the default from the OpenGL template in Xcode:
attribute vec4 position;
attribute vec3 normal;
uniform vec3 translation;
varying lowp vec4 colorVarying;
uniform mat4 modelViewProjectionMatrix;
uniform mat3 normalMatrix;
void main()
{
vec3 eyeNormal = normalize(normalMatrix * normal);
vec3 lightPosition = vec3(0.0, 0.0, 10.0);
vec4 diffuseColor = vec4(0.4, 0.4, 1.0, 1.0);
float nDotVP = max(0.0, dot(eyeNormal, normalize(lightPosition)));
colorVarying = diffuseColor * nDotVP;
gl_Position = modelViewProjectionMatrix * (position + vec4(translation, 1));
}
And the fragment shader, also default:
varying lowp vec4 colorVarying;
void main()
{
gl_FragColor = colorVarying;
}
Now this ALWAYS renders whatever triangles I draw in the same color (defined by diffuseColor), without regard for the colors in the VBO. So I have tried and failed with other fragment shaders like gl_FragColor = gl_FrontColor;, but gl_FrontColor/gl_Color etc. aren't included in OpenGL ES and are deprecated in OpenGL 3.x or something. I have also viewed code using texture samplers, but since I'm not using textures but colors, it gets a bit complicated for a beginner.
So my question is this: how would I have my fragment shader find the material color of the current fragment being shaded?
If I should pass the colors in an array to the shaders, how would I do that, and how, then, would I reference it with regard to the currently shading fragment?
(Some 'also's: I tried not using a fragment shader, but OpenGL doesn't allow using only a vertex shader. I also tried simply removing the gl_FragColor = colorVarying; line, but that leaves the colors really screwed up.)
You need to add a colour attribute to your shader:
attribute vec4 position;
attribute vec3 normal;
attribute vec4 colour;
...and use that attribute instead of diffuseColor.
You must also tell OpenGL where to find that vertex attribute within your VBO using glVertexAttribPointer (I assume you are doing this for the position and normal attributes already).
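A minimal sketch of what that setup could look like for the interleaved layout described in the question (3 position floats, 3 normal floats, 4 colour floats per vertex; program is assumed to be the linked shader program):
// Interleaved VBO layout: (Vx, Vy, Vz, Nx, Ny, Nz, R, G, B, A)
GLsizei stride = 10 * sizeof(GLfloat);
GLint posLoc = glGetAttribLocation(program, "position");
GLint normLoc = glGetAttribLocation(program, "normal");
GLint colourLoc = glGetAttribLocation(program, "colour");
glEnableVertexAttribArray(posLoc);
glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE, stride, (const GLvoid *)0);
glEnableVertexAttribArray(normLoc);
glVertexAttribPointer(normLoc, 3, GL_FLOAT, GL_FALSE, stride, (const GLvoid *)(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(colourLoc);
glVertexAttribPointer(colourLoc, 4, GL_FLOAT, GL_FALSE, stride, (const GLvoid *)(6 * sizeof(GLfloat)));
In the vertex shader, the diffuse term then becomes colorVarying = colour * nDotVP; instead of using the hard-coded diffuseColor.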
I'm experimenting with some lighting techniques on iOS and I've been able to produce some effects that I'm pleased with by taking advantage of iOS' OpenGL ES extensions for depth lookup textures and a relatively simple Blinn-Phong shader:
The above shows 20 Suzanne monkeys being rendered at full-screen retina with multi-sampling and the following shader. I'm doing multi-sampling because it is only adding 1ms per frame. My current average render time is 30ms total (iPad 3), which is far too slow for 60fps.
Vertex shader:
//Position
uniform mat4 mvpMatrix;
attribute vec4 position;
uniform mat4 depthMVPMatrix;
uniform mat4 vpMatrix;
//Shadow out
varying vec3 ShadowCoord;
//Lighting
attribute vec3 normal;
varying vec3 normalOut;
uniform mat3 normalMatrix;
varying vec3 vertPos;
uniform vec4 lightColor;
uniform vec3 lightPosition;
void main() {
gl_Position = mvpMatrix * position;
//Used for handling shadows
ShadowCoord = (depthMVPMatrix * position).xyz;
ShadowCoord.z -= 0.01;
//Lighting calculations
normalOut = normalize(normalMatrix * normal);
vec4 vertPos4 = vpMatrix * position;
vertPos = vertPos4.xyz / vertPos4.w;
}
Fragment shader:
#extension GL_EXT_shadow_samplers : enable
precision lowp float;
uniform sampler2DShadow shadowTexture;
varying vec3 normalOut;
uniform vec3 lightPosition;
varying vec3 vertPos;
varying vec3 ShadowCoord;
uniform vec4 fillColor;
uniform vec3 specColor;
void main() {
vec3 normal = normalize(normalOut);
vec3 lightDir = normalize(lightPosition - vertPos);
float lambertian = max(dot(lightDir,normal), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
vec3 viewDir = normalize(-vertPos);
float specAngle = max(dot(reflectDir, viewDir), 0.0);
float specular = pow(specAngle, 16.0);
gl_FragColor = vec4((lambertian * fillColor.xyz + specular * specColor) * shadow2DEXT(shadowTexture, ShadowCoord), fillColor.w);
}
I've read that it is possible to use textures as lookup tables to reduce computation in the fragment shader; however, the linked example seems to be doing full Phong lighting, rather than Blinn-Phong (I'm not doing anything with surface tangents). Furthermore, when running the sample, the lighting seemed fairly banded (the background on mine, which is a solid color + Phong shading, looks slightly banded as a result of compression; it looks far smoother on the device). Is it possible to use a lookup texture in my case, or am I going to have to move down to 30fps (which I can just about achieve), turn off multi-sampling, and limit Phong shading to the monkeys rather than the full screen? In a real-world (i.e. game) scenario, am I going to need to be doing Phong shading across the entire screen anyway?
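For reference, one way to try the lookup-table idea here (a sketch only; powLUT is a hypothetical 256x1 texture whose texel at coordinate u stores pow(u, 16.0)) is to bake the specular pow() into a texture and replace the pow call in the fragment shader:
uniform sampler2D powLUT; // hypothetical 256x1 LUT: texel at u = pow(u, 16.0)
// ...then, in main(), in place of 'float specular = pow(specAngle, 16.0);':
float specular = texture2D(powLUT, vec2(specAngle, 0.5)).r;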