I have rendered a rectangular prism using OpenGL ES for iOS in Xcode. I want to draw a square on one of the faces of the prism (the front face). I can't use a separate data structure to hold the vertices, color coordinates, and texture coordinates, because the prism rotates with touch, and a square kept in a separate data structure would not rotate with it. So I want to build the square into the same data structure. But when I add the coordinates and specify the points of the triangles, the new square does not render. Here is the code...
typedef struct {
float Position[3];
float Color[4];
float TexCoord[2];
} Vertex;
const Vertex Vertices[] = {
// Front
{{1, -2, 0.10}, {1, 1, 1, 1}, {1, 0}},
{{1, 2, 0.10}, {1, 1, 1, 1}, {1, 1}},
{{-1, 2, 0.10}, {1, 1, 1, 1}, {0, 1}},
{{-1, -2, 0.10}, {1, 1, 1, 1}, {0, 0}},
// Back
{{1, 2, -0.10}, {1, 1, 1, 1}, {0, 1}},
{{-1, -2, -0.10}, {1, 1, 1, 1}, {1, 0}},
{{1, -2, -0.10}, {1, 1, 1, 1}, {0, 0}},
{{-1, 2, -0.10}, {1, 1, 1, 1}, {1, 1}},
// Left
{{-1, -2, 0.10}, {1, 1, 1, 1}, {1, 0}},
{{-1, 2, 0.10}, {1, 1, 1, 1}, {1, 1}},
{{-1, 2, -0.10}, {1, 1, 1, 1}, {0, 1}},
{{-1, -2, -0.10}, {1, 1, 1, 1}, {0, 0}},
// Right
{{1, -2, -0.10}, {1, 1, 1, 1}, {1, 0}},
{{1, 2, -0.10}, {1, 1, 1, 1}, {1, 1}},
{{1, 2, 0.10}, {1, 1, 1, 1}, {0, 1}},
{{1, -2, 0.10}, {1, 1, 1, 1}, {0, 0}},
// Top
{{1, 2, 0.10}, {1, 1, 1, 1}, {1, 0}},
{{1, 2, -0.10}, {1, 1, 1, 1}, {1, 1}},
{{-1, 2, -0.10}, {1, 1, 1, 1}, {0, 1}},
{{-1, 2, 0.10}, {1, 1, 1, 1}, {0, 0}},
// Bottom
{{1, -2, -0.10}, {1, 1, 1, 1}, {1, 0}},
{{1, -2, 0.10}, {1, 1, 1, 1}, {1, 1}},
{{-1, -2, 0.10}, {1, 1, 1, 1}, {0, 1}},
{{-1, -2, -0.10}, {1, 1, 1, 1}, {0, 0}},
//new square
{{1, -2, -0.1}, {1, 1, 1, 1}, {1, 0}},
{{1, -2, 0.1}, {1, 1, 1, 1}, {1, 1}},
{{-1, -2, 0.1}, {1, 1, 1, 1}, {0, 1}},
{{-1, -2, -0.1}, {1, 1, 1, 1}, {0, 0}},
};
const GLubyte Indices[] = {
// Front
0, 1, 2,
2, 3, 0,
// Back
4, 6, 5,
4, 5, 7,
// Left
8, 9, 10,
10, 11, 8,
// Right
12, 13, 14,
14, 15, 12,
// Top
16, 17, 18,
18, 19, 16,
// Bottom
20, 21, 22,
22, 23, 20,
//new square
24, 25, 26,
26, 27, 24,
};
Is the square not rendering because I messed up my coordinates, or is there another method I need to update to tell it that I am adding new coordinates? Also, I am using GLKBaseEffect, so there are no custom vertex or fragment shaders.
- (void)setupGL {
[EAGLContext setCurrentContext:self.context];
glEnable(GL_CULL_FACE);
self.effect = [[GLKBaseEffect alloc] init];
NSDictionary * options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES],
GLKTextureLoaderOriginBottomLeft,
nil];
NSError * error;
NSString *path = [[NSBundle mainBundle] pathForResource:@"" ofType:@"png"];
GLKTextureInfo * info = [GLKTextureLoader textureWithContentsOfFile:path options:options error:&error];
if (info == nil) {
NSLog(@"Error loading file: %@", [error localizedDescription]);
}
self.effect.texture2d0.name = info.name;
self.effect.texture2d0.enabled = true;
// New lines
glGenVertexArraysOES(1, &_vertexArray);
glBindVertexArrayOES(_vertexArray);
// Old stuff
glGenBuffers(1, &_vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
//glBufferData(GL_ARRAY_BUFFER, sizeof(VerticesTwo), VerticesTwo, GL_STATIC_DRAW);
glGenBuffers(1, &_indexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(Indices), Indices, GL_STATIC_DRAW);
//glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(IndicesTwo), IndicesTwo, GL_STATIC_DRAW);
// New lines (were previously in draw)
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *) offsetof(Vertex, Position));
glEnableVertexAttribArray(GLKVertexAttribColor);
glVertexAttribPointer(GLKVertexAttribColor, 4, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *) offsetof(Vertex, Color));
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *) offsetof(Vertex, TexCoord));
glEnableVertexAttribArray(GLKVertexAttribPosition);
//glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(VertexTwo), (const GLvoid *) offsetof(Vertex, Position));
glEnableVertexAttribArray(GLKVertexAttribColor);
//glVertexAttribPointer(GLKVertexAttribColor, 4, GL_FLOAT, GL_FALSE, sizeof(VertexTwo), (const GLvoid *) offsetof(VertexTwo, Color));
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
//glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(VertexTwo), (const GLvoid *) offsetof(Vertex, TexCoord));
// New line
glBindVertexArrayOES(0);
_rotMatrix = GLKMatrix4Identity;
_quat = GLKQuaternionMake(0, 0, 0, 1);
_quatStart = GLKQuaternionMake(0, 0, 0, 1);
UITapGestureRecognizer * dtRec = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(doubleTap:)];
dtRec.numberOfTapsRequired = 2;
[self.view addGestureRecognizer:dtRec];
//[self addGestureRecognizer:dtRec];
//[self.view bringSubviewToFront: self.view];
}
Where is the rest of the code? How do you draw this?
If I am reading this correctly, you simply added extra vertices that are exactly the same as the bottom face of your shape, which means you draw the same face twice and there should be no visible change in the result.
Even with the rotations, you can still keep the square as a separate object. Simply use the same matrix you use to rotate the prism and the square will rotate with it. You can even use a sub-matrix for the square to position it relative to the prism's original coordinates, and multiply the prism matrix by this matrix to get the desired result. This way you could, for instance, create a square that rotates around the prism.
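For illustration, a minimal sketch of that idea with GLKit math, assuming the touch rotation is accumulated in a matrix called _rotMatrix and that viewMatrix holds the camera transform (both names are assumptions, not taken from the question's code):
GLKMatrix4 prismModelView = GLKMatrix4Multiply(viewMatrix, _rotMatrix); // camera transform combined with the touch rotation
self.effect.transform.modelviewMatrix = prismModelView;
[self.effect prepareToDraw];
// ... draw the prism here ...
// Position the square relative to the prism, then multiply by the prism's
// matrix so every touch rotation of the prism carries the square along.
GLKMatrix4 squareOffset = GLKMatrix4MakeTranslation(0.0f, 0.0f, 0.15f); // just in front of the front face
GLKMatrix4 squareModelView = GLKMatrix4Multiply(prismModelView, squareOffset);
self.effect.transform.modelviewMatrix = squareModelView;
[self.effect prepareToDraw];
// ... draw the square here ...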
But to begin with, you could at least change the color of the square to see if you can then find it. Even an Eskimo will not see a white square on a white surface.
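As a purely hypothetical edit to the question's vertex data, the new square could get a distinct color and be nudged slightly off the bottom face so it is no longer coplanar with the existing geometry:
//new square, now red and pushed slightly below the bottom face
{{1, -2.05, -0.1}, {1, 0, 0, 1}, {1, 0}},
{{1, -2.05, 0.1}, {1, 0, 0, 1}, {1, 1}},
{{-1, -2.05, 0.1}, {1, 0, 0, 1}, {0, 1}},
{{-1, -2.05, -0.1}, {1, 0, 0, 1}, {0, 0}},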
I am trying to draw a texture with transparency onto a background gradient, which is created by a vertex shader that interpolates colors between vertices. However, only the opaque parts of the texture are being drawn.
I am using the blending function:
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
Rendering code:
struct vertex {
float position[3];
float color[4];
float texCoord[2];
};
typedef struct vertex vertex;
const vertex vertices[] = {
{{1, -1, 0}, {0, 167.0/255.0, 253.0/255.0, 1}, {1, 0}}, // BR (0)
{{1, 1, 0}, {0, 222.0/255.0, 1.0, 1}, {1, 1}}, // TR (1)
{{-1, 1, 0}, {0, 222.0/255.0, 1.0, 1}, {0, 1}}, // TL (2)
{{-1, -1, 0}, {0, 167.0/255.0, 253.0/255.0, 1}, {0, 0}}, // BL (3)
};
const GLubyte indicies[] = {
3, 2, 0, 1
};
-(void) render {
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
glVertexAttribPointer(positionSlot, 3, GL_FLOAT, GL_FALSE, sizeof(vertex), 0);
glVertexAttribPointer(colorSlot, 4, GL_FLOAT, GL_FALSE, sizeof(vertex), (GLvoid*)(sizeof(float)*3));
glVertexAttribPointer(textureCoordSlot, 2, GL_FLOAT, GL_FALSE, sizeof(vertex), (GLvoid*)(sizeof(float)*7));
glDrawElements(GL_TRIANGLE_STRIP, sizeof(indicies)/sizeof(indicies[0]), GL_UNSIGNED_BYTE, 0);
// Not sure if required for blending to work..
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
glUniform1i(textureUniform, 0);
glVertexAttribPointer(positionSlot, 3, GL_FLOAT, GL_FALSE, sizeof(vertex), 0);
glVertexAttribPointer(colorSlot, 4, GL_FLOAT, GL_FALSE, sizeof(vertex), (GLvoid*)(sizeof(float)*3));
glVertexAttribPointer(textureCoordSlot, 2, GL_FLOAT, GL_FALSE, sizeof(vertex), (GLvoid*)(sizeof(float)*7));
glDrawElements(GL_TRIANGLE_STRIP, sizeof(indicies)/sizeof(indicies[0]), GL_UNSIGNED_BYTE, 0);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
I'm unsure whether I need to do two draw passes to the render buffer in order for the blend function to work, so currently I am drawing once without the texture bound, then again with it bound.
Fragment Shader:
varying lowp vec4 destinationColor;
varying lowp vec2 texCoordOut;
uniform sampler2D tex;
void main() {
lowp vec4 tex2D = texture2D(tex, texCoordOut);
gl_FragColor = vec4(tex2D.rgb+destinationColor.rgb, tex2D.a*destinationColor.a);
}
Vertex Shader:
attribute vec4 position;
attribute vec4 sourceColor;
varying vec4 destinationColor;
attribute vec2 texCoordIn;
varying vec2 texCoordOut;
void main() {
destinationColor = sourceColor;
gl_Position = position;
texCoordOut = texCoordIn;
}
Texture loading code:
-(GLuint) loadTextureFromImage:(UIImage*)image {
CGImageRef textureImage = image.CGImage;
size_t width = CGImageGetWidth(textureImage);
size_t height = CGImageGetHeight(textureImage);
GLubyte* spriteData = (GLubyte*) malloc(width*height*4);
CGColorSpaceRef cs = CGImageGetColorSpace(textureImage);
CGContextRef c = CGBitmapContextCreate(spriteData, width, height, 8, width*4, cs, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
CGColorSpaceRelease(cs);
CGContextScaleCTM(c, 1, -1);
CGContextTranslateCTM(c, 0, -CGContextGetClipBoundingBox(c).size.height);
CGContextDrawImage(c, (CGRect){CGPointZero, {width, height}}, textureImage);
CGContextRelease(c);
GLuint glTex;
glGenTextures(1, &glTex);
glBindTexture(GL_TEXTURE_2D, glTex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)width, (int)height, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);
glBindTexture(GL_TEXTURE_2D, 0);
free(spriteData);
return glTex;
}
Any ideas what I am doing wrong?
The answer you gave yourself might suffice in your particular situation, but I don't think it is a very nice solution. You will probably run into many problems when you want to render more than two objects.
You draw the same object twice: first without a texture bound and then with the texture bound, with the blending done in the shader. But how would you do that with a third object?
I recommend using a different set of vertices for both objects. Something like this:
const vertex gradient_background[] = {
{{1, -1, 0}, {0, 167.0/255.0, 253.0/255.0, 1}, {1, 0}},
{{1, 1, 0}, {0, 222.0/255.0, 1.0, 1}, {1, 1}},
{{-1, 1, 0}, {0, 222.0/255.0, 1.0, 1}, {0, 1}},
{{-1, -1, 0}, {0, 167.0/255.0, 253.0/255.0, 1}, {0, 0}}
};
const vertex textured_object[] = {
{{1, -1, 0}, {0, 0, 0, 0}, {1, 0}},
{{1, 1, 0}, {0, 0, 0, 0}, {1, 1}},
{{-1, 1, 0}, {0, 0, 0, 0}, {0, 1}},
{{-1, -1, 0}, {0, 0, 0, 0}, {0, 0}}
};
And adjust your render function appropriately; also unbind the texture (bind texture 0) after drawing.
-(void) render {
...
glVertexAttribPointer(positionSlot, 3, GL_FLOAT, GL_FALSE, sizeof(vertex), 0);
glVertexAttribPointer(colorSlot, 4, GL_FLOAT, GL_FALSE, sizeof(vertex), (GLvoid*)(sizeof(float)*3));
glVertexAttribPointer(textureCoordSlot, 2, GL_FLOAT, GL_FALSE, sizeof(vertex), (GLvoid*)(sizeof(float)*7));
glDrawElements(GL_TRIANGLE_STRIP, sizeof(indicies)/sizeof(indicies[0]), GL_UNSIGNED_BYTE, 0);
...
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
glUniform1i(textureUniform, 0);
glVertexAttribPointer(positionSlot, 3, GL_FLOAT, GL_FALSE, sizeof(vertex), 0);
glVertexAttribPointer(colorSlot, 4, GL_FLOAT, GL_FALSE, sizeof(vertex), (GLvoid*)(sizeof(float)*3));
glVertexAttribPointer(textureCoordSlot, 2, GL_FLOAT, GL_FALSE, sizeof(vertex), (GLvoid*)(sizeof(float)*7));
glDrawElements(GL_TRIANGLE_STRIP, sizeof(indicies)/sizeof(indicies[0]), GL_UNSIGNED_BYTE, 0);
// Don't forget to unbind texture for next draw
glBindTexture(GL_TEXTURE_2D, 0);
...
}
Fragment shader
varying lowp vec4 destinationColor;
varying lowp vec2 texCoordOut;
uniform sampler2D tex;
void main() {
lowp vec4 tex2D = texture2D(tex, texCoordOut); // Returns (0, 0, 0, 1) when texture 0 is bound
gl_FragColor = destinationColor + tex2D;
}
Then use
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
Or any other blending function you wish.
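One caveat, since the question loads its texture through CGBitmapContextCreate with kCGImageAlphaPremultipliedLast: that pixel data is premultiplied by alpha, and the blend setup that normally matches premultiplied textures is:
glEnable(GL_BLEND);
// Source RGB is already multiplied by alpha, so the source factor is GL_ONE.
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);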
Okay, well I feel silly. Attempting to use the blending function outside the fragment shader didn't do anything as the texture was already drawn. I just needed to use the equivalent inside the fragment shader:
varying lowp vec4 destinationColor;
varying lowp vec2 texCoordOut;
uniform sampler2D tex;
void main() {
lowp vec4 tex2D = texture2D(tex, texCoordOut);
lowp vec4 result = tex2D + vec4(1.0 - tex2D.a) * destinationColor;
gl_FragColor = result;
}
My app (iOS 8.0) renders streaming video in a GLKViewController with OpenGL ES.
The problem is that the video gets cut off in landscape mode.
Problem capture: http://i.stack.imgur.com/3SrpF.png
Maybe I need to scale the texture, perhaps in the vertex shader.
typedef struct {
float Position[3];
float Color[4];
float TexCoordp[2];
} Vertex;
const Vertex Vertices[] = {
{{1, -1, 0}, {1, 1, 1, 1}, {1, 1}},
{{1, 1, 0}, {1, 1, 1, 1}, {1, 0}},
{{-1, 1, 0}, {1, 1, 1, 1}, {0, 0}},
{{-1, -1, 0}, {1, 1, 1, 1}, {0, 1}}
};
const GLubyte Indices[] = {
0, 1, 2,
2, 3, 0
};
#pragma mark - shaders
#define STRINGIZE(x) #x
#define STRINGIZE2(x) STRINGIZE(x)
#define SHADER_STRING(text) @ STRINGIZE2(text)
NSString *const vertexShaderString = SHADER_STRING
(
attribute vec4 Position; // 1
attribute vec4 SourceColor; // 2
varying vec4 DestinationColor; // 3
attribute vec2 TexCoordIn;
varying vec2 TexCoordOut;
void main(void) { // 4
DestinationColor = SourceColor; // 5
gl_Position = Position; // 6
TexCoordOut = TexCoordIn; // New
}
);
setup GL Code
- (void)setupGL
{
[EAGLContext setCurrentContext:self.context];
glGenBuffers(1, &_vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
glGenBuffers(1, &_indexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(Indices), Indices, GL_STATIC_DRAW);
// init the update render semaphore
_textureUpdateRenderSemaphore = dispatch_semaphore_create((long)1);
}
Maybe I need to change "Vertices", but I can't find a clue where to start.
Thank you so much for your help.
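In case it is useful, here is a minimal sketch of an aspect-fit approach on the CPU side: shrink the full-screen quad so the video keeps its aspect ratio inside the view, then re-upload the vertices. videoWidth/videoHeight and viewWidth/viewHeight are assumed values; substitute the real stream and drawable sizes, and recompute whenever the orientation changes.
// Aspect-fit: scale the quad down on one axis so the video is not cut off.
float viewAspect = viewWidth / viewHeight;
float videoAspect = videoWidth / videoHeight;
float sx = 1.0f, sy = 1.0f;
if (videoAspect > viewAspect) {
    sy = viewAspect / videoAspect;   // wide video: letterbox top and bottom
} else {
    sx = videoAspect / viewAspect;   // tall video: pillarbox left and right
}
Vertex scaled[4];
memcpy(scaled, Vertices, sizeof(scaled));
for (int i = 0; i < 4; i++) {
    scaled[i].Position[0] *= sx;
    scaled[i].Position[1] *= sy;
}
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(scaled), scaled, GL_DYNAMIC_DRAW);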
I have a cube which I'm drawing on screen. I have activated depth testing so that it is not transparent. Viewed from the front and the back, the cube looks good,
but when translating and rotating the cube the results are strange. Here are some photos to demonstrate:
front:
sides:
This see-through effect is not welcome, of course.
Here is some relevant code:
Indices & vertices:
const Vertex Vertices[] = {
{{1, -1, 0}, {1, 1, 1, 1}, {1, 0}},
{{1, 1, 0}, {1, 1, 1, 1}, {1, 1}},
{{-1, 1, 0}, {1, 1, 1, 1}, {0, 1}},
{{-1, -1, 0}, {1, 1, 1, 1}, {0, 0}},
{{1, -1, -1}, {1, 1, 1, 1}, {1, 0}},
{{1, 1, -1}, {1, 1, 1, 1}, {1, 1}},
{{-1, 1, -1}, {1, 1, 1, 1}, {0, 1}},
{{-1, -1, -1}, {1, 1, 1, 1}, {0, 0}}
};
const GLubyte Indices[] = {
// Front
0, 1, 2,
2, 3, 0,
// Back
4, 6, 5,
4, 7, 6,
// Left
2, 7, 3,
7, 6, 2,
// Right
0, 4, 1,
4, 1, 5,
// Top
6, 2, 1,
1, 6, 5,
// Bottom
0, 3, 7,
0, 7, 4,
};
Layer setup:
- (void)setupLayer {
_eaglLayer = (CAEAGLLayer*) self.layer;
_eaglLayer.opaque = YES;
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_NEVER);
}
Depth buffer setup:
- (void)setupDepthBuffer {
glGenRenderbuffers(1, &_depthRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _depthRenderBuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, self.frame.size.width, self.frame.size.height);
}
Rendering:
- (void)render:(CADisplayLink*)displayLink {
glClearColor(0, 0, 0, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
CC3GLMatrix *projection = [CC3GLMatrix matrix];
float h = 4.0f * self.frame.size.height / self.frame.size.width;
[projection populateFromFrustumLeft:-2 andRight:2 andBottom:-h/2 andTop:h/2 andNear:4 andFar:10];
glUniformMatrix4fv(_projectionUniform, 1, 0, projection.glMatrix);
CC3GLMatrix *modelView = [CC3GLMatrix matrix];
[modelView populateFromTranslation:CC3VectorMake(sin(CACurrentMediaTime()), 0, -5)];
_currentRotation += displayLink.duration * 90;
[modelView rotateBy:CC3VectorMake(_currentRotation, _currentRotation, 0)];
glUniformMatrix4fv(_modelViewUniform, 1, 0, modelView.glMatrix);
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
glVertexAttribPointer(_positionSlot, 3, GL_FLOAT, GL_FALSE,
sizeof(Vertex), 0);
glVertexAttribPointer(_colorSlot, 4, GL_FLOAT, GL_FALSE,
sizeof(Vertex), (GLvoid*) (sizeof(float)* 3));
glVertexAttribPointer(_texCoordSlot, 2, GL_FLOAT, GL_FALSE,
sizeof(Vertex), (GLvoid*) (sizeof(float) * 7));
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _floorTexture);
glUniform1i(_textureUniform, 0);
glDrawElements(GL_TRIANGLE_STRIP, sizeof(Indices)/sizeof(Indices[0]),
GL_UNSIGNED_BYTE, 0);
[_context presentRenderbuffer:GL_RENDERBUFFER];
}
EDIT:
adding the texture mapping function:
- (GLuint)setupTexture:(NSString *)fileName {
// 1
CGImageRef spriteImage = [UIImage imageNamed:fileName].CGImage;
if (!spriteImage) {
NSLog(@"Failed to load image %@", fileName);
exit(1);
}
// 2
size_t width = CGImageGetWidth(spriteImage);
size_t height = CGImageGetHeight(spriteImage);
GLubyte * spriteData = (GLubyte *) calloc(width*height*4, sizeof(GLubyte));
CGContextRef spriteContext = CGBitmapContextCreate(spriteData, width, height, 8, width*4,
CGImageGetColorSpace(spriteImage), kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
// 3
CGContextDrawImage(spriteContext, CGRectMake(0, 0, width, height), spriteImage);
CGContextRelease(spriteContext);
// 4
GLuint texName;
glGenTextures(1, &texName);
glBindTexture(GL_TEXTURE_2D, texName);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);
free(spriteData);
return texName;
}
What am I doing wrong here?
glDepthFunc(GL_NEVER); - at the very least, replace this with GL_LEQUAL. GL_NEVER means no fragment ever passes the depth test, and the fact that you can still see the cube at all suggests the depth buffer is not actually in effect (for example, not attached to the framebuffer); a depth test that isn't working is exactly what produces this see-through look.
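As a sketch of the corrected setup (the glFramebufferRenderbuffer call is only needed if the depth renderbuffer is not already attached elsewhere in your framebuffer setup):
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);   // let nearer (or equally near) fragments through
// Make sure the depth renderbuffer actually backs the framebuffer,
// otherwise the depth test has nothing to test against.
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthRenderBuffer);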
I am learning OpenGL ES 2.0.
I am trying to apply an orthographic projection in OpenGL ES 2.0.
I draw a square, but I don't actually get a square shape on the screen.
I am not sure which part I am missing.
Thank you for your help!
There are some methods in setupRenderingEnv that I did not post, but those methods are for setting up the frames and they work fine.
And m_program is created fine.
Again, thank you for your help.
// my vertex shader
attribute vec4 Position;
attribute vec4 SourceColor;
uniform mat4 Projection;
varying vec4 DestinationColor;
void main(void)
{
DestinationColor = SourceColor;
gl_Position = Projection * Position;
}
// my drawing file
typedef struct
{
float Position[3];
float Color[4];
}Vertex;
const Vertex Vertices[] =
{
{{100, -100, 0}, {1, 0, 0, 1}},
{{100, 100, 0}, {0, 1, 0, 1}},
{{-100, 100, 0}, {0, 0, 1, 1}},
{{-100, -100, 0}, {0, 0, 0, 1}}
};
const GLubyte Indices[] =
{
0, 1, 2,
2, 3, 0
};
- (void)setupRenderingEnv
{
[super setupRenderingEnv];
[self setupVertexBufferObjects];
[self setupRunLoop];
[self applyOrthoWithX:self.frame.size.width andY:self.frame.size.height];
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
}
//-- used for applying ortho in opengl es 2.0
- (void)applyOrthoWithX:(float)maxX andY:(float)maxY
{
float a = 1.0f / maxX;
float b = 1.0f / maxY;
float ortho[16] =
{
a, 0, 0, 0,
0, b, 0, 0,
0, 0, -1, 0,
0, 0, 0, 1
};
GLint projectionUniform = glGetUniformLocation(super.m_programHandle, "Projection");
glUniformMatrix4fv(projectionUniform, 1, 0, &ortho[0]);
}
//-- overriding drawCandle. it render image, draw candle
- (void)drawCandle
{
glClearColor(0, 104.0/255, 55.0/255, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
GLuint positionSlot = glGetAttribLocation(super.m_programHandle, "Position");
GLuint colorSlot = glGetAttribLocation(super.m_programHandle, "SourceColor");
glEnableVertexAttribArray(positionSlot);
glEnableVertexAttribArray(colorSlot);
glVertexAttribPointer(positionSlot, 3, GL_FLOAT, GL_FALSE,
sizeof(Vertex), 0);
glVertexAttribPointer(colorSlot, 4, GL_FLOAT, GL_FALSE,
sizeof(Vertex), (GLvoid *)(sizeof(float) * 3));
glDrawElements(GL_TRIANGLES, sizeof(Indices)/sizeof(Indices[0]), GL_UNSIGNED_BYTE, 0);
glDisableVertexAttribArray(positionSlot);
glDisableVertexAttribArray(colorSlot);
[super drawCandle];
}
What shape is your viewport? If it's not square, then that's the problem. The matrix you're creating - scaling by the inverse width and the inverse height - maps the full width and the full height onto the same clip-space range. If the width and height aren't the same number of pixels, then squares won't draw square. You need to account for the aspect ratio of your viewport. Off the top of my head, I think it would be something more like this:
float ortho [ 16 ] = {
a / b, 0, 0, 0,
0, b, 0, 0,
0, 0, -1, 0,
0, 0, 0, 1
};
(I might have a/b inverted - can't remember)
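If GLKit is available, a sketch that avoids hand-building the matrix is GLKMatrix4MakeOrtho, sized in the same units as the vertex data (assuming self.frame matches the drawable size):
// Orthographic projection in pixel/point units: the 200x200-unit quad in
// Vertices then renders as a square regardless of the view's aspect ratio.
GLKMatrix4 ortho = GLKMatrix4MakeOrtho(-self.frame.size.width / 2, self.frame.size.width / 2,
                                       -self.frame.size.height / 2, self.frame.size.height / 2,
                                       -1, 1);
GLint projectionUniform = glGetUniformLocation(super.m_programHandle, "Projection");
glUniformMatrix4fv(projectionUniform, 1, GL_FALSE, ortho.m);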