OpenGL rendering not proper on iOS when using glDrawArrays(GL_TRIANGLES, 0, numVerts) - ios

I am using the following code to render a model exported as a .h file for OpenGL.
However, the eventual result comes out as scattered triangles and not the whole model. Please see the attached image. Can anyone guide me as to why this is happening? I am new to OpenGL.
I want to develop an app like - https://itunes.apple.com/us/app/mclaren-p1/id562173543?mt=8
- (void)renderFrameQCAR
{
    [self setFramebuffer];

    // Clear colour and depth buffers
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Render video background and retrieve tracking state
    QCAR::State state = QCAR::Renderer::getInstance().begin();
    QCAR::Renderer::getInstance().drawVideoBackground();
    //NSLog(@"active trackables: %d", state.getNumActiveTrackables());

    if (QCAR::GL_11 & qUtils.QCARFlags) {
        glEnable(GL_TEXTURE_2D);
        glEnable(GL_LIGHTING);
        glEnableClientState(GL_VERTEX_ARRAY);
        glEnableClientState(GL_NORMAL_ARRAY);
        glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    }

    glEnable(GL_DEPTH_TEST);

    // We must detect if background reflection is active and adjust the culling direction.
    // If the reflection is active, this means the pose matrix has been reflected as well,
    // therefore standard counter-clockwise face culling will result in "inside out" models.
    glEnable(GL_CULL_FACE);
    glCullFace(GL_BACK);
    if (QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection == QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
        glFrontFace(GL_CW);  // Front camera
    else
        glFrontFace(GL_CCW); // Back camera

    for (int i = 0; i < state.getNumTrackableResults(); ++i) {
        // Get the trackable
        const QCAR::TrackableResult* result = state.getTrackableResult(i);
        const QCAR::Trackable& trackable = result->getTrackable();
        QCAR::Matrix44F modelViewMatrix = QCAR::Tool::convertPose2GLMatrix(result->getPose());

        // Choose the texture based on the target name
        int targetIndex = 0; // "stones"
        if (!strcmp(trackable.getName(), "chips"))
            targetIndex = 1;
        else if (!strcmp(trackable.getName(), "tarmac"))
            targetIndex = 2;

        Object3D *obj3D = [objects3D objectAtIndex:targetIndex];

        // Render using the appropriate version of OpenGL
        if (QCAR::GL_11 & qUtils.QCARFlags) {
            // Load the projection matrix
            glMatrixMode(GL_PROJECTION);
            glLoadMatrixf(qUtils.projectionMatrix.data);

            // Load the model-view matrix
            glMatrixMode(GL_MODELVIEW);
            glLoadMatrixf(modelViewMatrix.data);
            glTranslatef(0.0f, 0.0f, -kObjectScale);
            glScalef(kObjectScale, kObjectScale, kObjectScale);

            // Draw object
            glBindTexture(GL_TEXTURE_2D, [obj3D.texture textureID]);
            glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*)obj3D.texCoords);
            glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*)obj3D.vertices);
            glVertexPointer(3, GL_FLOAT, 0, MclarenInfoVerts);
            glNormalPointer(GL_FLOAT, 0, MclarenInfoNormals);
            glTexCoordPointer(2, GL_FLOAT, 0, MclarenInfoTexCoords);

            // draw data
            glDrawArrays(GL_TRIANGLES, 0, MclarenInfoNumVerts);
        }
#ifndef USE_OPENGL1
        else {
            // OpenGL 2
            QCAR::Matrix44F modelViewProjection;
            ShaderUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale, &modelViewMatrix.data[0]);
            ShaderUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale, &modelViewMatrix.data[0]);
            ShaderUtils::multiplyMatrix(&qUtils.projectionMatrix.data[0], &modelViewMatrix.data[0], &modelViewProjection.data[0]);

            glUseProgram(shaderProgramID);

            glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.vertices);
            glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.normals);
            glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.texCoords);

            glEnableVertexAttribArray(vertexHandle);
            glEnableVertexAttribArray(normalHandle);
            glEnableVertexAttribArray(textureCoordHandle);

            glActiveTexture(GL_TEXTURE0);
            glBindTexture(GL_TEXTURE_2D, [obj3D.texture textureID]);
            glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE, (const GLfloat*)&modelViewProjection.data[0]);
            glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);

            glVertexPointer(3, GL_FLOAT, 0, MclarenInfoVerts);
            glNormalPointer(GL_FLOAT, 0, MclarenInfoNormals);
            glTexCoordPointer(2, GL_FLOAT, 0, MclarenInfoTexCoords);

            // draw data
            glDrawArrays(GL_TRIANGLES, 0, MclarenInfoNumVerts);
            ShaderUtils::checkGlError("EAGLView renderFrameQCAR");
        }
#endif
    }

    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    if (QCAR::GL_11 & qUtils.QCARFlags) {
        glDisable(GL_TEXTURE_2D);
        glDisableClientState(GL_VERTEX_ARRAY);
        glDisableClientState(GL_NORMAL_ARRAY);
        glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    }
#ifndef USE_OPENGL1
    else {
        glDisableVertexAttribArray(vertexHandle);
        glDisableVertexAttribArray(normalHandle);
        glDisableVertexAttribArray(textureCoordHandle);
    }
#endif

    QCAR::Renderer::getInstance().end();
    [self presentFramebuffer];
}

OK, I found the issue. It so happened that my designer was giving me a file with quadrilateral rendering, whereas Vuforia/Qualcomm/Unity etc. recognise only triangulated .obj files. For anyone who gets stuck here :) just let your designer know to export with triangulation, not quads.
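For anyone wanting to see what the difference amounts to in the data: a quad face carries four indices, and a triangulated export simply splits each one into two triangles. Below is a minimal, hypothetical sketch of that expansion (the helper is mine, not part of Vuforia or the sample code):

static void triangulateQuads(const unsigned short *quadIndices, int numQuads,
                             unsigned short *triIndices /* 6 per quad */)
{
    for (int i = 0; i < numQuads; ++i) {
        const unsigned short *q = &quadIndices[i * 4];
        unsigned short *t = &triIndices[i * 6];
        // Quad (q0, q1, q2, q3) becomes triangles (q0, q1, q2) and (q0, q2, q3),
        // which is exactly what a triangulated .obj export does for you.
        t[0] = q[0]; t[1] = q[1]; t[2] = q[2];
        t[3] = q[0]; t[4] = q[2]; t[5] = q[3];
    }
}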

Related

iOS YUV 420v using GL_TEXTURE_2D shows wrong colour in OpenGL shader

Goal: to use GL_TEXTURE_2D instead of CVOpenGLESTextureRef to push the YUV data (format is '420v', kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) to the shaders. (Why? Because I need to use glTexSubImage2D to manipulate pixels, and that call has no effect when the target is CVOpenGLESTextureGetTarget(<name>); I must use GL_TEXTURE_2D.)
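For illustration only (names like frameTextureY and newBytes are placeholders, not from the project), the kind of partial update glTexSubImage2D enables looks like this, and it only behaves as expected when the bound target is a plain GL_TEXTURE_2D:
glBindTexture(GL_TEXTURE_2D, frameTextureY);
// Overwrite a 64x64 region starting at (16, 16); the format/type pair must
// match how the texture was allocated (GL_LUMINANCE in the code below).
glTexSubImage2D(GL_TEXTURE_2D, 0,
                16, 16,    // x/y offset into the texture
                64, 64,    // size of the region to replace
                GL_LUMINANCE, GL_UNSIGNED_BYTE, newBytes);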
Problem:
I am using a custom video compositor to manipulate an AVPlayer video. When I use CVOpenGLESTextureRef as in Apple's AVCustomEdit sample code, which uses 2 separate shaders, one for Y (luma) and one for UV (chroma), the video looks normal, like this:
But trying to use GL_TEXTURE_2D instead makes the video show just green and pink colours, like this:
And if I use GL_TEXTURE_2D with the fragment shader that combines both the Y and UV textures, it looks even worse:
My code:
First the track buffer and destination buffer are created:
CVPixelBufferRef foregroundSourceBuffer = [request sourceFrameByTrackID:currentInstruction.foregroundTrackID];
CVPixelBufferRef dstBuffer = [_renderContext newPixelBuffer];
Then they get passed to the render function which contains the following relevant code:
CVOpenGLESTextureRef foregroundLumaTexture = [self lumaTextureForPixelBuffer:foregroundPixelBuffer];
CVOpenGLESTextureRef foregroundChromaTexture = [self chromaTextureForPixelBuffer:foregroundPixelBuffer];
CVOpenGLESTextureRef destLumaTexture = [self lumaTextureForPixelBuffer:destinationPixelBuffer];
CVOpenGLESTextureRef destChromaTexture = [self chromaTextureForPixelBuffer:destinationPixelBuffer];
The luma texture function returns this:
CVOpenGLESTextureRef luma = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                         _videoTextureCache,
                                                                         pixelBuffer,
                                                                         NULL,
                                                                         GL_TEXTURE_2D,
                                                                         GL_RED_EXT,
                                                                         (int)CVPixelBufferGetWidth(pixelBuffer),
                                                                         (int)CVPixelBufferGetHeight(pixelBuffer),
                                                                         GL_RED_EXT,
                                                                         GL_UNSIGNED_BYTE,
                                                                         0,
                                                                         &lumaTexture);
The chroma texture function returns this:
CVOpenGLESTextureRef chroma = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                           _videoTextureCache,
                                                                           pixelBuffer,
                                                                           NULL,
                                                                           GL_TEXTURE_2D,
                                                                           GL_RG_EXT,
                                                                           (int)CVPixelBufferGetWidthOfPlane(pixelBuffer, 1),
                                                                           (int)CVPixelBufferGetHeightOfPlane(pixelBuffer, 1),
                                                                           GL_RG_EXT,
                                                                           GL_UNSIGNED_BYTE,
                                                                           1,
                                                                           &chromaTexture);
Now the relevant body of the render function:
glBindFramebuffer(GL_FRAMEBUFFER, self.offscreenBufferHandle);
glViewport(0, 0, (int)CVPixelBufferGetWidthOfPlane(destinationPixelBuffer, 0), (int)CVPixelBufferGetHeightOfPlane(destinationPixelBuffer, 0));

#ifdef USE_GL_TEXTURE_2D
int bufferWidth = CVPixelBufferGetWidth(foregroundPixelBuffer);
int bufferHeight = CVPixelBufferGetHeight(foregroundPixelBuffer);

GLuint frameTextureY;
GLuint frameTextureUV;
glGenTextures(1, &frameTextureY);
glGenTextures(1, &frameTextureUV);

if (CVPixelBufferLockBaseAddress(foregroundPixelBuffer, 0) == kCVReturnSuccess) {
    glBindTexture(GL_TEXTURE_2D, frameTextureY);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, bufferWidth, bufferHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddressOfPlane(foregroundPixelBuffer, 0));

    glBindTexture(GL_TEXTURE_2D, frameTextureUV);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddressOfPlane(foregroundPixelBuffer, 1));

    CVPixelBufferUnlockBaseAddress(foregroundPixelBuffer, 0);
}
#endif

glActiveTexture(GL_TEXTURE0);
#ifdef USE_GL_TEXTURE_2D
glUseProgram(self.programYUV_2);
glBindTexture(GL_TEXTURE_2D, frameTextureY);
glUniformMatrix4fv(uniforms[UNIFORM_RENDER_TRANSFORM_YUV_2], 1, GL_FALSE, preferredRenderTransform);
#else
glUseProgram(self.programY);
glBindTexture(CVOpenGLESTextureGetTarget(foregroundLumaTexture), CVOpenGLESTextureGetName(foregroundLumaTexture));
glUniformMatrix4fv(uniforms[UNIFORM_RENDER_TRANSFORM_Y], 1, GL_FALSE, preferredRenderTransform);
#endif

glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

// Attach the destination texture as a color attachment to the off-screen frame buffer
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, CVOpenGLESTextureGetTarget(destLumaTexture), CVOpenGLESTextureGetName(destLumaTexture), 0);

if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
    NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
    goto bail;
}

glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);

#ifdef USE_GL_TEXTURE_2D
glUniform1i(uniforms[UNIFORM_TEXTURE_YUV_2_Y], 0);
glVertexAttribPointer(ATTRIB_VERTEX_Y_UV_INONESHADER, 2, GL_FLOAT, 0, 0, quadVertexData1);
glEnableVertexAttribArray(ATTRIB_VERTEX_Y_UV_INONESHADER);
glVertexAttribPointer(ATTRIB_TEXCOORD_Y_UV_INONESHADER, 2, GL_FLOAT, 0, 0, quadTextureData1);
glEnableVertexAttribArray(ATTRIB_TEXCOORD_Y_UV_INONESHADER);
#else
glUniform1i(uniforms[UNIFORM_TEXTURE_Y], 0);
glVertexAttribPointer(ATTRIB_VERTEX_Y, 2, GL_FLOAT, 0, 0, quadVertexData1);
glEnableVertexAttribArray(ATTRIB_VERTEX_Y);
glVertexAttribPointer(ATTRIB_TEXCOORD_Y, 2, GL_FLOAT, 0, 0, quadTextureData1);
glEnableVertexAttribArray(ATTRIB_TEXCOORD_Y);
#endif

glDrawArrays(GL_TRIANGLE_STRIP, 0, 5);

glActiveTexture(GL_TEXTURE1);
#ifdef USE_GL_TEXTURE_2D
// no need to use a different program
glBindTexture(GL_TEXTURE_2D, frameTextureUV);
glUniformMatrix4fv(uniforms[UNIFORM_RENDER_TRANSFORM_YUV_2], 1, GL_FALSE, preferredRenderTransform);
#else
glUseProgram(self.programUV);
glBindTexture(CVOpenGLESTextureGetTarget(foregroundChromaTexture), CVOpenGLESTextureGetName(foregroundChromaTexture));
glUniformMatrix4fv(uniforms[UNIFORM_RENDER_TRANSFORM_UV], 1, GL_FALSE, preferredRenderTransform);
#endif

glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

glViewport(0, 0, (int)CVPixelBufferGetWidthOfPlane(destinationPixelBuffer, 1), (int)CVPixelBufferGetHeightOfPlane(destinationPixelBuffer, 1));

// Attach the destination texture as a color attachment to the off-screen frame buffer
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, CVOpenGLESTextureGetTarget(destChromaTexture), CVOpenGLESTextureGetName(destChromaTexture), 0);

if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
    NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
    goto bail;
}

glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);

#ifdef USE_GL_TEXTURE_2D
glUniform1i(uniforms[UNIFORM_TEXTURE_YUV_2_UV], 1);
glVertexAttribPointer(ATTRIB_VERTEX_Y_UV_INONESHADER, 2, GL_FLOAT, 0, 0, quadVertexData1);
glEnableVertexAttribArray(ATTRIB_VERTEX_Y_UV_INONESHADER);
glVertexAttribPointer(ATTRIB_TEXCOORD_Y_UV_INONESHADER, 2, GL_FLOAT, 0, 0, quadTextureData1);
glEnableVertexAttribArray(ATTRIB_TEXCOORD_Y_UV_INONESHADER);
#else
glUniform1i(uniforms[UNIFORM_TEXTURE_UV], 1);
glVertexAttribPointer(ATTRIB_VERTEX_UV, 2, GL_FLOAT, 0, 0, quadVertexData1);
glEnableVertexAttribArray(ATTRIB_VERTEX_UV);
glVertexAttribPointer(ATTRIB_TEXCOORD_UV, 2, GL_FLOAT, 0, 0, quadTextureData1);
glEnableVertexAttribArray(ATTRIB_TEXCOORD_UV);
#endif

glDrawArrays(GL_TRIANGLE_STRIP, 0, 5);
glFlush();

bail:
#ifdef USE_GL_TEXTURE_2D
glDeleteTextures(1, &frameTextureY);
glDeleteTextures(1, &frameTextureUV);
#endif
CFRelease(foregroundLumaTexture);
CFRelease(foregroundChromaTexture);
CFRelease(destLumaTexture);
CFRelease(destChromaTexture);

// Periodic texture cache flush every frame
CVOpenGLESTextureCacheFlush(self.videoTextureCache, 0);
Here are my fragment shaders, which I use depending on the test case (whether I draw the Y and UV separately or together in one):
static const char kFragmentShaderY[] = {
    "varying highp vec2 texCoordVarying; \n \
     uniform sampler2D s_texture_y; \n \
     void main() \n \
     { \n \
         gl_FragColor.r = texture2D(s_texture_y, texCoordVarying).r; \n \
     }"
};

static const char kFragmentShaderUV[] = {
    "varying highp vec2 texCoordVarying; \n \
     uniform sampler2D s_texture_uv; \n \
     void main() \n \
     { \n \
         gl_FragColor.rg = texture2D(s_texture_uv, texCoordVarying).rg; \n \
     }"
};

static const char kFragmentShaderYUV_2Textures[] = {
    "varying highp vec2 texCoordVarying; \n \
     uniform sampler2D s_texture_y; \n \
     uniform sampler2D s_texture_uv; \n \
     \n \
     void main() \n \
     { \n \
         mediump vec3 yuv; // = vec3(1.1643 * (texture2D(s_texture_y, texCoordVarying).r - 0.0625), \n \
         lowp vec3 rgb; \n \
         yuv.x = texture2D(s_texture_y, texCoordVarying).r; \n \
         yuv.yz = texture2D(s_texture_uv, texCoordVarying).rg - vec2(0.5, 0.5); \n \
         \n \
         rgb = mat3(      1,       1,       1, \n \
                          0, -.21482, 2.12798, \n \
                    1.28033, -.38059,       0) * yuv; \n \
         gl_FragColor = vec4(rgb, 1.0); \n \
     }"
};
Using GL_TEXTURE_2D, if I use the fragment shader containing both the Y and UV textures, the video looks like #3 above. If I use the two separate fragment shaders (one for Y, one for UV), the picture is #2 above: ALMOST right, but the chroma colours are just greens and pinks. (Mind you, I do comment out some of the code above to be able to use the two separate fragment shaders, and of course I then bind to the GL_TEXTURE_2D and not the CV texture target, and so on.)
Again, my problem is that I need to use GL_TEXTURE_2D instead of CVOpenGLESTextureGetTarget, but it doesn't show the right chroma colour when I do. I wonder what I am doing wrong. Is it something to do with the YUV format being kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange instead of kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, perhaps? I have also experimented with the three-GL_LUMINANCE-textures method, and many other permutations, with no luck.
It turns out the problem was with using GL_LUMINANCE and GL_LUMINANCE_ALPHA, which are apparently deprecated formats. When I switched them to GL_RED_EXT and GL_RG_EXT, it worked and the chroma colors are finally right. I hope this question and answer will save other people time.
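Concretely, the fix amounts to swapping the formats in the two glTexImage2D calls shown earlier; a minimal sketch, assuming the GL_EXT_texture_rg extension is available (these single- and dual-channel textures are what the shaders above sample as .r and .rg):
// Y plane: one byte per texel, sampled in the shader as .r
glBindTexture(GL_TEXTURE_2D, frameTextureY);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, bufferWidth, bufferHeight, 0,
             GL_RED_EXT, GL_UNSIGNED_BYTE,
             CVPixelBufferGetBaseAddressOfPlane(foregroundPixelBuffer, 0));

// Interleaved CbCr plane: two bytes per texel, sampled as .rg
glBindTexture(GL_TEXTURE_2D, frameTextureUV);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RG_EXT, bufferWidth / 2, bufferHeight / 2, 0,
             GL_RG_EXT, GL_UNSIGNED_BYTE,
             CVPixelBufferGetBaseAddressOfPlane(foregroundPixelBuffer, 1));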

Vuforia iOS SDK : Combine ImageTargets & VideoPlayback example

I'm developing an iOS app using Vuforia iOS SDK (not the unity plugin).
I want to mix the VideoPlayback and ImageTargets samples together. Following a forum suggestion, I started with the VideoPlayback sample and integrated ImageTargets into it. After editing the EAGLView files, the app gets both the video and the 3D model, but doesn't show the 3D model. When the image is tracked, it does get the 3D object inside renderFrameQCAR and prints a log message.
I can't figure out where to look. Has anyone tried and done this without Unity support? Any help is appreciated.
This answer is based on real experience.
I render the 3D model inside the video playback call, like this:
if (strcmp(imageTarget.getName(), "affogato") == 0)
{
    const int kObjectScale = 500;
    [self secondSetup3dObjects];
    playerIndex = 99;

    const QCAR::TrackableResult* result = state.getTrackableResult(i);
    QCAR::Matrix44F modelViewMatrix = QCAR::Tool::convertPose2GLMatrix(result->getPose());
    int targetIndex = 9;
    Object3D *obj3D = [objects3D objectAtIndex:targetIndex];

    QCAR::Matrix44F modelViewProjection;
    ShaderUtils::translatePoseMatrix(0.0f, 0.0f, 50.0f, &modelViewMatrix.data[0]);
    ShaderUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale, &modelViewMatrix.data[0]);
    ShaderUtils::multiplyMatrix(&qUtils.projectionMatrix.data[0], &modelViewMatrix.data[0], &modelViewProjection.data[0]);

    glUseProgram(shaderProgramID);

    glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.vertices);
    glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.normals);
    glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.texCoords);

    glEnableVertexAttribArray(vertexHandle);
    glEnableVertexAttribArray(normalHandle);
    glEnableVertexAttribArray(textureCoordHandle);

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, [obj3D.texture textureID]);
    glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE, (const GLfloat*)&modelViewProjection.data[0]);
    glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);

    glDrawArrays(GL_TRIANGLES, 0, obj3D.numVertices);
    ShaderUtils::checkGlError("EAGLView renderFrameQCAR");
}
I hope this can help someone...
Oh, by the way: you need to assign a video to this target in the app delegate, and give the texture a transparent background, to make this work like a charm.
Regards
Start from the VideoPlayback project and change only the EAGLView files. For this sample, import the Teapot.h file in EAGLView.m.
Then add these code parts from ImageTargets at the top, within the namespace declaration:
const float kObjectScale = 3.0f;

const char* textureFilenames[] = {
    "icon_play.png",
    "icon_loading.png",
    "icon_error.png",
    "VuforiaSizzleReel_1.png",
    "VuforiaSizzleReel_2.png",
    // added for 3d model
    "TextureTeapotBrass.png",
    "TextureTeapotBlue.png",
    "TextureTeapotRed.png"
};
add this function (from ImageTargets)
- (void)setup3dObjects
{
    for (int i = 0; i < [textures count]; ++i) {
        Object3D* obj3D = [[Object3D alloc] init];

        if (i >= 5)
        {
            obj3D.numVertices = NUM_TEAPOT_OBJECT_VERTEX;
            obj3D.vertices = teapotVertices;
            obj3D.normals = teapotNormals;
            obj3D.texCoords = teapotTexCoords;
            obj3D.numIndices = NUM_TEAPOT_OBJECT_INDEX;
            obj3D.indices = teapotIndices;
        }
        obj3D.texture = [textures objectAtIndex:i];
        [objects3D addObject:obj3D];
        [obj3D release];
    }
}
Then, in the RenderFrameQCAR, after:
int numActiveTrackables = state.getNumTrackableResults();
add this code for the 3D model, which comes from the ImageTargets sample:
for (int i = 0; i < state.getNumTrackableResults(); ++i) {
    // Get the trackable
    const QCAR::TrackableResult* result = state.getTrackableResult(i);
    const QCAR::Trackable& trackable = result->getTrackable();
    QCAR::Matrix44F modelViewMatrix = QCAR::Tool::convertPose2GLMatrix(result->getPose());

    // Choose the texture based on the target name
    int targetIndex = 0; // "stones"
    if (!strcmp(trackable.getName(), "chips"))
        targetIndex = 1;
    else if (!strcmp(trackable.getName(), "tarmac"))
        targetIndex = 2;

    Object3D *obj3D = [objects3D objectAtIndex:targetIndex+5];

    // OpenGL 2
    QCAR::Matrix44F modelViewProjection;
    ShaderUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale, &modelViewMatrix.data[0]);
    ShaderUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale, &modelViewMatrix.data[0]);
    ShaderUtils::multiplyMatrix(&qUtils.projectionMatrix.data[0], &modelViewMatrix.data[0], &modelViewProjection.data[0]);

    glUseProgram(shaderProgramID);

    glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.vertices);
    glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.normals);
    glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.texCoords);

    glEnableVertexAttribArray(vertexHandle);
    glEnableVertexAttribArray(normalHandle);
    glEnableVertexAttribArray(textureCoordHandle);

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, [obj3D.texture textureID]);
    glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE, (const GLfloat*)&modelViewProjection.data[0]);
    glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);

    glDrawElements(GL_TRIANGLES, obj3D.numIndices, GL_UNSIGNED_SHORT, (const GLvoid*)obj3D.indices);
    ShaderUtils::checkGlError("EAGLView renderFrameQCAR");
}
Just make sure all your assets load correctly, and it should work just fine.

iOS OpenGL ES 2.0: my rectangle is not square when I apply ortho

I am learning OpenGL ES 2.0 and am trying to apply an orthographic projection.
I draw a square, but I don't actually get a square shape on the screen, and I am not sure which part I am missing.
There are some methods in setupRenderingEnv that I did not post, but those just set up the frames and work fine, and m_program is created fine.
Thank you for your help!
// my vertex shader
attribute vec4 Position;
attribute vec4 SourceColor;
uniform mat4 Projection;
varying vec4 DestinationColor;

void main(void)
{
    DestinationColor = SourceColor;
    gl_Position = Projection * Position;
}

// my drawing file
typedef struct
{
    float Position[3];
    float Color[4];
} Vertex;

const Vertex Vertices[] =
{
    {{100, -100, 0}, {1, 0, 0, 1}},
    {{100, 100, 0}, {0, 1, 0, 1}},
    {{-100, 100, 0}, {0, 0, 1, 1}},
    {{-100, -100, 0}, {0, 0, 0, 1}}
};

const GLubyte Indices[] =
{
    0, 1, 2,
    2, 3, 0
};

- (void)setupRenderingEnv
{
    [super setupRenderingEnv];
    [self setupVertexBufferObjects];
    [self setupRunLoop];
    [self applyOrthoWithX:self.frame.size.width andY:self.frame.size.height];
    glViewport(0, 0, self.frame.size.width, self.frame.size.height);
}

//-- used for applying ortho in opengl es 2.0
- (void)applyOrthoWithX:(float)maxX andY:(float)maxY
{
    float a = 1.0f / maxX;
    float b = 1.0f / maxY;
    float ortho[16] =
    {
        a, 0, 0, 0,
        0, b, 0, 0,
        0, 0, -1, 0,
        0, 0, 0, 1
    };
    GLint projectionUniform = glGetUniformLocation(super.m_programHandle, "Projection");
    glUniformMatrix4fv(projectionUniform, 1, 0, &ortho[0]);
}

//-- overriding drawCandle; it renders the image and draws the candle
- (void)drawCandle
{
    glClearColor(0, 104.0/255, 55.0/255, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);

    GLuint positionSlot = glGetAttribLocation(super.m_programHandle, "Position");
    GLuint colorSlot = glGetAttribLocation(super.m_programHandle, "SourceColor");
    glEnableVertexAttribArray(positionSlot);
    glEnableVertexAttribArray(colorSlot);

    glVertexAttribPointer(positionSlot, 3, GL_FLOAT, GL_FALSE,
                          sizeof(Vertex), 0);
    glVertexAttribPointer(colorSlot, 4, GL_FLOAT, GL_FALSE,
                          sizeof(Vertex), (GLvoid *)(sizeof(float) * 3));

    glDrawElements(GL_TRIANGLES, sizeof(Indices)/sizeof(Indices[0]), GL_UNSIGNED_BYTE, 0);

    glDisableVertexAttribArray(positionSlot);
    glDisableVertexAttribArray(colorSlot);

    [super drawCandle];
}
What shape is your viewport? If it's not square then that's the problem. The matrix you're creating - scaling by inverse width and inverse height - is going to make the width always be 1 unit wide and the height 1 unit tall. If the width and height aren't the same number of pixels, then squares won't draw square. You need to account for the aspect ratio of your viewport. Off the top of my head, I think it would be something more like this:
float ortho[16] = {
    a / b, 0, 0, 0,
    0, b, 0, 0,
    0, 0, -1, 0,
    0, 0, 0, 1
};
(I might have a/b inverted - can't remember)
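To make the aspect-ratio idea explicit, here is a minimal sketch of my own (not from the poster's project) that derives the x scale from the y scale and the viewport's aspect ratio, so one world unit covers the same number of pixels on both axes:
// Assumed: viewportWidth/viewportHeight hold the glViewport size in pixels.
float aspect = viewportWidth / viewportHeight;
float b = 1.0f / maxY;   // y: maps [-maxY, maxY] to [-1, 1]
float a = b / aspect;    // x: same pixels-per-unit as y
float ortho[16] = {
    a, 0,  0, 0,
    0, b,  0, 0,
    0, 0, -1, 0,
    0, 0,  0, 1
};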

iOS: GPUImage Library and VBO

I am making use of Brad Larson's wonderful GPUImage library for image manipulation. So far, it's been great. However, I'm trying to add a filter to allow mesh deformation and running into quite a few issues. Specifically, I want a filter that uses VBOs to render the quad, so I can ultimately change the vertices dynamically for the deformation.
The first step of using VBOs is causing a crash.
I created a subclass of GPUImageFilter, overriding the - (void)newFrameReadyAtTime:(CMTime)frameTime method to render a quad via VBOs. NOTE: I am simply trying to render a single quad rather than a full mesh, so I can tackle one issue at a time.
@implementation GPUMeshImageFilter {
    GLuint _positionVBO;
    GLuint _texcoordVBO;
    GLuint _indexVBO;
    BOOL isSetup_;
}

- (void)setupBuffers
{
    static const GLsizeiptr verticesSize = 4 * 2 * sizeof(GLfloat);
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    static const GLsizeiptr textureSize = 4 * 2 * sizeof(GLfloat);
    static const GLfloat squareTextureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    static const GLsizeiptr indexSize = 4 * sizeof(GLushort);
    static const GLushort indices[] = {
        0, 1, 2, 3,
    };

    glGenBuffers(1, &_indexVBO);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indexVBO);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indexSize, indices, GL_STATIC_DRAW);

    glGenBuffers(1, &_positionVBO);
    glBindBuffer(GL_ARRAY_BUFFER, _positionVBO);
    glBufferData(GL_ARRAY_BUFFER, verticesSize, squareVertices, GL_STATIC_DRAW);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2*sizeof(GLfloat), 0);

    glGenBuffers(1, &_texcoordVBO);
    glBindBuffer(GL_ARRAY_BUFFER, _texcoordVBO);
    glBufferData(GL_ARRAY_BUFFER, textureSize, squareTextureCoordinates, GL_DYNAMIC_DRAW);
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 2*sizeof(GLfloat), 0);

    NSLog(@"Setup complete");
}

- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
    if (!isSetup_) {
        [self setupBuffers];
        isSetup_ = YES;
    }

    if (self.preventRendering)
    {
        return;
    }

    [GPUImageOpenGLESContext useImageProcessingContext];
    [self setFilterFBO];

    [filterProgram use];

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, filterSourceTexture);
    glUniform1i(filterInputTextureUniform, 2);

    if (filterSourceTexture2 != 0)
    {
        glActiveTexture(GL_TEXTURE3);
        glBindTexture(GL_TEXTURE_2D, filterSourceTexture2);
        glUniform1i(filterInputTextureUniform2, 3);
    }

    NSLog(@"Draw VBO");
    glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, 0);

    [self informTargetsAboutNewFrameAtTime:frameTime];
}
@end
Plugging in this filter, I see: "Setup complete" and "Draw VBO" displayed to the console. However, after it calls the target (in this case a GPUImageView) it crashes at the target's drawing call, which uses glDrawArrays.
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
Here is the complete method that contains this line.
- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
    [GPUImageOpenGLESContext useImageProcessingContext];
    [self setDisplayFramebuffer];

    [displayProgram use];

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    static const GLfloat textureCoordinates[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, inputTextureForDisplay);
    glUniform1i(displayInputTextureUniform, 4);

    glVertexAttribPointer(displayPositionAttribute, 2, GL_FLOAT, 0, 0, imageVertices);
    glVertexAttribPointer(displayTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [self presentFramebuffer];
}
Any help would be greatly appreciated; I've been banging my head against this for a while.
It looks likely that the crash occurs because the GL_ARRAY_BUFFER is still bound when GPUImageView-newFrameReadyAtTime: executes.
Try unbinding the buffer (i.e. binding it to 0) at the end of -setupBuffers:
glBindBuffer(GL_ARRAY_BUFFER, 0);
The reason this is a problem is because GPUImage uses the same OpenGL context from one GPUImageInput (e.g. GPUImageFilter, GPUImageView) to the next. I believe largely in order that each step can output to an OpenGL texture and then have that texture directly available to the next GPUImageInput.
So, because GL_ARRAY_BUFFER is still bound, the behavior of glVertexAttribPointer inside GPUImageView's -newFrameReadyAtTime: changes: it effectively tries to point the displayPositionAttribute attribute at the still-bound VBO at an offset of imageVertices, which is nonsensical and likely to cause a crash. See the glVertexAttribPointer docs.
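In practice the cleanup is one line at the end of -setupBuffers, using the names from the code above:
// The attrib pointers have already captured _positionVBO/_texcoordVBO, so the
// GL_ARRAY_BUFFER binding is no longer needed here and would otherwise leak
// into the next GPUImageInput in the chain.
glBindBuffer(GL_ARRAY_BUFFER, 0);
// Note: GL_ELEMENT_ARRAY_BUFFER is consumed at glDrawElements time, so either
// keep _indexVBO bound or rebind it before each draw.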
This code below doesn't look right to me at all. Why are you enabling vertex attrib array 4 & 5? You should enable the array at the attribute location you are intending to use.
//position vbo
glEnableVertexAttribArray(4);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2*sizeof(GLfloat), 0);
//texcoord vbo
glEnableVertexAttribArray(5);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 2*sizeof(GLfloat), 0);
If your vertex attribute is at position 0, you should enable attrib 0 and set pointer for attrib 0. If it's at position 4 (which I doubt), then you should enable attrib 4 and set the pointer for position 4. I can't think of any reason it should be mismatched like you have it.
You should get the proper position either by setting it via a layout qualifier, by using glBindAttribLocation before shader linking, or by using glGetAttribLocation after linking.
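For illustration, a sketch of the latter two approaches (the program handle and attribute names here are placeholders):
// Option 1: pin the locations before linking, then use those constants.
glBindAttribLocation(program, 0, "position");
glBindAttribLocation(program, 1, "inputTextureCoordinate");
glLinkProgram(program);
// ... later: glEnableVertexAttribArray(0); glVertexAttribPointer(0, ...);

// Option 2: let the linker choose, then query the locations after linking.
glLinkProgram(program);
GLint positionAttrib = glGetAttribLocation(program, "position");
glEnableVertexAttribArray(positionAttrib);
glVertexAttribPointer(positionAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);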
Let me know if this doesn't make sense.

VBO glDrawElements and glVertexAttribPointer on GLES2.0 displays nothing

I can display a texture using shaders, glVertexAttribPointer and glDrawArrays like so:
Init
const GLfloat squareVertices[] = {
    -0.5f, -0.33f,
     0.5f, -0.33f,
    -0.5f,  0.33f,
     0.5f,  0.33f
};

const GLfloat squareTex[] = {
    0, 0,
    1, 0,
    0, 1,
    1, 1
};

glEnableVertexAttribArray(PositionTag);
glEnableVertexAttribArray(TexCoord0Tag);
glVertexAttribPointer(PositionTag, 2, GL_FLOAT, GL_FALSE, 0, squareVertices);
glVertexAttribPointer(TexCoord0Tag, 2, GL_FLOAT, GL_FALSE, 0, squareTex);
And for the draw:
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
But I'm having difficulty converting to VBOs, shaders and glDrawElements. This is the code I have so far, but nothing displays:
Header
typedef struct MyVertex
{
    float x, y, z;    // Vertex
    float nx, ny, nz; // Normal
    float s0, t0;     // Texcoord0
} MyVertex;

#define BUFFER_OFFSET(i) ((char *)NULL + (i))
Init
glGenBuffers(1, &VertexVBOID);
glBindBuffer(GL_ARRAY_BUFFER, VertexVBOID);
MyVertex pvertices[4];
//Fill the pvertices array
pvertices[0].x = -0.5f;
pvertices[0].y = -0.33f;
pvertices[0].z = 0.0;
pvertices[0].nx = 0.0;
pvertices[0].ny = 0.0;
pvertices[0].nz = 1.0;
pvertices[0].s0 = 0.0;
pvertices[0].t0 = 0.0;
pvertices[1].x = 0.5f;
pvertices[1].y = -0.33f;
pvertices[1].z = 0.0;
pvertices[1].nx = 0.0;
pvertices[1].ny = 0.0;
pvertices[1].nz = 1.0;
pvertices[1].s0 = 1.0;
pvertices[1].t0 = 0.0;
pvertices[2].x = -0.5f;
pvertices[2].y = 0.33f;
pvertices[2].z = 0.0;
pvertices[2].nx = 0.0;
pvertices[2].ny = 0.0;
pvertices[2].nz = 1.0;
pvertices[2].s0 = 0.0;
pvertices[2].t0 = 1.0;
pvertices[3].x = 0.5f;
pvertices[3].y = 0.33f;
pvertices[3].z = 0.0;
pvertices[3].nx = 0.0;
pvertices[3].ny = 0.0;
pvertices[3].nz = 1.0;
pvertices[3].s0 = 1.0;
pvertices[3].t0 = 1.0;
glBufferData(GL_ARRAY_BUFFER, sizeof(MyVertex)*4, NULL, GL_STATIC_DRAW);
glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(MyVertex)*4, pvertices);
glGenBuffers(1, &IndexVBOID);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IndexVBOID);
int pindices[6];
pindices[0]=0;
pindices[1]=1;
pindices[2]=2;
pindices[3]=2;
pindices[4]=1;
pindices[5]=3;
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(int)*6, NULL, GL_STATIC_DRAW);
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, 0, sizeof(int)*6, pindices);
Draw
glBindBuffer(GL_ARRAY_BUFFER, VertexVBOID);
glEnableVertexAttribArray(PositionTag);
glEnableVertexAttribArray(NormalTag);
glEnableVertexAttribArray(TexCoord0Tag);
glVertexAttribPointer(PositionTag, 3, GL_FLOAT, GL_FALSE, 32, BUFFER_OFFSET(0));
glVertexAttribPointer(NormalTag, 3, GL_FLOAT, GL_FALSE, 32, BUFFER_OFFSET(12));
glVertexAttribPointer(TexCoord0Tag, 2, GL_FLOAT, GL_FALSE, 32, BUFFER_OFFSET(24));
// glDrawRangeElements(GL_TRIANGLES, x, y, z, GL_UNSIGNED_SHORT, BUFFER_OFFSET(0));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IndexVBOID);
glDrawElements(GL_TRIANGLES, 3, GL_INT, 0);
According to here, GL_INT is not a valid type to use for indices in glDrawElements. Try using unsigned ints for your indices (and of course GL_UNSIGNED_INT in glDrawElements). You may still use your int data as indices, but as glDrawElements needs GL_UNSIGNED_INT, it would be more consistent to make the array unsigned int.
EDIT: After looking into the specification (based on your tags I took the ES 2.0 spec), it seems to further limit the type to unsigned byte and unsigned short. I don't know if iOS is that limited, but we can conclude that the data at least has to be unsigned. On the other hand, I haven't found any statement about a possible GL_INVALID_ENUM error being thrown on a wrong type argument, but it would be reasonable to get one.
Your code doesn't look terribly wrong, so this time the devil is somewhere in the details. My guess is that your struct's data field alignments don't match the offsets passed to OpenGL.
I suggest you use the offsetof() macro found in stddef.h to portably get the offsets of the data fields.
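Putting the two answers together, a sketch of the corrected index data and draw call (still using MyVertex, BUFFER_OFFSET and the attribute tags from the question):
#include <stddef.h> // for offsetof

// ES 2.0 indices must be unsigned bytes or shorts, and the draw call must
// cover all six indices of the two triangles.
GLushort pindices[6] = { 0, 1, 2, 2, 1, 3 };
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(pindices), pindices, GL_STATIC_DRAW);

// offsetof keeps the attribute offsets in sync with the struct's real layout.
glVertexAttribPointer(PositionTag, 3, GL_FLOAT, GL_FALSE, sizeof(MyVertex),
                      BUFFER_OFFSET(offsetof(MyVertex, x)));
glVertexAttribPointer(NormalTag, 3, GL_FLOAT, GL_FALSE, sizeof(MyVertex),
                      BUFFER_OFFSET(offsetof(MyVertex, nx)));
glVertexAttribPointer(TexCoord0Tag, 2, GL_FLOAT, GL_FALSE, sizeof(MyVertex),
                      BUFFER_OFFSET(offsetof(MyVertex, s0)));

glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IndexVBOID);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, BUFFER_OFFSET(0));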
