I am working on video calling in PJSIP and I am facing an issue with rendering the video using OpenGL.
I am able to see the real-time video, but with a green tint; I presume it is an issue with the OpenGL textures.
I don't have any knowledge of OpenGL, so I am pasting the OpenGL code snippets that I use in my application.
Please tell me where I am going wrong; all I need to do is somehow remove the green tint from the video.
Below is the code, to the best of my knowledge:
// YUV to RGB vertex shader
attribute vec4 position;
attribute vec2 textureCoordinate;
varying vec2 coordinate;

void main()
{
    gl_Position = position;
    coordinate = textureCoordinate.xy;
}

// YUV to RGB fragment shader
uniform sampler2D SamplerY;
uniform sampler2D SamplerUV;
varying highp vec2 coordinate;

void main()
{
    mediump vec3 yuv;
    lowp vec3 rgb;
    yuv.x  = texture2D(SamplerY, coordinate).r;
    yuv.yz = texture2D(SamplerUV, coordinate).rg - vec2(0.5, 0.5);
    rgb = mat3(1,        1,        1,
               0,        -.21482,  2.12798,
               1.28033,  -.38059,  0) * yuv;
    gl_FragColor = vec4(rgb, 1);
}
static void uninitialize_gl_buffers(struct ios_stream *strm)
{
TRACE_((THIS_FILE, "uninitialize_gl_buffers"));
glDeleteFramebuffers(1, &strm->frameBufferHandle);
glDeleteRenderbuffers(1, &strm->colorBufferHandle);
}
static void initialize_gl_textures(struct ios_stream *strm)
{
TRACE_((THIS_FILE, "initialize_gl_textures"));
glGenTextures(1, &strm->lumaTexture);
glGenTextures(1, &strm->chromaUTexture);
glGenTextures(1, &strm->chromaVTexture);
}
static void uninitialize_gl_textures(struct ios_stream *strm)
{
TRACE_((THIS_FILE, "uninitialize_gl_textures"));
if (strm->lumaTexture) {
glDeleteTextures(1, &strm->lumaTexture);
strm->lumaTexture = 0;
}
if (strm->chromaUTexture) {
glDeleteTextures(1, &strm->chromaUTexture);
strm->chromaUTexture = 0;
}
if (strm->chromaVTexture) {
glDeleteTextures(1, &strm->chromaVTexture);
strm->chromaVTexture = 0;
}
}
static void allocate_gl_textures(struct ios_stream *strm,
int frameWidth, int frameHeight)
{
TRACE_((THIS_FILE, "allocate_gl_textures %dx%d", frameWidth, frameHeight));
// Y planes (init)
glActiveTexture(GL_TEXTURE0);
glGenTextures( 1, &strm->lumaTexture );
glBindTexture(GL_TEXTURE_2D, strm->lumaTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//glEnable(GL_TEXTURE_2D);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, frameWidth, frameHeight, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, 0);
// U-planes (init)
glActiveTexture(GL_TEXTURE1);
glGenTextures( 1, &strm->chromaUTexture );
glBindTexture(GL_TEXTURE_2D, strm->chromaUTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, frameWidth/2, frameHeight/2, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, 0);
// V-plane (init)
glActiveTexture(GL_TEXTURE2);
glGenTextures( 1, &strm->chromaVTexture );
glBindTexture(GL_TEXTURE_2D, strm->chromaVTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, frameWidth/2, frameHeight/2, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, 0);
}
static void update_gl_textures (struct ios_stream *strm)
{
//const pj_uint8_t *buffer = (const pj_uint8_t *)strm->buf;
const pj_uint8_t *buffer = (const pj_uint8_t *)strm->frame->buf;
pj_uint32_t width = strm->size.w;
pj_uint32_t height = strm->size.h;
//TRACE_((THIS_FILE, "update_gl_textures %dx%d (%d)", width, height, buffer));
// Y planes (update)
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, strm->lumaTexture);
glUniform1i(uniformLocations[UNIFORM_Y], 0);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0,
width, // source width
height, // source height
GL_LUMINANCE, GL_UNSIGNED_BYTE,
buffer);
// U-planes (update)
buffer += width*height;
width >>= 1;
height >>=1;
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, strm->chromaUTexture);
glUniform1i(uniformLocations[UNIFORM_U], 1);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0,
width, // source width
height, // source height
GL_LUMINANCE, GL_UNSIGNED_BYTE,
buffer);
// V-planes (update)
buffer += width*height;
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, strm->chromaVTexture);
glUniform1i(uniformLocations[UNIFORM_V], 2);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0,
width, // source width
height, // source height
GL_LUMINANCE, GL_UNSIGNED_BYTE,
buffer);
}
The following is my create texture function:
- (GLuint)setupTexture:(NSString *)fileName {
// 1
CGImageRef spriteImage = [UIImage imageNamed:fileName].CGImage;
if (!spriteImage) {
NSLog(@"Failed to load image %@", fileName);
exit(1);
}
// 2
size_t kWidth = CGImageGetWidth(spriteImage);
size_t kHeight = CGImageGetHeight(spriteImage);
// 3
GLubyte * spriteData = (GLubyte *) calloc(kWidth*kHeight*4, sizeof(GLubyte));
// 4
GLuint texName;
glGenTextures(1, &texName);
glBindTexture(GL_TEXTURE_2D, texName);
glTexImage2D(GL_TEXTURE_2D, 0, GL_BGRA, (int)kWidth, (int)kHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);
glGenerateMipmap(GL_TEXTURE_2D);
//glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL_APPLE, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_2D, 0);
return texName;
}
I use GPU capture in Xcode, and I find an error like this:
I'm new to OpenGL; how can I solve this? Thanks very much!
Take a look at the texture descriptor number in the screenshot, 391341552. It is very unlikely to have this many textures in a single process.
It seems like the issue stems not from the setup, but rather from the texture binding before drawing.
Usually, after you have successfully set up your textures and vertex arrays, you draw them as follows:
glUseProgram(programDescriptor);
glBindVertexArray(vertexArrayDescriptor);
glUniform(...);
glBindTexture(GL_TEXTURE_2D, textureDescriptor);
glDrawArrays(GL_TRIANGLE_STRIP, 0, verticesQuantity);
Perhaps you passed some pointer to glBindTexture?
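If that is what happened, the fix is simply to bind the name (a small integer handle) that glGenTextures produced, rather than any pointer. A minimal sketch, assuming the name comes from the setupTexture: method above (the file name here is hypothetical):
// textureDescriptor holds the GLuint name created inside setupTexture: via glGenTextures;
// this integer handle (typically 1, 2, 3, ...) is what glBindTexture expects,
// not an address such as the 391341552 shown in the GPU capture.
GLuint textureDescriptor = [self setupTexture:@"sprite.png"];
glBindTexture(GL_TEXTURE_2D, textureDescriptor);
glDrawArrays(GL_TRIANGLE_STRIP, 0, verticesQuantity);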
I have an OpenGL program for iOS.
I would like to have a repeated texture. Normally this is not a big deal for me, because GL_REPEAT does a fine job.
You can see the problem by comparing the next two images:
The first image is a simulator screenshot. Everything works fine.
The second image is an iPad screenshot. The texture is repeated once and then clamped to the edge.
Notice that the image is supposed to repeat 4 times in each direction. The red area is the area where the texture coordinate in a direction is > 1.0. So the device shows the image (normal area), repeats the image (red area) and then clamps the image.
So I will show what I do to render the quad.
I set up the texture:
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
_location = location;
_texture = [self setupTextureByUIImage:[UIImage imageNamed:name]];
_uniform = uniform;
The setupTextureByUIImage function is:
- (GLuint) setupTextureByUIImage: (UIImage*) image {
// We want to display images
glActiveTexture(_location);
GLuint texture;
// Generate textures
glGenTextures(1, &texture);
// Bind it
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE);
// Get Image size
GLuint width = CGImageGetWidth(image.CGImage);
GLuint height = CGImageGetHeight(image.CGImage);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Allocate memory for image
void *imageData = malloc( height * width * 4 );
CGContextRef imgcontext = CGBitmapContextCreate( imageData, width, height, 8, 4 * width, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big );
CGColorSpaceRelease( colorSpace );
CGContextClearRect( imgcontext, CGRectMake( 0, 0, width, height ) );
CGContextTranslateCTM( imgcontext, 0, height - height );
CGContextDrawImage( imgcontext, CGRectMake( 0, 0, width, height ), image.CGImage );
// Generate texture in opengl
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
// Release context
CGContextRelease(imgcontext);
// Free Stuff
free(imageData);
return texture;
}
After setup I begin to draw an object.
First I call makeActiveAndBind.
Then I draw.
Then I call unbind.
- (void) makeActiveAndBind
{
glActiveTexture(_location);
glBindTexture(GL_TEXTURE_2D, _texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glUniform1i(_uniform, _location - GL_TEXTURE0);
}
- (void) unbind
{
glBindTexture(GL_TEXTURE_2D, 0);
}
Does anyone have any idea about this strange behavior?
The texture coordinates of the cube are [0,4]x[0,4].
Sorry for my English.
I want to display video from a file, where the frames are 4 bytes per pixel, BGRA, 1280x720.
On the Mac I just pulled out a frame and drew it with glDrawPixels; that works on the Mac, but in OpenGL ES everything is done differently.
Here's the code from the Mac:
int pos = 0;
NSData *data = [[NSData alloc] initWithContentsOfFile:@"video.raw"];
glViewport(0,0,width,height);
glLoadIdentity();
glOrtho(0, width, 0, height, -1.0, 1.0);
glPixelZoom(1, -1);
glClear(GL_COLOR_BUFFER_BIT);
//glRasterPos2i(0, height);
glRasterPos2i(0, 0);
glDrawPixels(1280, 720, GL_BGRA, GL_UNSIGNED_BYTE, [data bytes]+pos);
glFinish();
Push that data to a texture with glTexSubImage2D and render the texture. Note though that the texture has to have power-of-two dimensions, so in your case you can make it (2048, 1024) but update only the (1280, 720) part:
CGSize videoSize;
CGSize textureSize;
GLuint dimension = 1;
while (videoSize.width > dimension) {
dimension <<= 1;
}
textureSize = CGSizeMake(dimension, .0f);
dimension = 1;
while (videoSize.height > dimension) {
dimension <<= 1;
}
textureSize = CGSizeMake(textureSize.width, dimension);
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, textureSize.width, textureSize.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
GLfloat textureCoordinates[] = {
.0f, .0f,
.0f, videoSize.height/textureSize.height,
videoSize.width/textureSize.width, .0f,
videoSize.width/textureSize.width, videoSize.height/textureSize.height
};
To update the texture:
void *data;
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, data);
Then just draw your textured quad.
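As a hedged sketch (not part of the original answer), drawing that quad under the fixed-function OpenGL ES 1.1 pipeline could look like the following; the vertex positions are placeholders, and with ES 2.0 you would feed the same two arrays to your shader's attributes instead:
// Full-screen quad listed in the same order as textureCoordinates above:
// bottom-left, top-left, bottom-right, top-right.
// If the video appears upside down, flip the second (t) texture coordinates.
const GLfloat vertices[] = {
    -1.0f, -1.0f,
    -1.0f,  1.0f,
     1.0f, -1.0f,
     1.0f,  1.0f,
};
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, texture);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glVertexPointer(2, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoordinates);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);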
I get H.264 data and decode it to YUV buffers, and I can display the YUV data with OpenGL on iOS 5 and iOS 6, but when I try to run it on my iPad (iOS 4.2.1) it does not display correctly; the whole screen is just green. I don't know why. Here is my code:
-(void)playVideoData:(void *)data
{
if (!_textureY)
{
glGenTextures(1, &_textureY);
glGenTextures(1, &_textureU);
glGenTextures(1, &_textureV);
}
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _textureY);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW, _videoH, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, y);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, _textureU);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW/2, _videoH/2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, u);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, _textureV);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW/2, _videoH/2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, v);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
[self render];
}
- (void)render
{
glViewport(viewportx,viewporty, VIEWWIDTH, VIEWHEIGHT);
glClearColor(0.0, 0, 0.0, 0);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(programId);
// Update uniform value
//glUniform1f(uniforms[UNIFORM_TRANSLATE], 0.0f);
GLuint textureUniformY = glGetUniformLocation(programId, "SamplerY");
GLuint textureUniformU = glGetUniformLocation(programId, "SamplerU");
GLuint textureUniformV = glGetUniformLocation(programId, "SamplerV");
// Update attribute values
glVertexAttribPointer(ARDRONE_ATTRIB_POSITION, 2, GL_FLOAT, 0, 0, squareVertices);
glEnableVertexAttribArray(ARDRONE_ATTRIB_POSITION);
glVertexAttribPointer(ARDRONE_ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, coordVertices);
glEnableVertexAttribArray(ARDRONE_ATTRIB_TEXCOORD);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _textureY);
glUniform1i(textureUniformY, 0);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, _textureU);
glUniform1i(textureUniformU, 1);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, _textureV);
glUniform1i(textureUniformV, 2);
}
-(void)drawFrame2
{
if (context != nil)
{
//make it the current context for rendering
[EAGLContext setCurrentContext:context];
//if our framebuffers have not been created yet, do that now!
if (!defaultFramebuffer)
[self createFramebuffer];
glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
[self playVideoData];
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
else
NSLog(@"Context not set!");
}
I have put the data into the y, u and v buffers.
Here are my fsh and vsh:
vsh:
attribute vec4 position;    // 1
//uniform float translate;
attribute vec2 TexCoordIn;  // New
varying vec2 TexCoordOut;   // New

void main(void)
{
    gl_Position = position; // 6
    TexCoordOut = TexCoordIn;
}
fsh:
varying lowp vec2 TexCoordOut;
uniform sampler2D SamplerY;
uniform sampler2D SamplerU;
uniform sampler2D SamplerV;

void main(void)
{
    mediump vec3 yuv;
    lowp vec3 rgb;
    yuv.x = texture2D(SamplerY, TexCoordOut).r;
    yuv.y = texture2D(SamplerU, TexCoordOut).r - 0.5;
    yuv.z = texture2D(SamplerV, TexCoordOut).r - 0.5;
    rgb = mat3(1,        1,        1,
               0,        -0.39465, 2.03211,
               1.13983,  -0.58060, 0) * yuv;
    gl_FragColor = vec4(rgb, 1);
}
On iOS 4.2 there are no errors and the shaders compile OK, but it just cannot display...
This is because the GL_RED_EXT extension that you are using to upload your textures was only added in iOS 5.0 and isn't present in iOS 4.2. You won't be able to use it to upload your YUV textures in this manner, so you'll need to rewrite that part of your code. Also, I believe this extension is only supported on the iPad 2 and newer devices, not the original iPad and older iPhones, so you won't be able to use it even on iOS 5+ for those older devices.
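As a sketch of one possible rewrite (an assumption on my part, not something the answer prescribes): GL_LUMINANCE is part of core OpenGL ES 2.0, so it is available on iOS 4.2 and on the older devices, and a luminance texture returns its value in the .r component just as the shader's texture2D(...).r reads expect:
// Hedged sketch: upload each plane as GL_LUMINANCE instead of GL_RED_EXT,
// reusing the _textureY/_textureU/_textureV names and y/u/v buffers from the question.
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _textureY);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _videoW, _videoH, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, y);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, _textureU);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _videoW/2, _videoH/2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, u);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, _textureV);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _videoW/2, _videoH/2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, v);
The glTexParameteri calls from the original playVideoData: stay the same.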
I'm trying to do offscreen rendering using this post:
http://www.idevgames.com/forums/thread-1785-post-54500.html#pid54500
First I init the FBO.
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 512, 512, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glBindTexture(GL_TEXTURE_2D, 0);
glGenFramebuffersOES(1, &fbo);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, fbo);
glFramebufferTexture2DOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_TEXTURE_2D, tex, 0);
GLenum status = glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES);
if (status != GL_FRAMEBUFFER_COMPLETE_OES) {
NSLog(@"failed to make complete framebuffer object %x", status);
exit(-1);
}
Then I draw to my texture
GLint oldFBO, oldViewPort[4];
glGetIntegerv(GL_FRAMEBUFFER_BINDING_OES, &oldFBO);
glGetIntegerv(GL_VIEWPORT, oldViewPort);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, listFrameBuffers[i]);
glViewport(0, 0, 512, 512);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// draw code here
glBindFramebufferOES(GL_FRAMEBUFFER_OES, oldFBO);
glViewport(oldViewPort[0], oldViewPort[1], oldViewPort[2], oldViewPort[3]);
And then each frame I draw it.
When I finish my frame by calling glSwapBuffers, the texture attached to the framebuffer becomes cleared
(but when I draw to the framebuffer each time after glSwapBuffers, everything is OK).
So, the quest is completed.
I just changed the draw code to this:
GLint oldFBO, oldViewPort[4];
glGetIntegerv(GL_FRAMEBUFFER_BINDING_OES, &oldFBO);
glGetIntegerv(GL_VIEWPORT, oldViewPort);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, listFrameBuffers[i]);
glViewport(0, 0, 512, 512);
//glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // HERE
glDisable(GL_DEPTH_TEST); // AND HERE
// draw code here
glBindFramebufferOES(GL_FRAMEBUFFER_OES, oldFBO);
glViewport(oldViewPort[0], oldViewPort[1], oldViewPort[2], oldViewPort[3]);
and it works! :)