I'm learning OpenGL ES 2.0 for iPhone.
First, I'm trying to draw a background with a texture. It works only as long as I'm not drawing anything else.
Here is the code that draws the background:
[EAGLContext setCurrentContext:context];
// Clear the buffer
glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer);
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
//Draw something here
if (backgroundTextureCreated == YES) {
static const GLfloat squareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static const GLfloat textureVertices[] = {
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
0.0f, 0.0f,
};
#warning BG drawing here.
/*_ddp_positionSlot = glGetAttribLocation(_ddp_program, "position");
_ddp_inputTextureCoordinateSlot = glGetAttribLocation(_ddp_program, "inputTextureCoordinate");
glEnableVertexAttribArray(_ddp_positionSlot);
glEnableVertexAttribArray(_ddp_inputTextureCoordinateSlot);
_ddp_TextureUniform = glGetUniformLocation(_ddp_program, "bg");*/
glBindTexture(GL_TEXTURE_2D, BackgroundTexture.id);
glUniform1i(_ddp_TextureUniform, 0);
glVertexAttribPointer(_ddp_positionSlot, 2, GL_FLOAT, 0, 0, squareVertices);
glEnableVertexAttribArray(_ddp_positionSlot);
glVertexAttribPointer(_ddp_inputTextureCoordinateSlot, 2, GL_FLOAT, 0, 0, textureVertices);
glEnableVertexAttribArray(_ddp_inputTextureCoordinateSlot);
glUseProgram(_ddp_program);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
// Display the buffer
glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER];
And here is the code that draws the points:
- (void)renderLineFromPoint:(CGPoint)start toPoint:(CGPoint)end
{
static GLfloat* vertexBuffer = NULL;
static NSUInteger vertexMax = 128;
NSUInteger vertexCount = 0, count, i;
[EAGLContext setCurrentContext:context];
glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer);
glBindTexture(GL_TEXTURE_2D, brushTexture.id);
// Convert locations from Points to Pixels
CGFloat scale = self.contentScaleFactor;
start.x *= scale;
start.y *= scale;
end.x *= scale;
end.y *= scale;
// Allocate vertex array buffer
if(vertexBuffer == NULL)
vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat));
// Add points to the buffer so there are drawing points every X pixels
count = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)) / kBrushPixelStep), 1);
for(i = 0; i < count; ++i) {
if(vertexCount == vertexMax) {
vertexMax = 2 * vertexMax;
vertexBuffer = realloc(vertexBuffer, vertexMax * 2 * sizeof(GLfloat));
}
vertexBuffer[2 * vertexCount + 0] = start.x + (end.x - start.x) * ((GLfloat)i / (GLfloat)count);
vertexBuffer[2 * vertexCount + 1] = start.y + (end.y - start.y) * ((GLfloat)i / (GLfloat)count);
NSLog(#"Vertex coords: %f,%f",
start.x + (end.x - start.x) * (GLfloat)i / (GLfloat)count,
start.y + (end.y - start.y) * (GLfloat)i / (GLfloat)count );
vertexCount += 1;
}
// Load data to the Vertex Buffer Object
glBindBuffer(GL_ARRAY_BUFFER, vboId);
glBufferData(GL_ARRAY_BUFFER, vertexCount*2*sizeof(GLfloat), vertexBuffer, GL_DYNAMIC_DRAW);
glEnableVertexAttribArray(ATTRIB_VERTEX);
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 0, 0);
// Draw
glUseProgram(program[PROGRAM_POINT].id);
glDrawArrays(GL_POINTS, 0, vertexCount);
//glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
// Display the buffer
glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
The renderLineFromPoint: method has been taken from Apple's GLPaint sample code.
The background drawing stops working as soon as I draw a line.
What's wrong?
I had a very similar problem.
I was able to draw a texture as the background. Whenever I started painting with my fingers on the canvas, the background disappeared (which seems to be your problem, too).
My drawing code was all right (and yours seems to be, too). The problem was that after setting the background, and before painting with my fingers, the method resizeFromLayer: got called, which then called renderbufferStorage:fromDrawable:. The latter method flushes the backing buffer (see the documentation for renderbufferStorage:fromDrawable:).
Since you also use GLPaint as a starting point, I guess this could be the cause of your problem, too.
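If that is the cause, a guard that re-creates the renderbuffer storage only when the layer size actually changed will keep the backing buffer intact. A minimal sketch, assuming a GLPaint-style EAGLView with context, viewRenderbuffer, backingWidth and backingHeight ivars (the names are assumptions based on the sample):
- (void)layoutSubviews
{
    [EAGLContext setCurrentContext:context];
    CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
    // renderbufferStorage:fromDrawable: wipes the drawable's contents,
    // so only re-create the storage when the size really changed.
    GLint newWidth = (GLint)(eaglLayer.bounds.size.width * self.contentScaleFactor);
    GLint newHeight = (GLint)(eaglLayer.bounds.size.height * self.contentScaleFactor);
    if (newWidth != backingWidth || newHeight != backingHeight) {
        glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
        [context renderbufferStorage:GL_RENDERBUFFER fromDrawable:eaglLayer];
        glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
        glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
    }
}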
I have an in-memory "bitmap", which is just a malloc'ed Byte * array containing pixel data in a simple RGB format (so the size of the byte array is 3 * the number of pixels).
My app is just a view controller with an instance of GLKView. I have implemented its delegate like so:
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
}
and as expected this sets the background color of the GLKView.
What I would now like to do is add code to this implementation of glkView:drawInRect: so that my "bitmap" is rendered into this GLKView. But I can't seem to find any way of doing that simply; I'm kind of overwhelmed by all the different things OpenGL can do, all of which are much more complex than what I'm trying to do here.
glReadPixels sort of seems to be what I'm after here, as it seems to provide a pointer to the buffer data.
Edit: apparently this can only be accomplished via the use of textures. I have attempted to implement this with this sample code (note that my "bitmap" here is 4 bytes per sample, matching the format parameters):
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
// raw data
int width = 30;
int height = 30;
int pixelCount = width * height;
int byteSize = pixelCount * 4;
GLubyte *textureData = (GLubyte *)malloc(byteSize);
for (int i = 0; i < byteSize; i++) {
textureData[i] = 255; // white
}
glEnable(GL_TEXTURE_2D);
GLuint textureID;
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glGenTextures(1, &textureID);
glBindTexture(GL_TEXTURE_2D, textureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0,
GL_RGBA, GL_UNSIGNED_BYTE, textureData);
free(textureData);
}
... but it is not working. The glClear() call works as expected and sets the entire background red; but if I understand the texture samples correctly, the code below it should draw a 30x30 white square in the corner, yet all I'm getting is the solid red background.
Can anybody spot what I'm doing wrong here?
Sorry for pulling you away from OpenGL :)
I would create a CGImage and then a UIImage:
void *baseAddress = textureData;
size_t bytesPerRow = width * 4;
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef cgImage = CGBitmapContextCreateImage(context);
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
UIImage *image = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage); // the UIImage retains the CGImage, so this reference can be released
and then draw it with UIImage's drawInRect:.
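For example, in a plain UIView subclass (a sketch; it assumes the UIImage built above is kept in an image ivar):
- (void)drawRect:(CGRect)rect
{
    // Plain UIKit drawing; no OpenGL needed for a static bitmap.
    [image drawInRect:self.bounds];
}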
Edit:
Here is the code I've written. This is not production code: it lacks OpenGL error checking and has some minor leaks and global variables, but it's a good place to start enhancing.
#import "ViewController.h"
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>
@interface ViewController ()
@end
@implementation ViewController
GLuint tex;
float vertices[] = {
// Position Texcoords
-1.0f, 1.0f, 0.0f, 0.0f, // Top-left
1.0f, 1.0f, 1.0f, 0.0f, // Top-right
1.0f, -1.0f, 1.0f, 1.0f, // Bottom-right
-1.0, -1.0f, 0.0f, 1.0f // Bottom-left
};
const char * vertexShader = "attribute vec2 position;\n"
"attribute vec2 TexCoordIn;\n"
"varying vec2 TexCoordOut;\n"
"void main() {\n"
"gl_Position = vec4(position, 0.0, 1.0);\n"
"TexCoordOut = TexCoordIn;\n"
"}\n";
const char * fragmentShader = "precision mediump float;\n"
"varying lowp vec2 TexCoordOut;\n"
"uniform sampler2D Texture;\n"
"void main() {\n"
"gl_FragColor = texture2D(Texture, TexCoordOut);\n"
"}\n";
GLuint shaderProgram;
GLuint vao;
GLuint vbo;
-(void) initOpenGLObjects {
glGenVertexArraysOES(1, &vao);
glBindVertexArrayOES(vao);
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D);
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vertexShader, NULL);
glCompileShader(vs);
GLint status;
glGetShaderiv(vs, GL_COMPILE_STATUS, &status);
char buffer[512];
glGetShaderInfoLog(vs, 512, NULL, buffer);
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fragmentShader, NULL);
glCompileShader(fs);
glGetShaderiv(fs, GL_COMPILE_STATUS, &status);
glGetShaderInfoLog(fs, 512, NULL, buffer);
shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vs);
glAttachShader(shaderProgram, fs);
glLinkProgram(shaderProgram);
GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
GLint texAttrib = glGetAttribLocation(shaderProgram, "TexCoordIn");
glGenBuffers(1, &vbo); // Generate 1 buffer
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 4*sizeof(float), 0);
glVertexAttribPointer(texAttrib, 2, GL_FLOAT, GL_FALSE, 4*sizeof(float), (void*)(2*sizeof(float)));
glEnableVertexAttribArray(posAttrib);
glEnableVertexAttribArray(texAttrib);
}
-(void) viewDidLoad{
[super viewDidLoad];
self.glkView.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:self.glkView.context];
CADisplayLink *displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(update)];
[displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];
self.glkView.delegate = self;
[self initOpenGLObjects];
}
- (void)update
{
[self.glkView setNeedsDisplay];
}
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
int width = 300;
int height = 300;
int pixelCount = width * height;
int byteSize = pixelCount * 4;
GLubyte *textureData = (GLubyte *)malloc(byteSize);
static int time = 0;
time = (time+1)%256;
for (int i = 0; i < byteSize; i+=4) {
textureData[i] = 255;
textureData[i+1] = time;
textureData[i+2] = 255;
textureData[i+3] = 255;
}
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0,
GL_RGBA, GL_UNSIGNED_BYTE, textureData);
glUseProgram(shaderProgram);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
free(textureData);
}
@end
I don't know how you want to update the data; maybe you don't have to malloc/free it every time, and can instead make small changes and upload them with glTexSubImage2D (see the sketch below). Most of the time is spent filling the data.
This code worked in the Xcode 7 simulators on my MacBook and on an iPhone 6 with iOS 9.0.2.
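To illustrate the glTexSubImage2D idea: once the texture has been allocated with glTexImage2D, the per-frame update could look like this (a sketch using the names from the code above):
// Upload new pixels into the existing texture storage instead of
// re-allocating it every frame with glTexImage2D.
glBindTexture(GL_TEXTURE_2D, tex);
glTexSubImage2D(GL_TEXTURE_2D, 0,
                0, 0,          // x/y offset within the texture
                width, height, // size of the region being replaced
                GL_RGBA, GL_UNSIGNED_BYTE, textureData);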
I'm having an issue where I have an OpenGL ES 2.0-based view that first draws to a texture and then renders that texture to the screen.
I instantiate this view as a subview of a main UIView with other buttons. For some reason, the texture doesn't render immediately when drawn, but only after one of the buttons is pressed.
Has anyone experienced anything like this? Thank you in advance,
Uba Duba
Here is the drawing and rendering code:
// Draws a line onscreen based on where the user touches
- (void)renderLineFromPoint:(CGPoint)start toPoint:(CGPoint)end
{
static GLfloat* vertexBuffer = NULL;
static NSUInteger vertexMax = 64;
NSUInteger vertexCount = 0, count, i;
[EAGLContext setCurrentContext:context];
glBindFramebuffer(GL_FRAMEBUFFER, fbTemp);
// Convert locations from Points to Pixels
CGFloat scale = self.contentScaleFactor;
start.x *= scale;
start.y *= scale;
end.x *= scale;
end.y *= scale;
// Allocate vertex array buffer
if(vertexBuffer == NULL)
vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat));
// Add points to the buffer so there are drawing points every X pixels
count = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)) / kBrushPixelStep), 1);
for(i = 0; i < count; ++i) {
if(vertexCount == vertexMax) {
vertexMax = 2 * vertexMax;
vertexBuffer = realloc(vertexBuffer, vertexMax * 2 * sizeof(GLfloat));
}
vertexBuffer[2 * vertexCount + 0] = start.x + (end.x - start.x) * ((GLfloat)i / (GLfloat)count);
vertexBuffer[2 * vertexCount + 1] = start.y + (end.y - start.y) * ((GLfloat)i / (GLfloat)count);
vertexCount += 1;
}
// Load data to the Vertex Buffer Object
glBindBuffer(GL_ARRAY_BUFFER, vboId);
glBufferData(GL_ARRAY_BUFFER, vertexCount*2*sizeof(GLfloat), vertexBuffer, GL_DYNAMIC_DRAW);
glEnableVertexAttribArray(pointShader.positionLoc);
glVertexAttribPointer(pointShader.positionLoc, 2, GL_FLOAT, GL_FALSE, 0, 0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, brushTexture.id);
// Draw
glUseProgram(pointShader.programObject);
// the brush texture will be bound to texture unit 0
glUniform1i(pointShader.textureLoc, 0);
// viewing matrices
GLKMatrix4 projectionMatrix = GLKMatrix4MakeOrtho(0, backingWidth, 0, backingHeight, -1, 1);
GLKMatrix4 modelViewMatrix = GLKMatrix4Identity; // this sample uses a constant identity modelView matrix
GLKMatrix4 MVPMatrix = GLKMatrix4Multiply(projectionMatrix, modelViewMatrix);
glUniformMatrix4fv(pointShader.MVPLoc, 1, GL_FALSE, MVPMatrix.m);
// point size
glUniform1f(pointShader.pointSizeLoc, brushTexture.width / kBrushScale);
// initialize brush color
glUniform4fv(pointShader.vertexColorLoc, 1, brushColor);
glDrawArrays(GL_POINTS, 0, vertexCount);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0);
[self drawResult];
}
and the drawResult function that does the rendering:
// Draw current result at the present render buffer.
- (void) drawResult
{
NSLog(#"Calling Draw Result");
// Bind the master frame buffer.
glBindFramebuffer(GL_FRAMEBUFFER, fbMaster);
// Clear the buffer.
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
// Draw the background board.
[self drawTexture:masterTexture ];
// Draw the temporary (drawing) board.
[self drawTexture:tempTexture];
// Bind the render buffer and present it to the view's context.
glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
and the drawTexture function that binds and draws the texture:
// Draw a texture at the full screen quad
- (void) drawTexture:(GLuint) textureID
{
// Positions to draw a texture at the full screen quad.
const GLfloat vVerticesStraight[] = {
-1.0f, 1, 0.0f, // Position 0
0.0f, 1.0f, // TexCoord 0
-1.0f, -1.0f, 0.0f, // Position 1
0.0f, 0.0f, // TexCoord 1
1.0f, -1.0f, 0.0f, // Position 2
1.0f, 0.0f, // TexCoord 2
1.0f, 1.0f, 0.0f, // Position 3
1.0f, 1.0f // TexCoord 3
};
// Index order for draw element.
const GLushort indices[] = { 0, 1, 2, 0, 2, 3 };
// Use the program object
glUseProgram ( drawTexture.programObject );
// Load the vertex position
glVertexAttribPointer ( drawTexture.a_positionLoc, 3, GL_FLOAT,
GL_FALSE, 5 * sizeof(GLfloat), vVerticesStraight );
// Load the texture coordinate
glVertexAttribPointer ( drawTexture.a_texCoordLoc, 2, GL_FLOAT,
GL_FALSE, 5 * sizeof(GLfloat), &vVerticesStraight[3] );
// Enable attribute for the vertex array.
glEnableVertexAttribArray ( drawTexture.a_positionLoc );
glEnableVertexAttribArray ( drawTexture.a_texCoordLoc );
// Bind the texture.
glActiveTexture(GL_TEXTURE0);
glBindTexture ( GL_TEXTURE_2D, textureID);
glUniform1i ( drawTexture.s_textureLoc, 0 );
// Draw the texture.
glDrawElements ( GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices );
// Bind the current texture to the default texture.
glBindTexture(GL_TEXTURE_2D, 0);
}
There is no background threading. What's very strange is that the first drawing seems to render to the texture but not to the screen; after pressing another button on the view, the texture renders to the screen.
I'm trying to render an array of VBOs, but the results are not right. I save them into an array when I draw them the first time, using the GLPaint (OpenGL ES v2) setup. Here is the sample project I have if you want to help me out: https://drive.google.com/file/d/0B0pG5vRVzBTzUTZPYWNoenhkcWs/edit?usp=sharing
////// Store VBOs while drawing; this is done in renderLineFromPoint: (from GLPaint)
- (void) renderLineFromPoint:(CGPoint)start toPoint:(CGPoint)end
{
static GLfloat* vertexBuffer = NULL;
static NSUInteger vertexMax = 64;
NSUInteger vertexCount = 0, count, i;
[EAGLContext setCurrentContext:context];
glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer);
// Convert locations from Points to Pixels
CGFloat scale = self.contentScaleFactor;
start.x *= scale;
start.y *= scale;
end.x *= scale;
end.y *= scale;
// Allocate vertex array buffer
if(vertexBuffer == NULL)
vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat));
// Add points to the buffer so there are drawing points every X pixels
count = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)) / kBrushPixelStep), 1);
for(i = 0; i < count; ++i)
{
if(vertexCount == vertexMax)
{
vertexMax = 2 * vertexMax;
vertexBuffer = realloc(vertexBuffer, vertexMax * 2 * sizeof(GLfloat));
}
vertexBuffer[2 * vertexCount + 0] = start.x + (end.x - start.x) * ((GLfloat)i / (GLfloat)count);
vertexBuffer[2 * vertexCount + 1] = start.y + (end.y - start.y) * ((GLfloat)i / (GLfloat)count);
vertexCount += 1;
}
glBindBuffer(GL_ARRAY_BUFFER, vboId);
glBufferData(GL_ARRAY_BUFFER, vertexCount*2*sizeof(GLfloat), vertexBuffer, GL_DYNAMIC_DRAW);
glEnableVertexAttribArray(ATTRIB_VERTEX);
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 0, 0);
// Render the vertex array
glVertexPointer(2, GL_FLOAT, 0, vertexBuffer);
glDrawArrays(GL_POINTS, 0, vertexCount);
// Display the buffer
glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER];
// Store VBO for undo
VBOHolderModel *vboHolder = [[VBOHolderModel alloc] init];
vboHolder.vertexCount = vertexCount;
vboHolder.vbo = vertexBuffer;
[vboHolderArray addObject:vboHolder];
}
////// Here is the problem: I clear the screen and then try to re-render the VBOs
-(void)renderSavedVertexBufferes
{
for (VBOHolderModel *vboh in vboHolderArray)
{
if (vboh.vertexCount == 0)
{
continue;
}
NSUInteger vertexCount = vboh.vertexCount;
glBindBuffer(GL_ARRAY_BUFFER, vboId);
glBufferData(GL_ARRAY_BUFFER, vertexCount*2*sizeof(GLfloat), vboh.vbo, GL_DYNAMIC_DRAW);
glEnableVertexAttribArray(ATTRIB_VERTEX);
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 0, 0);
// Render the vertex array
glVertexPointer(2, GL_FLOAT, 0, vboh.vbo);
glDrawArrays(GL_POINTS, 0, vertexCount);
// Display the buffer
glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
}
////// VBOHolderModel looks like this; it wraps the VBO data so it can be put in an NSMutableArray
@interface VBOHolderModel : NSObject
// Properties rather than bare ivars, so the dot syntax used above (vboh.vbo, vboh.vertexCount) compiles.
@property (nonatomic) GLfloat *vbo;
@property (nonatomic) NSUInteger vertexCount;
@end
It's basically an undo: when pressing undo, it should remove only the "3", because it was the last stroke, but instead it erases everything and draws the dots shown in the second image.
I found the answer to this question in the lines below:
// Allocate vertex array buffer
if(vertexBuffer == NULL)
vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat));
The problem is that the stored vertex arrays are all the same! The buffer was only being allocated once, on init, when vertexBuffer was NULL, so every stored stroke pointed at the same memory. Some of the other code has changed a bit too; if you want the updates, let me know.
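A minimal fix, using the names from the question: give each stroke its own copy of the vertex data before storing the holder, so later strokes can't overwrite it.
// Copy the current stroke's vertices into freshly allocated memory.
size_t strokeBytes = vertexCount * 2 * sizeof(GLfloat);
GLfloat *strokeCopy = (GLfloat *)malloc(strokeBytes);
memcpy(strokeCopy, vertexBuffer, strokeBytes);
VBOHolderModel *vboHolder = [[VBOHolderModel alloc] init];
vboHolder.vertexCount = vertexCount;
vboHolder.vbo = strokeCopy; // free() this when the holder is discarded
[vboHolderArray addObject:vboHolder];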
I'm trying to take a screenshot on my iPad with OpenGL ES. This does work, but there are blank spots in the image. These blank spots seem to be the rendered object. I've tried using the other buffers as well, but none of them seem to contain the actual 3D object.
I'm using the example code of the String SDK.
Image of the issue:
EAGLView.m
- (void)createFramebuffer
{
if (context && !defaultFramebuffer)
{
[EAGLContext setCurrentContext:context];
// Handle scale
if ([self respondsToSelector:@selector(setContentScaleFactor:)])
{
float screenScale = [UIScreen mainScreen].scale;
self.contentScaleFactor = screenScale;
}
// Create default framebuffer object.
glGenFramebuffers(1, &defaultFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
// Create color render buffer and allocate backing store.
glGenRenderbuffers(1, &colorRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &framebufferWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &framebufferHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);
// Create and attach depth buffer
glGenRenderbuffers(1, &depthRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, framebufferWidth, framebufferHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer);
// Bind color buffer
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
NSLog(#"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
}
}
Screenshot code
EAGLView.m
- (UIImage*)snapshot:(UIView*)eaglview
{
GLint backingWidth, backingHeight;
// Bind the color renderbuffer used to render the OpenGL ES view
// If your application only creates a single color renderbuffer which is already bound at this point,
// this call is redundant, but it is needed if you're dealing with multiple renderbuffers.
// Note, replace "_colorRenderbuffer" with the actual name of the renderbuffer object defined in your class.
//glBindRenderbufferOES(GL_RENDERBUFFER_OES, _colorRenderbuffer);
// Get the size of the backing CAEAGLLayer
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
NSInteger x = 0, y = 0, width = backingWidth, height = backingHeight;
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
// Read pixel data from the framebuffer
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
// Create a CGImage with the pixel data
// If your OpenGL ES content is opaque, use kCGImageAlphaNoneSkipLast to ignore the alpha channel
// otherwise, use kCGImageAlphaPremultipliedLast
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
// OpenGL ES measures data in PIXELS
// Create a graphics context with the target size measured in POINTS
NSInteger widthInPoints, heightInPoints;
if (NULL != UIGraphicsBeginImageContextWithOptions) {
// On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
// Set the scale parameter to your OpenGL ES view's contentScaleFactor
// so that you get a high-resolution snapshot when its value is greater than 1.0
CGFloat scale = eaglview.contentScaleFactor;
widthInPoints = width / scale;
heightInPoints = height / scale;
UIGraphicsBeginImageContextWithOptions(CGSizeMake(widthInPoints, heightInPoints), NO, scale);
}
else {
// On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
widthInPoints = width;
heightInPoints = height;
UIGraphicsBeginImageContext(CGSizeMake(widthInPoints, heightInPoints));
}
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
// UIKit coordinate system is upside down to GL/Quartz coordinate system
// Flip the CGImage by rendering it to the flipped bitmap context
// The size of the destination area is measured in POINTS
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, widthInPoints, heightInPoints), iref);
// Retrieve the UIImage from the current context
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// Clean up
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
return image;
}
String_OGL_TutorialViewController.m
- (void)render
{
[(EAGLView *)self.view setFramebuffer];
glDisable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST);
const int maxMarkerCount = 10;
struct MarkerInfoMatrixBased markerInfo[10];
int markerCount = [stringOGL getMarkerInfoMatrixBased: markerInfo maxMarkerCount: maxMarkerCount];
for (int i = 0; i < markerCount; i++)
{
float diffuse[4] = {0, 0, 0, 0};
diffuse[markerInfo[i].imageID % 3] = 1;
if ([context API] == kEAGLRenderingAPIOpenGLES2)
{
glUseProgram(program);
glUniform4fv(uniforms[UNIFORM_COLOR], 1, diffuse);
const float translationMatrix[16] = {1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, -cubeScale, 1};
float modelViewMatrix[16];
float modelViewProjectionMatrix[16];
[String_OGL_TutorialViewController multiplyMatrix: translationMatrix withMatrix: markerInfo[i].transform into: modelViewMatrix];
[String_OGL_TutorialViewController multiplyMatrix: modelViewMatrix withMatrix: projectionMatrix into: modelViewProjectionMatrix];
glUniformMatrix4fv(uniforms[UNIFORM_MVP], 1, GL_FALSE, modelViewProjectionMatrix);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, ((float *)NULL) + 6 * 4 * 3);
// Validate program before drawing. This is a good check, but only really necessary in a debug build.
// DEBUG macro must be defined in your debug configurations if that's not already the case.
#if defined(DEBUG)
if (![self validateProgram:program])
{
NSLog(#"Failed to validate program: %d", program);
return;
}
#endif
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_BYTE, NULL);
}
else
{
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);
glLightfv(GL_LIGHT0, GL_DIFFUSE, diffuse);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, 12, NULL);
glEnableClientState(GL_NORMAL_ARRAY);
glNormalPointer(GL_FLOAT, 12, ((float *)NULL) + 6 * 4 * 3);
glMatrixMode(GL_PROJECTION);
glLoadMatrixf(projectionMatrix);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glMultMatrixf(markerInfo[i].transform);
glTranslatef(0, 0, -cubeScale);
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_BYTE, NULL);
}
UIImage *img = [(EAGLView *)self.view snapshot: self.view];
UIImageWriteToSavedPhotosAlbum(img, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
[stringOGL pause];
}
}
I am drawing a pixel using GLKit. I can successfully draw the pixel at coordinates (10, 10) if I have:
glClearColor(0.65f, 0.65f, 0.65f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Prepare the effect for rendering
[self.effect prepareToDraw];
GLfloat points[] =
{
10.0f, 10.0f,
};
glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
GLuint bufferObjectNameArray;
glGenBuffers(1, &bufferObjectNameArray);
glBindBuffer(GL_ARRAY_BUFFER, bufferObjectNameArray);
glBufferData(
GL_ARRAY_BUFFER,
sizeof(points),
points,
GL_STATIC_DRAW);
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(
GLKVertexAttribPosition,
2,
GL_FLOAT,
GL_FALSE,
2*4,
NULL);
glDrawArrays(GL_POINTS, 0, 1);
But I want to decide at runtime how many pixels to draw and exactly where to draw them, so I tried this; however, it draws the pixel at (10, 0), so something's wrong here:
glClearColor(0.65f, 0.65f, 0.65f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Prepare the effect for rendering
[self.effect prepareToDraw];
GLfloat *points = (GLfloat*)malloc(sizeof(GLfloat) * 2);
for (int i=0; i<2; i++) {
points[i] = 10.0f;
}
glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
GLuint bufferObjectNameArray;
glGenBuffers(1, &bufferObjectNameArray);
glBindBuffer(GL_ARRAY_BUFFER, bufferObjectNameArray);
glBufferData(
GL_ARRAY_BUFFER,
sizeof(points),
points,
GL_STATIC_DRAW);
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(
GLKVertexAttribPosition,
2,
GL_FLOAT,
GL_FALSE,
2*4,
NULL);
glDrawArrays(GL_POINTS, 0, 1);
Kindly help me out.
Edit:
Problem
Actually, the problem is that I can't figure out the difference between:
GLfloat points[] =
{
10.0f, 10.0f,
};
AND
GLfloat *points = (GLfloat*)malloc(sizeof(GLfloat) * 2);
for (int i=0; i<2; i++) {
points[i] = 10.0f;
}
My bet is that the problem is the sizeof(points) in the glBufferData call: points is a pointer here, so sizeof returns the size of the pointer, one word (4 bytes on 32-bit iOS, 8 on 64-bit), rather than the size of the array. You should pass the real size of your array, which is the same byte count you calculated in the malloc code.
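A sketch of the corrected upload (pointCount is an illustrative name; the rest mirrors the question's code):
// sizeof(points) on a pointer yields the pointer size, so only the first
// float was being uploaded and the y coordinate came from uninitialized
// buffer memory (hence the pixel landing at (10, 0)).
int pointCount = 2; // number of GLfloats (one 2D vertex)
GLfloat *points = (GLfloat *)malloc(sizeof(GLfloat) * pointCount);
points[0] = 10.0f;
points[1] = 10.0f;
glBufferData(GL_ARRAY_BUFFER,
             sizeof(GLfloat) * pointCount, // explicit byte count, not sizeof(points)
             points,
             GL_STATIC_DRAW);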