I copied this code straight out of a book and I don't know what the problem is. I am getting an EXC_BAD_ACCESS runtime error on the very last line, where glTexImage2D is called. I think it's coming from the pixelData pointer, but I don't know what the problem is. If any geniuses out there could help, I'd greatly appreciate it. I am using Xcode 6.
#import "TextureHelper.h"
@implementation TextureHelper
-(void) createTextureFromImage: (NSString *) picName{
UIImage *pic = [UIImage imageNamed:picName];
int scaleFactor = 1;
float iOSVersion = [[[UIDevice currentDevice] systemVersion] floatValue];
if(iOSVersion >= 4.0){
if(pic.scale >= 2){
scaleFactor = pic.scale;
}
}
if(pic){
//set texture dimensions
width = pic.size.width * scaleFactor;
height = pic.size.height * scaleFactor;
/*
if ((width & (width-1)) != 0 || (height & (height-1)) != 0 || width > 2048 || height > 2048) {
NSLog(@"ERROR: %@ width and/or height is not a power of 2 or is > 2048!", picName);
}
*/
GLubyte *pixelData = [self generatePixelDataFromImage:pic];
[self generateTexture:pixelData];
int memory = width*height*4;
NSLog(#"%#, Size:%i KB, ID:%i", picName, memory/1024, textureID);
free(pixelData);
}else{
NSLog(#"ERROR:%# not found, texture not created.",picName);
}
}
-(GLubyte *) generatePixelDataFromImage: (UIImage *) pic{
GLubyte *pixelData = (GLubyte *) calloc(width * height *4, sizeof(GLubyte));
CGColorSpaceRef imageCS = CGImageGetColorSpace(pic.CGImage);
CGBitmapInfo bitmapInfo = (CGBitmapInfo) kCGImageAlphaPremultipliedLast;
CGContextRef gc = CGBitmapContextCreate(pixelData, width, height, 8, width*4, imageCS, bitmapInfo);
CGContextDrawImage(gc, CGRectMake(0, 0, width, height), pic.CGImage);
CGContextRelease(gc);
return pixelData;
}
-(void) generateTexture: (GLubyte * ) pixelData{
//Create GL Texture
glGenTextures(1, &textureID);
glBindTexture(GL_TEXTURE_2D, textureID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, &pixelData);
}
@end
I fixed this myself: I had stupidly passed &pixelData instead of pixelData in the glTexImage2D call.
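For reference, a minimal sketch of the corrected call (the only change is dropping the &, so glTexImage2D receives the pixel buffer itself rather than the address of the local pointer variable):

//pixelData already points to the pixel buffer, so pass it directly
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixelData);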
I'm drawing text with OpenGL ES 2.0, but the result doesn't look good (especially the bottom of "t"):
(It looks somewhat pixelated and blurred.)
I don't know exactly why, since I load the font at a high resolution (200 px or 400 px, for example).
I draw each character into an image, and those images compose the texture atlas, so all the characters can be displayed at any time without reloading. I scale the texture with OpenGL (the MVP matrix), but it shouldn't look this bad anyway.
Here's a UIImage of the character "t" that I retrieve with the method below, before it's processed with OpenGL:
(You can see it looks much better without OpenGL.)
Here's the code I use to create an image from a character:
- (id)initWithString:(NSString *)string dimensions:(CGSize)dimensions alignment:(NSTextAlignment)alignment fontName:(NSString *)name fontSize:(CGFloat)size {
NSUInteger width, height, i;
CGContextRef context;
void *data;
CGColorSpaceRef colorSpace;
UIFont *font;
font = [UIFont fontWithName:name size:size];
width = (NSUInteger)dimensions.width;
height = (NSUInteger)dimensions.height;
while(dimensions.width > kMaxTextureSize && size > 0) {
font = [UIFont fontWithName:name size: --size];
dimensions = [string sizeWithAttributes:@{NSFontAttributeName: font}];
width = (GLuint)NextPowerOfTwo((int)dimensions.width);
height = (GLuint)NextPowerOfTwo((int)dimensions.height);
}
colorSpace = CGColorSpaceCreateDeviceRGB();
data = calloc(1, width * height * 4);
context = CGBitmapContextCreate(data,
width, height,
8,
4 * width,
colorSpace,
kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Big);
CGColorSpaceRelease(colorSpace);
CGContextSetRGBFillColor(context, 0.f, 0.f, 0.f, 1);
CGContextTranslateCTM(context, 0.0, height);
CGContextScaleCTM(context, 1.0f, -1.0f); //NOTE: NSString draws in UIKit referential i.e. renders upside-down compared to CGBitmapContext referential
UIGraphicsPushContext(context);
NSMutableParagraphStyle *style = [[NSParagraphStyle defaultParagraphStyle] mutableCopy];
[style setLineBreakMode:NSLineBreakByWordWrapping];
UIColor *whiteColor = [UIColor whiteColor];
NSDictionary *attrsDictionary = @{
NSFontAttributeName: font,
NSBaselineOffsetAttributeName: @1.0F,
NSParagraphStyleAttributeName: style,
NSForegroundColorAttributeName: whiteColor
};
CGRect destRect = CGRectMake(0, 0, dimensions.width, dimensions.height);
CGContextSetTextDrawingMode(context, kCGTextFill);
CGContextSetAllowsAntialiasing(context, YES);
CGContextSetShouldAntialias(context, YES);
CGContextSetShouldSmoothFonts(context, YES);
[string drawInRect:destRect
withAttributes:attrsDictionary];
CGImageRef cgImageFinal = CGBitmapContextCreateImage(context);
UIGraphicsPopContext();
self = [self initWithData:data pixelFormat:GL_RGBA
pixelsWide:width pixelsHigh:height];
CGContextRelease(context);
free(data);
return self;
}
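NextPowerOfTwo isn't shown above; it's assumed to be the usual helper that rounds a dimension up to the next power of two, something like this:

static int NextPowerOfTwo(int n) {
    int result = 1;
    //keep doubling until result reaches or exceeds n
    while (result < n) {
        result <<= 1;
    }
    return result;
}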
And here's the code I use to create an OpenGL texture from it:
- (id)initWithData:(const void *)data
pixelFormat:(GLenum)pixelFormat
pixelsWide:(NSUInteger)width
pixelsHigh:(NSUInteger)height {
if ((self = [super init])) {
glActiveTexture(GL_TEXTURE0);
glGenTextures(1, &_texture);
glBindTexture(GL_TEXTURE_2D, _texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
switch (pixelFormat) {
case GL_RGBA:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
break;
case GL_RGB:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_SHORT_5_6_5, data);
break;
case GL_ALPHA:
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, width, height, 0, GL_ALPHA, GL_UNSIGNED_BYTE, data);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@""];
}
self.width = width;
self.height = height;
}
return self;
}
Any help would be much appreciated!
I'm trying to load a screenshot into an OpenGL ES texture, but when I display it, it is just black.
Here is how I get the screenshot:
UIGraphicsBeginImageContext(self.bounds.size);
[self.layer renderInContext:UIGraphicsGetCurrentContext()];
UIImage* image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
and here is how I'm trying to load the image into the Texture:
GLuint setupTextureFromImage(UIImage* image) {
CGImageRef spriteImage = image.CGImage;
if (!spriteImage) {
NSLog(#"Failed to load image");
exit(1);
}
size_t width = CGImageGetWidth(spriteImage);
size_t height = CGImageGetHeight(spriteImage);
GLubyte * spriteData = (GLubyte *) calloc(width*height*4, sizeof(GLubyte));
CGContextRef spriteContext = CGBitmapContextCreate(spriteData, width, height, 8, width*4, CGImageGetColorSpace(spriteImage), kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Host);
CGContextDrawImage(spriteContext, CGRectMake(0, 0, width, height), spriteImage);
CGContextRelease(spriteContext);
GLuint texName;
glGenTextures(1, &texName);
glBindTexture(GL_TEXTURE_2D, texName);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int) width, (int) height, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);
free(spriteData);
return texName;
}
If I load an image from the resources with [UIImage imageNamed:@"FileName.png"] it works, and if I set the screenshot as the background color it also works, so both parts work separately.
I am using OpenGL to do my drawing on screen and want to render a portion of the screen into a UIImage. This is code I've cobbled together from examples; it works, but it produces a memory leak every time it's called. I've tested this on device and the leak persists and eventually causes a crash. The returned UIImage is saved to an instance variable and is later definitely set to nil again. What is causing the memory leak here?
- (UIImage *)renderToImageWithContentFrame:(CGRect)contentFrame
exportFrame:(CGRect)exportFrame
scale:(float)scale {
float screenWidth = contentFrame.size.width * scale;
float screenHeight = contentFrame.size.height * scale;
GLuint framebuffer;
glGenFramebuffers(1, &framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
GLuint renderTex;
glGenTextures(1, &renderTex);
glBindTexture(GL_TEXTURE_2D, renderTex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, screenWidth, screenHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, renderTex, 0);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glViewport(0, 0, screenWidth, screenHeight);
[self _renderImageGL];
// grabbing image from FBO
NSInteger dataLength = screenWidth * screenHeight * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(0, 0, screenWidth, screenHeight, GL_RGBA, GL_UNSIGNED_BYTE, data);
glDeleteFramebuffers(1, &framebuffer);
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(screenWidth, screenHeight, 8, 32, screenWidth * 4, colorspace,
kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
float x = (exportFrame.origin.x - contentFrame.origin.x) * scale;
float y = (exportFrame.origin.y - contentFrame.origin.y) * scale;
float width = exportFrame.size.width * scale;
float height = exportFrame.size.height * scale;
CGImageRef cropped = CGImageCreateWithImageInRect(iref, (CGRect){x, y, width, height});
UIGraphicsBeginImageContext((CGSize){width, height});
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, (CGRect){0, 0, width, height}, cropped);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
CGImageRelease(cropped);
return image;
}
The texture renderTex is never freed (via a call to glDeleteTextures(1, &renderTex)). My guess is that Instruments may not know about OpenGL-allocated memory, because that memory may reside on the graphics hardware and may be harder (or impossible) to track. (Though that's mainly a guess.)
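A minimal sketch of the fix based on that answer, deleting the texture alongside the framebuffer once the pixels have been read back:

// after glReadPixels has copied the pixels out of the FBO,
// release the GL objects created for this render
glDeleteFramebuffers(1, &framebuffer);
glDeleteTextures(1, &renderTex); // the missing call that frees the texture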
Sorry for my English.
I want to display video from a file, where the frames are 4 bytes per pixel, BGRA, 1280x720.
On the Mac I just pulled out a frame and drew it with glDrawPixels, and that works, but OpenGL ES does everything differently.
Here's the code from the Mac:
int pos = 0;
NSData *data = [[NSData alloc] initWithContentsOfFile:@"video.raw"];
glViewport(0,0,width,height);
glLoadIdentity();
glOrtho(0, width, 0, height, -1.0, 1.0);
glPixelZoom(1, -1);
glClear(GL_COLOR_BUFFER_BIT);
//glRasterPos2i(0, height);
glRasterPos2i(0, 0);
glDrawPixels(1280, 720, GL_BGRA, GL_UNSIGNED_BYTE, [data bytes]+pos);
glFinish();
Push the data into a texture with glTexSubImage2D and render that texture. Note though that the texture has to have power-of-two dimensions, so in your case you can make it 2048x1024 but update only the 1280x720 part:
CGSize videoSize = CGSizeMake(1280.0f, 720.0f); //the frame size from the question
CGSize textureSize;
GLuint dimension = 1;
while (videoSize.width > dimension) {
dimension <<= 1;
}
textureSize = CGSizeMake(dimension, .0f);
dimension = 1;
while (videoSize.height > dimension) {
dimension <<= 1;
}
textureSize = CGSizeMake(textureSize.width, dimension);
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, textureSize.width, textureSize.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
GLfloat textureCoordinates[] = {
.0f, .0f,
.0f, videoSize.height/textureSize.height,
videoSize.width/textureSize.width, .0f,
videoSize.width/textureSize.width, videoSize.height/textureSize.height
};
To update the texture:
void *data; //pointer to the current frame's raw pixel bytes
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, data);
Then just draw your textured quad.
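A sketch of that last step, assuming the fixed-function (OpenGL ES 1.x) pipeline; the vertex positions below are placeholders for a full-screen quad, textureCoordinates is the array defined above, and the v orientation may need flipping depending on how the frame data is laid out:

GLfloat vertices[] = {
    -1.0f,  1.0f,   //left, top
    -1.0f, -1.0f,   //left, bottom
     1.0f,  1.0f,   //right, top
     1.0f, -1.0f    //right, bottom
};
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, texture);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glVertexPointer(2, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, textureCoordinates);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);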
I have 6 squares, each made up of 2 triangles, and each square is supposed to have a different texture mapped onto it. Instead, every square ends up with the last bound texture instead of its own. Here's my drawView and setupView:
- (void)drawView:(GLView*)view
{
glBindTexture(GL_TEXTURE_2D, texture[0]);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
static const Vertex3D vertices[] = {
{0,0, 1}, //TL
{ 1024,0, 1}, //TR
{0,-1024, 1}, //BL
{ 1024.0f, -1024.0f, 1} //BR
};
static const GLfloat texCoords[] = {
0.0, 1.0,
1.0, 1.0,
0.0, 0.0,
1.0, 0.0
};
glVertexPointer(3, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
- (void)setupView:(GLView*)view {
// Bind the number of textures we need.
glGenTextures(1, &texture[0]);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_GENERATE_MIPMAP,GL_TRUE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glLoadIdentity();
NSString *path = [[NSBundle mainBundle] pathForResource:filename ofType:@"jpg"];
NSData *texData = [[NSData alloc] initWithContentsOfFile:path];
UIImage *image = [[UIImage alloc] initWithData:texData];
if (image == nil)
NSLog(#"Do real error checking here");
GLuint width = CGImageGetWidth(image.CGImage);
GLuint height = CGImageGetHeight(image.CGImage);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
void *imageData = malloc( height * width * 4 );
CGContextRef context = CGBitmapContextCreate( imageData, width, height, 8, 4 * width, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big );
// Flip the Y-axis
CGContextTranslateCTM (context, 0, height);
CGContextScaleCTM (context, 1.0, -1.0);
CGColorSpaceRelease( colorSpace );
CGContextClearRect( context, CGRectMake( 0, 0, width, height ) );
CGContextDrawImage( context, CGRectMake( 0, 0, width, height ), image.CGImage );
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
CGContextRelease(context);
free(imageData);
}
You're always using texture[0], so you will indeed get the same texture every time. You need to pass glBindTexture() the id of the texture you actually want.
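A sketch of what that looks like, assuming the six texture names live in a texture[6] array and that verticesForSquare is a placeholder for your own per-square vertex data:

- (void)drawView:(GLView*)view
{
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    for (int i = 0; i < 6; i++) {
        //bind this square's own texture before issuing its draw call
        glBindTexture(GL_TEXTURE_2D, texture[i]);
        glVertexPointer(3, GL_FLOAT, 0, verticesForSquare[i]);
        glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    }
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    glDisableClientState(GL_VERTEX_ARRAY);
}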
I think the problem is related to the texture binding and in particular to this line:
glBindTexture(GL_TEXTURE_2D, texture[0]);
Double-check that you pass the right GLuint value when binding the texture.
Are you using shaders? If so, double-check them as well, though that is most probably not the cause.
I also suggest using a texture atlas so you don't hurt the engine's overall performance by binding a different texture on the GPU for every draw.
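As an illustration of the atlas idea, here is a sketch that assumes all six face images are packed into one texture as a 3x2 grid (the grid layout is an assumption, not something from the original code); with an atlas you bind the single texture once and only swap the texture coordinates per face:

//texture coordinates for face i (0..5) in a 3-column by 2-row atlas,
//in the same TL, TR, BL, BR order as the texCoords array in drawView
static void texCoordsForFace(int i, GLfloat outCoords[8]) {
    const int kAtlasCols = 3;
    const int kAtlasRows = 2;
    GLfloat cellW = 1.0f / kAtlasCols;
    GLfloat cellH = 1.0f / kAtlasRows;
    GLfloat u0 = (i % kAtlasCols) * cellW;
    GLfloat v0 = (i / kAtlasCols) * cellH;
    outCoords[0] = u0;         outCoords[1] = v0 + cellH; //TL
    outCoords[2] = u0 + cellW; outCoords[3] = v0 + cellH; //TR
    outCoords[4] = u0;         outCoords[5] = v0;         //BL
    outCoords[6] = u0 + cellW; outCoords[7] = v0;         //BR
}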