OpenGL live recording not working - iOS

I've got a UIView that uses OpenGL for drawing. I'm trying to implement a basic recording feature, as shown here.
All of the setup works fine, as does the OpenGL drawing. But I get an error on the third or fourth frame that the adaptor tries to append (the first few go OK).
I think it's a problem with the CVPixelBufferPoolCreatePixelBuffer call, but I can't track it down...
Here's the code. The recording code is near the bottom:
#import "ochrDrawingView.h"
#define POINT_FREQUENCY_CONSTANT 0.3 // 1 is a point for every pixel of distance. Note: performance drops above 0.5 on long lines.
#define ARBITRARY_BUFFER_SIZE 2048
typedef struct {
GLfloat Position[2];
GLfloat TextureCoordinates[2];
} TexturedVertex;
typedef struct {
TexturedVertex BottomLeft;
TexturedVertex BottomRight;
TexturedVertex TopLeft;
TexturedVertex TopRight;
} TexturedSquare;
#pragma mark - Implementation
@implementation ochrDrawingView {
// OpenGL
EAGLContext *context;
CAEAGLLayer *eaglLayer;
GLuint renderBuffer;
GLuint frameBuffer;
GLKBaseEffect *effect;
GLint backingWidth;
GLint backingHeight;
// Drawing
CGSize brushSize;
float brushScale;
BOOL isFirstTouch;
CGPoint origin;
CGPoint midpoint;
CGPoint destination;
// Concurrency
dispatch_queue_t recordingQueue;
// Video Recording
AVAssetWriter *videoWriter;
AVAssetWriterInput *videoWriterInput;
AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
BOOL isRecording;
BOOL hasFinishedRecording;
NSDate *recordingBeganAt;
void* bitmapData;
}
#pragma mark - Setup
- (id)initWithCoder:(NSCoder *)aDecoder {
NSLog(@"About to initWithCoder...");
self = [super initWithCoder:aDecoder];
if (self) {
isRecording = NO;
[self setupLayer];
[self setupContext];
[self setupRenderBuffer];
[self setupFrameBuffer];
[self setViewportParameters];
[self setupBaseEffect];
[self eraseScreen];
recordingQueue = dispatch_queue_create("recordingQueue", NULL);
}
return self;
}
+ (Class) layerClass {
return [CAEAGLLayer class];
}
- (void) setupContext {
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:context];
}
- (void) setupLayer {
eaglLayer = (CAEAGLLayer *) self.layer;
[eaglLayer setOpaque:YES];
[eaglLayer setDrawableProperties: [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil]];
}
- (void)setupRenderBuffer {
glGenRenderbuffers(1, &renderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:eaglLayer];
}
- (void) setViewportParameters {
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
NSLog(@"Width: %d, Height: %d", backingWidth, backingHeight);
glViewport(0, 0, backingWidth, backingHeight);
}
- (void) setupFrameBuffer {
glGenFramebuffers(1, &frameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBuffer);
}
- (void) setupBaseEffect {
effect = [[GLKBaseEffect alloc] init];
// Load the texture into the effect
NSError *error;
NSDictionary *options = @{ GLKTextureLoaderOriginBottomLeft: @YES };
NSString *path = [[NSBundle mainBundle] pathForResource:@"brush.png" ofType:nil];
GLKTextureInfo *texture = [GLKTextureLoader textureWithContentsOfFile:path options:options error:&error];
if (texture == nil) NSLog(@"Texture failed to load. Error: %@", error.localizedDescription);
effect.texture2d0.name = texture.name;
effect.texture2d0.enabled = GL_TRUE;
// Set the brushSize (used later, in drawing method)
brushSize = CGSizeMake(texture.width, texture.height);
brushScale = 0.6;
// Set up a projection and modelview matrix. maybe:
GLKMatrix4 projectionMatrix = GLKMatrix4MakeOrtho(0, backingWidth, 0, backingHeight, 0, 1.0f);
effect.transform.projectionMatrix = projectionMatrix;
[effect prepareToDraw];
}
- (void) eraseScreen {
[EAGLContext setCurrentContext:context];
// Clear the buffer
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
glClearColor(1.0, 1.0, 1.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
// Display the buffer
glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
#pragma mark - Touch Response
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
isFirstTouch = YES;
origin = [[touches anyObject] locationInView:self];
origin.y = backingHeight - origin.y;
[self drawPointAtPoint:origin];
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
if (isFirstTouch) {
isFirstTouch = NO;
midpoint = [[touches anyObject] locationInView:self];
midpoint.y = backingHeight - midpoint.y;
} else {
destination = [[touches anyObject] locationInView:self];
destination.y = backingHeight - destination.y;
[self drawCubicLineFromPoint:origin HalfwayToPoint:destination WithControl:midpoint];
}
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
[self touchesMoved:touches withEvent:event];
}
#pragma mark - Drawing Algorithms
- (void)drawCubicLineFromPoint:(CGPoint)start HalfwayToPoint:(CGPoint)end WithControl:(CGPoint)control {
static GLfloat tempVertexBuffer[ARBITRARY_BUFFER_SIZE];
static GLubyte tempIndexBuffer[ARBITRARY_BUFFER_SIZE];
int vertexCount = 0;
int indexCount = 0;
float pointCount;
// [EAGLContext setCurrentContext:context];
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
// Get the number of points to be drawn between the two points
float distance = sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y));
pointCount = MAX(ceilf(distance * POINT_FREQUENCY_CONSTANT), 1);
// Adjust the size of the brush based on the (rough) speed of the stroke
if (distance > 20) {
brushScale = 0.5;
} else {
brushScale = 0.6;
}
// Get the adjustment value used to center each texture (the brush image is a square, so here I use only width)
float positionAdjustmentToCenterTexture = brushScale * brushSize.width / 2.0;
// Iterate through the points to be drawn, drawing a TexturedSquare (more or less) for each.
float t = 0.0, x, y;
for (float i = 0; i < pointCount; i++) {
x = powf(1 - t, 2) * start.x + 2 * (1 - t) * t * control.x + t * t * end.x;
y = powf(1 - t, 2) * start.y + 2 * (1 - t) * t * control.y + t * t * end.y;
t += 0.5 / pointCount;
// Bottom-left vertex
tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 0;
tempVertexBuffer[4 * vertexCount + 3] = 0;
vertexCount++;
// Bottom-right vertex
tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 1;
tempVertexBuffer[4 * vertexCount + 3] = 0;
vertexCount++;
// Top-left vertex
tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 0;
tempVertexBuffer[4 * vertexCount + 3] = 1;
vertexCount++;
// Top-right vertex
tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 1;
tempVertexBuffer[4 * vertexCount + 3] = 1;
vertexCount++;
// Add the indices for the triangles
tempIndexBuffer[indexCount++] = vertexCount - 4;
tempIndexBuffer[indexCount++] = vertexCount - 3;
tempIndexBuffer[indexCount++] = vertexCount - 2;
tempIndexBuffer[indexCount++] = vertexCount - 3;
tempIndexBuffer[indexCount++] = vertexCount - 2;
tempIndexBuffer[indexCount++] = vertexCount - 1;
}
origin = CGPointMake(x, y); // sets the origin to the last point drawn
midpoint = end; // sets the midpoint to the previous destination.
long pointer = (long)&tempVertexBuffer;
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, Position));
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, TextureCoordinates));
glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_BYTE, tempIndexBuffer);
// Display the buffer
glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer);
[context presentRenderbuffer:GL_RENDERBUFFER];
// If recording, record this frame
if (isRecording) {
dispatch_async(recordingQueue, ^{
[self writeCurrentFrame];
});
}
}
- (void)drawStraightLineFromPoint:(CGPoint)start ToPoint:(CGPoint)end {
GLfloat tempVertexBuffer[128];
GLubyte tempIndexBuffer[128];
int vertexCount = 0;
int indexCount = 0;
float pointCount;
// Dumb setup stuff.
[EAGLContext setCurrentContext:context];
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
// Get the number of points to be drawn between the two points
pointCount = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)) * POINT_FREQUENCY_CONSTANT), 1);
// Get the adjustment value used to center each texture (the brush image is a square, so here I use only width)
float positionAdjustmentToCenterTexture = brushScale * brushSize.width / 2.0;
// Iterate through the points to be drawn, drawing a TexturedSquare (more or less) for each.
float x;
float y;
for (float i = 0; i < pointCount; i++) {
// Set the x and y coordinates for each points, interpolating based on the distance
x = start.x + ((end.x - start.x) * (i / pointCount));
y = start.y + (end.y - start.y) * (i / pointCount);
// Bottom-left vertex
tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 0;
tempVertexBuffer[4 * vertexCount + 3] = 0;
vertexCount++;
// Bottom-right vertex
tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 1;
tempVertexBuffer[4 * vertexCount + 3] = 0;
vertexCount++;
// Top-left vertex
tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 0;
tempVertexBuffer[4 * vertexCount + 3] = 1;
vertexCount++;
// Top-right vertex
tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 1;
tempVertexBuffer[4 * vertexCount + 3] = 1;
vertexCount++;
// Add the indices for the triangles
tempIndexBuffer[indexCount++] = vertexCount - 4;
tempIndexBuffer[indexCount++] = vertexCount - 3;
tempIndexBuffer[indexCount++] = vertexCount - 2;
tempIndexBuffer[indexCount++] = vertexCount - 3;
tempIndexBuffer[indexCount++] = vertexCount - 2;
tempIndexBuffer[indexCount++] = vertexCount - 1;
}
long pointer = (long)&tempVertexBuffer;
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, Position));
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, TextureCoordinates));
// Last parameter may be wrong...
glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_BYTE, tempIndexBuffer);
// Display the buffer
glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
- (void)drawPointAtPoint:(CGPoint)start {
GLfloat tempVertexBuffer[16];
GLubyte tempIndexBuffer[6];
int vertexCount = 0;
int indexCount = 0;
// Dumb setup stuff.
// [EAGLContext setCurrentContext:context];
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
// Get the adjustment value used to center each texture (the brush image is a square, so here I use only width)
float positionAdjustmentToCenterTexture = brushScale * brushSize.width / 2.0;
// Iterate through the points to be drawn, drawing a TexturedSquare (more or less) for each.
float x = start.x;
float y = start.y;
// Bottom-left vertex
tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 0;
tempVertexBuffer[4 * vertexCount + 3] = 0;
vertexCount++;
// Bottom-right vertex
tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 1;
tempVertexBuffer[4 * vertexCount + 3] = 0;
vertexCount++;
// Top-left vertex
tempVertexBuffer[4 * vertexCount + 0] = x - positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 0;
tempVertexBuffer[4 * vertexCount + 3] = 1;
vertexCount++;
// Top-right vertex
tempVertexBuffer[4 * vertexCount + 0] = x + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 1] = y + positionAdjustmentToCenterTexture;
tempVertexBuffer[4 * vertexCount + 2] = 1;
tempVertexBuffer[4 * vertexCount + 3] = 1;
vertexCount++;
// Add the indices for the triangles
tempIndexBuffer[indexCount++] = vertexCount - 4;
tempIndexBuffer[indexCount++] = vertexCount - 3;
tempIndexBuffer[indexCount++] = vertexCount - 2;
tempIndexBuffer[indexCount++] = vertexCount - 3;
tempIndexBuffer[indexCount++] = vertexCount - 2;
tempIndexBuffer[indexCount++] = vertexCount - 1;
long pointer = (long)&tempVertexBuffer;
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, Position));
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *)pointer + offsetof(TexturedVertex, TextureCoordinates));
// Last parameter may be wrong...
glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_BYTE, tempIndexBuffer);
// Display the buffer
glBindRenderbuffer(GL_RENDERBUFFER, renderBuffer);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
#pragma mark - Video Recorder Functions
- (BOOL) setupVideoWriter {
NSError* error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[self temporaryFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
//Configure video
NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
[NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:bufferAttributes];
//add input
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
return YES;
}
- (CGContextRef) getContextOfSize:(CGSize)size {
CGContextRef tempContext = NULL;
CGColorSpaceRef colorSpace;
int bitmapByteCount;
int bitmapBytesPerRow;
bitmapBytesPerRow = size.width * 4;
bitmapByteCount = bitmapBytesPerRow * size.height;
colorSpace = CGColorSpaceCreateDeviceRGB();
if (bitmapData != NULL) {
free(bitmapData);
}
bitmapData = malloc( bitmapByteCount );
if (bitmapData == NULL) {
fprintf (stderr, "Memory not allocated!");
return NULL;
}
tempContext = CGBitmapContextCreate (bitmapData,
size.width,
size.height,
8,
bitmapBytesPerRow,
colorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextSetAllowsAntialiasing(tempContext, NO);
if (tempContext == NULL) {
free (bitmapData);
fprintf (stderr, "Context not created!");
return NULL;
}
CGColorSpaceRelease(colorSpace);
return tempContext;
}
- (void) writeCurrentFrame {
NSLog(@"writeCurrentFrame called");
// Get a context
CGContextRef videoContext = [self getContextOfSize:self.frame.size];
// Render the current screen into that context
[self.layer renderInContext:videoContext];
// Get a CGImage from the context
CGImageRef cgImage = CGBitmapContextCreateImage(videoContext);
// Check if the AVAssetWriterInput is ready for more data
if (![videoWriterInput isReadyForMoreMediaData]) {
NSLog(@"Not ready for video data");
} else {
// If it is, convert the CGImage into a CVPixelBufferReference
CVPixelBufferRef pixelBuffer = NULL;
CFDataRef cfImage = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
if (status != 0) {
NSLog(@"Error creating pixel buffer. Status: %d", status);
} else {
NSLog(@"No error creating the pixel buffer...");
}
// Set image data into pixel buffer
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
uint8_t* destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
CFDataGetBytes(cfImage, CFRangeMake(0, CFDataGetLength(cfImage)), destPixels);
// If all's well so far, append the pixelbuffer to the adaptor
if (status == 0) {
float millisecondsSinceStart = [[NSDate date] timeIntervalSinceDate:recordingBeganAt];
BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:CMTimeMake((int) millisecondsSinceStart, 1000)];
if (!success) {
NSLog(@"Warning: Unable to write buffer to video.");
} else {
NSLog(@"Success! Was able to write buffer to video.");
}
}
// Clean up
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
// CVPixelBufferRelease(pixelBuffer);
CFRelease(cfImage);
CGImageRelease(cgImage);
CGContextRelease(videoContext);
}
}
- (BOOL) completeRecordingSession {
return YES;
}
- (NSURL *) temporaryFileURL {
NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
NSURL* outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSLog(@"Will try to store the file at %@", outputURL);
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSLog(@"There is already a file there - trying to delete it...");
NSError* error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
NSLog(@"Could not delete old recording file at path: %@", outputPath);
} else {
NSLog(@"Successfully deleted file. The new file can be stored at %@", outputURL);
}
} else {
NSLog(@"File can be stored at %@", outputURL);
}
return outputURL;
}
- (BOOL) startRecording {
isRecording = YES;
return [self setupVideoWriter];
}
- (BOOL) stopRecording {
return YES;
}
#pragma mark - Helper Functions
- (void) logLocationOfPoint:(CGPoint)point {
NSLog(@"point at { %d, %d }", (int) point.x, (int) point.y);
}
@end

For anyone curious:
The above method takes a flawed approach. In general, it's much better to render into a texture, and then from the texture into a renderbuffer. The performance hit you take for the extra step is minimal, and this approach is much more extensible in terms of streaming, recording, processing, etc.
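A minimal sketch of that texture-backed FBO setup (OpenGL ES 2, as in the code above; the names recordingFBO and recordingTexture are mine, not from the original, and sizes are assumed to match the drawable):
GLuint recordingFBO, recordingTexture;
glGenTextures(1, &recordingTexture);
glBindTexture(GL_TEXTURE_2D, recordingTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// Allocate storage matching the drawable; no initial data.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, backingWidth, backingHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glGenFramebuffers(1, &recordingFBO);
glBindFramebuffer(GL_FRAMEBUFFER, recordingFBO);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, recordingTexture, 0);
NSAssert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE, @"Offscreen framebuffer incomplete");
// Draw the strokes into this FBO; glReadPixels from it for each video frame,
// then draw the texture into the onscreen renderbuffer to display it.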
Best. S

Related

Flood fill with gradient

I am trying to flood fill a UIImage with a gradient. I have already implemented this for solid colors. I tried to flood fill with UIColor colorWithPatternImage, but it fills the image with weird colors. Could someone please point me in the right direction? Here's the method I am using for flood fill:
- (UIImage *) floodFillFromPoint:(CGPoint)startPoint withColor:(UIColor *)newColor andTolerance:(int)tolerance
{
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGImageRef imageRef = [self CGImage];
NSUInteger width = CGImageGetWidth(imageRef);
NSUInteger height = CGImageGetHeight(imageRef);
unsigned char *imageData = malloc(height * width * 4);
NSUInteger bytesPerPixel = CGImageGetBitsPerPixel(imageRef) / 8;
NSUInteger bytesPerRow = CGImageGetBytesPerRow(imageRef);
NSUInteger bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
CGContextRef context = CGBitmapContextCreate(imageData,
width,
height,
bitsPerComponent,
bytesPerRow,
colorSpace,
CGImageGetBitmapInfo(imageRef));
CGColorSpaceRelease(colorSpace);
CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
//Get color at start point
unsigned int byteIndex = (bytesPerRow * startPoint.y) + startPoint.x * bytesPerPixel;
unsigned int ocolor = getColorCode(byteIndex, imageData);
//Convert newColor to RGBA value so we can save it to image.
int newRed, newGreen, newBlue, newAlpha;
const CGFloat *components = CGColorGetComponents(newColor.CGColor);
/*
If you are wondering why I use CGColorGetNumberOfComponents, read the following link:
http://stackoverflow.com/questions/9238743/is-there-an-issue-with-cgcolorgetcomponents
*/
if(CGColorGetNumberOfComponents(newColor.CGColor) == 2)
{
newRed = newGreen = newBlue = components[0] * 255;
newAlpha = components[1];
}
else if (CGColorGetNumberOfComponents(newColor.CGColor) == 4)
{
newRed = components[0] * 255;
newGreen = components[1] * 255;
newBlue = components[2] * 255;
newAlpha = 255;
}
unsigned int ncolor = (newRed << 24) | (newGreen << 16) | (newBlue << 8) | newAlpha;
/*
We use a stack to store points.
The stack is implemented as a linked list.
To increase speed I have used NSMutableData instead of NSMutableArray.
For details of this implementation, visit the following link.
http://iwantmyreal.name/blog/2012/09/29/a-faster-array-in-objective-c/
*/
LinkedListStack *points = [[LinkedListStack alloc] initWithCapacity:500 incrementSize:500 andMultiplier:height];
int x = startPoint.x;
int y = startPoint.y;
[points pushFrontX:x andY:y];
/*
This algorithm is pretty simple, though it looks odd in Objective-C syntax.
To get familiar with this algorithm, visit the following link.
http://lodev.org/cgtutor/floodfill.html
You can read the whole article for background.
If you are already familiar with flood fill, go to "Scanline Floodfill Algorithm With Stack" (floodFillScanlineStack).
*/
unsigned int color;
BOOL spanLeft,spanRight;
while ([points popFront:&x andY:&y] != INVALID_NODE_CONTENT)
{
/* if (spanRight) {
newRed = newRed-1;
newBlue = newBlue+1;
}
if (spanLeft) {
newBlue = newBlue-1;
newRed = newRed+1;
}*/
byteIndex = (bytesPerRow * y) + x * bytesPerPixel;
color = getColorCode(byteIndex, imageData);
while(y >= 0 && compareColor(ocolor, color, tolerance))
{
y--;
if(y >= 0)
{
byteIndex = (bytesPerRow * y) + x * bytesPerPixel;
color = getColorCode(byteIndex, imageData);
}
}
y++;
spanLeft = spanRight = NO;
byteIndex = (bytesPerRow * y) + x * bytesPerPixel;
color = getColorCode(byteIndex, imageData);
while (y < height && compareColor(ocolor, color, tolerance) )
{
// NSLog(@"compareColor");
//Change old color with newColor RGBA value
/* if (compareColor(ncolor, color, tolerance)) {
newRed = 255;
newGreen = 255;
newBlue = 255;
newAlpha = 0;
}*/
imageData[byteIndex + 0] = newRed;
imageData[byteIndex + 1] = newGreen;
imageData[byteIndex + 2] = newBlue;
imageData[byteIndex + 3] = newAlpha;
if(x > 0)
{
byteIndex = (bytesPerRow * y) + (x - 1) * bytesPerPixel;
color = getColorCode(byteIndex, imageData);
if(!spanLeft && x > 0 && compareColor(ocolor, color, tolerance))
{
[points pushFrontX:(x - 1) andY:y];
spanLeft = YES;
}
else if(spanLeft && x > 0 && !compareColor(ocolor, color, tolerance))
{
spanLeft = NO;
}
}
if(x < width - 1)
{
byteIndex = (bytesPerRow * y) + (x + 1) * bytesPerPixel;
color = getColorCode(byteIndex, imageData);
if(!spanRight && compareColor(ocolor, color, tolerance))
{
[points pushFrontX:(x + 1) andY:y];
spanRight = YES;
}
else if(spanRight && !compareColor(ocolor, color, tolerance))
{
spanRight = NO;
}
}
y++;
if(y < height)
{
byteIndex = (bytesPerRow * y) + x * bytesPerPixel;
color = getColorCode(byteIndex, imageData);
}
}
}
//Convert flood-filled image raw data back to a UIImage object.
CGImageRef newCGImage = CGBitmapContextCreateImage(context);
UIImage *result = [UIImage imageWithCGImage:newCGImage];
CGImageRelease(newCGImage);
CGContextRelease(context);
free(imageData);
return result;
}
@catch (NSException *exception)
{
NSLog(@"Exception : %@", exception);
}
}
This is what I am using for flood fill with gradient
filledImage = [_imageView.image floodFillFromPoint:CGPointMake((touchPoint.x*ScaleFactor)/_zoomScrollview.zoomScale,( touchPoint.y*ScaleFactor)/_zoomScrollview.zoomScale) withColor:[UIColor colorWithPatternImage:[UIImage imageNamed:@"color-book.png"]] andTolerance:5.0 ];
Here is the gradient image I am using:
But it shows weird colors like this.
I am working on a coloring book app, so after applying the gradient it should look the way it would in any coloring book app.
Can anyone tell me where the problem lies, or what I can do to resolve it?

Objective-C OpenGL how to rotate, scale

I have the following source code where I can draw some objects to the screen, but I want to rotate and scale the drawn object, and nothing happens when I touch the screen. (In the log I can see whether I am doing a pan gesture or a pinch gesture, so the detection is OK.)
What am I doing wrong?
UPDATE
Now it moves (instead of rotating); maybe I am not rotating it around the center of the point cloud. I tried to move it to the center, but now nothing is on the screen. Here is the updated code:
#import <QuartzCore/QuartzCore.h>
#import <OpenGLES/EAGLDrawable.h>
#import "EAGLView.h"
#import "VertexPoint.h"
#import "CamOnMap-Swift.h"
#import <Math.h>
#define USE_DEPTH_BUFFER 1
#define DEGREES_TO_RADIANS(__ANGLE) ((__ANGLE) / 180.0 * M_PI)
// A class extension to declare private methods
@interface EAGLView ()
@property (nonatomic, retain) EAGLContext *context;
//@property (nonatomic, assign) NSTimer *animationTimer;
- (BOOL) createFramebuffer;
- (void) destroyFramebuffer;
@end
@implementation EAGLView
@synthesize context;
//@synthesize animationTimer;
//@synthesize animationInterval;
// You must implement this method
+ (Class)layerClass {
return [CAEAGLLayer class];
}
-(id)initWithCoder:(NSCoder *)aDecoder{
NSLog(@"Got here: this is initWithCoder");
TOUCH_ROT_FACTOR = (180.0 / 320.0);
if ((self = [super initWithCoder:aDecoder])) {
// Get the layer
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
if (!context || ![EAGLContext setCurrentContext:context]) {
//[self release];
return nil;
}
// animationInterval = 1.0 / 60.0;
[self setupView];
}
return self;
}
- (void)setupView {
const GLfloat zNear = 0.1, zFar = 1000.0, fieldOfView = 60.0;
GLfloat size;
glEnable(GL_DEPTH_TEST);
glMatrixMode(GL_PROJECTION);
size = zNear * tanf(DEGREES_TO_RADIANS(fieldOfView) / 2.0);
//Grab the size of the screen
CGRect rect = self.frame;
glFrustumf(-size, size,
-size / (rect.size.width / rect.size.height),
size / (rect.size.width / rect.size.height),
zNear, zFar);
glViewport(0, 0, rect.size.width, rect.size.height);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
//Initialize touchLocation to 0, 0
// touchLocation = CGPointMake(rect.size.width/2, rect.size.height/2);
// touchLocation = CGPointMake(0, 0);
// GLfloat x = (touchLocation.x);
// GLfloat y = (touchLocation.y);
//translate the triangle
// glTranslatef(x, y, -10.0);
multitouch_distance_start = 0;
viewDistance = -100;
[self setupGestures];
[self getPly];
}
- (void)setupGestures{
[self setUserInteractionEnabled:true];
[self setMultipleTouchEnabled:true];
pinchGesture = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(pinchHandler:)];
panGesture =[[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(panHandler:)];
[self addGestureRecognizer:pinchGesture];
[self addGestureRecognizer:panGesture];
};
-(VertexPoint*)center:(NSMutableArray *)vertices2{
float sum_x =0.0f;
float sum_y =0.0f;
float sum_z =0.0f;
for (int i=0; i<vertices2.count; i++) {
VertexPoint *point =vertices2[i];
sum_x +=point.x;
sum_y +=point.y;
sum_z +=point.z;
}
center_ = [[VertexPoint alloc] init];
GLfloat point[] = {(sum_x / vertices2.count),(sum_y / vertices2.count), (sum_z / vertices2.count)};
[center_ setPosition:point];
return center_;
}
-(float)multiTouchDistance:(UIPinchGestureRecognizer *) recognizer{
float x1 = [recognizer locationOfTouch:0 inView:self].x;
float y1 = [recognizer locationOfTouch:0 inView:self].y;
float x2 = [recognizer locationOfTouch:1 inView:self].x;
float y2 = [recognizer locationOfTouch:1 inView:self].y;
float dx = x2 - x1;
float dy = y2 - y1;
return sqrt(dx*dx + dy*dy);
}
- (void)pinchHandler:(UIPinchGestureRecognizer *)recognizer {
glPopMatrix();
float d = [self multiTouchDistance:recognizer];
NSLog(@"d: %f",d);
if(d > 0.0f)multitouch_distance_start = d;
viewDistance *= multitouch_distance_start / d;
multitouch_distance_start = d;
//glScalef(recognizer.scale, recognizer.scale, recognizer.scale);
//recognizer.scale = 1;
NSLog(@"viewDistance: %f",viewDistance);
glPushMatrix();
[self render];
}
- (void)panHandler:(UIPanGestureRecognizer *)recognizer {
if(recognizer.state == UIGestureRecognizerStateBegan){
CGPoint translation = [recognizer translationInView:self];
xPrev = translation.x;
yPrev = translation.y;
}
if(recognizer.state == UIGestureRecognizerStateChanged){
glPopMatrix();
CGPoint translation = [recognizer translationInView:self];
GLfloat dx = translation.x - xPrev;
GLfloat dy = translation.y - yPrev;
xAngle += ((dx) * (TOUCH_ROT_FACTOR));
yAngle += ((dy) * (TOUCH_ROT_FACTOR));
// glRotatef(xAngle,1,0,0);
// glRotatef(yAngle,0,1,0);
xPrev = translation.x;
yPrev = translation.y;
glPushMatrix();
NSLog(@"pan");
[self render];
}
};
-(void)completionHandler:(NSString *) data{
// NSLog(@"%@",data);
NSArray *dataArray = [data componentsSeparatedByString:@"\n"];
vertices = [[NSMutableArray alloc] init];
for(NSInteger i = 14; i < dataArray.count; i++ ){
NSArray *row = [dataArray[i] componentsSeparatedByString:@" "];
if(![dataArray[i] isEqual: @""]){
VertexPoint *point = [[VertexPoint alloc] init];
GLfloat a[] = { [[row objectAtIndex:0] floatValue], [[row objectAtIndex:1] floatValue], [[row objectAtIndex:2] floatValue]};
[point setPosition:a];
GLfloat b[] = { [[row objectAtIndex:6] floatValue],[[row objectAtIndex:7] floatValue], [[row objectAtIndex:8] floatValue], [[row objectAtIndex:9] floatValue]};
[point setColor:b];
[vertices addObject:point];
}
}
long vcount = [vertices count] * 3; // * 2 for the two coordinates of a loc object
glVertices = (GLfloat *)malloc(vcount * sizeof(GLfloat));
int currIndex = 0;
for (VertexPoint *loc in vertices) {
glVertices[currIndex++] = (loc.x);
glVertices[currIndex++] =(loc.y);
glVertices[currIndex++] =(loc.z);
}
long ccount = [vertices count] * 4;
glColors = (GLfloat *)malloc(ccount * sizeof(GLfloat));
int currColorIndex = 0;
for(VertexPoint *color in vertices){
glColors[currColorIndex++] = (color.r);
glColors[currColorIndex++] = (color.g);
glColors[currColorIndex++] = (color.b);
glColors[currColorIndex++] = (color.a);
}
center_ = [self center: vertices];
glTranslatef(0, 0, -100);
// glTranslatef(-1 * center_.x, -1 * center_.y, -1 * center_.z);
NSLog(@"x: %f, y: %f, z:%f",-1 * center_.x,-1 * center_.y,-1 * center_.z);
[self drawView];
};
-(void)getPly{
GlobalDataFunctions *globalDataFunctions = [[GlobalDataFunctions alloc] init];
NSString *ply = [globalDataFunctions selectedPly];
NSLog(@"%@",ply);
RestClient *restClient = [[RestClient alloc] init];
NSString *username = [[NSUserDefaults standardUserDefaults] objectForKey:@"username"];
NSString *password = [[NSUserDefaults standardUserDefaults] objectForKey:@"password"];
[restClient getPly: username password:password ply:ply myComletionHandler: ^(NSString *data){
[self completionHandler:data];
}];
};
- (void)drawView {
NSLog(@"drawView");
//setting up the draw content
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
//Draw stuff
//reset matrix to identity
glLoadIdentity();
//rough approximation of screen to current 3D space
GLfloat x = (touchLocation.x - 160.0) / 38.0;
GLfloat y = (240.0 - touchLocation.y) / 38.0;
//translate the triangle
glTranslatef(x, y, -1.0);
[self render];
}
-(void)render{
NSLog(@"render");
center_ = [self center: vertices];
// glTranslatef(-1 * center_.x, -1 * center_.y, -1 * center_.z);
//clear the back color back to our original color and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//rough approximation of screen to current 3D space
//set the format and location for verticies
glVertexPointer(3, GL_FLOAT, 0, glVertices);
//set the opengl state
glEnableClientState(GL_VERTEX_ARRAY);
//set the format and location for colors
glColorPointer(4, GL_FLOAT, 0, glColors);
//set the opengl state
glEnableClientState(GL_COLOR_ARRAY);
glPointSize(3);
//draw the triangles
// glTranslatef(0,0,viewDistance);
// glTranslatef(-1 * center_.x, -1 * center_.y, -1 * center_.z);
glDrawArrays(GL_POINTS, 0, vertices.count);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
//show the render buffer
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
UITouch *touch = [touches anyObject];
touchLocation = [touch locationInView:self];
NSLog(@"touch");
[self drawView];
}
- (void)layoutSubviews {
[EAGLContext setCurrentContext:context];
[self destroyFramebuffer];
[self createFramebuffer];
[self drawView];
}
- (BOOL)createFramebuffer {
glGenFramebuffersOES(1, &viewFramebuffer);
glGenRenderbuffersOES(1, &viewRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
if (USE_DEPTH_BUFFER) {
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
}
if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
return NO;
}
return YES;
}
- (void)destroyFramebuffer {
glDeleteFramebuffersOES(1, &viewFramebuffer);
viewFramebuffer = 0;
glDeleteRenderbuffersOES(1, &viewRenderbuffer);
viewRenderbuffer = 0;
if(depthRenderbuffer) {
glDeleteRenderbuffersOES(1, &depthRenderbuffer);
depthRenderbuffer = 0;
}
}
And here you can find sample input data: http://pastebin.com/rcrfZdyt

iOS: Feather with glow/shadow effect on UIImage

I am trying to find a way to apply a feather effect with a shadow around a UIImage (not the UIImageView) in iOS, but I haven't found any perfect solution yet. I have an idea that it might be done by masking, but I am very new to Core Graphics.
If anyone can help, thanks.
OK so:
I was looking for the same thing, but unfortunately with no luck.
I decided to create my own feather(ing) code.
Add this code to a UIImage category, and then call [image featherImageWithDepth:4]; 4 is just an example.
Try to keep the depth as low as possible.
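For reference, declaring it as a category would look roughly like this (the category and file names here are my own, not from the original answer):
// UIImage+Feather.h (hypothetical name)
@interface UIImage (Feather)
- (UIImage *)featherImageWithDepth:(int)featherDepth;
@end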
//==============================================================================
- (UIImage*)featherImageWithDepth:(int)featherDepth {
// First get the image into your data buffer
CGImageRef imageRef = [self CGImage];
NSUInteger width = CGImageGetWidth(imageRef);
NSUInteger height = CGImageGetHeight(imageRef);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
unsigned char *rawData = (unsigned char*) calloc(height * width * 4, sizeof(unsigned char));
NSUInteger bytesPerPixel = 4;
NSUInteger bytesPerRow = bytesPerPixel * width;
NSUInteger bitsPerComponent = 8;
CGContextRef context = CGBitmapContextCreate(rawData, width, height,
bitsPerComponent, bytesPerRow, colorSpace,
kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
CGColorSpaceRelease(colorSpace);
CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
// Now your rawData contains the image data in the RGBA8888 pixel format.
NSUInteger byteIndex = 0;
NSUInteger rawDataCount = width*height;
for (int i = 0 ; i < rawDataCount ; ++i, byteIndex += bytesPerPixel) {
NSInteger alphaIndex = byteIndex + 3;
if (rawData[alphaIndex] > 100) {
for (int row = 1; row <= featherDepth; row++) {
if (testBorderLayer((long)alphaIndex,
rawData,
(long)rawDataCount,
(long)width,
(long)height,
row)) {
int destinationAlpha = 255 / (featherDepth+1) * (row + 1);
double alphaDiv = (double)destinationAlpha / (double)rawData[alphaIndex];
rawData[alphaIndex] = destinationAlpha;
rawData[alphaIndex-1] = (double)rawData[alphaIndex-1] * alphaDiv;
rawData[alphaIndex-2] = (double)rawData[alphaIndex-2] * alphaDiv;
rawData[alphaIndex-3] = (double)rawData[alphaIndex-3] * alphaDiv;
// switch (row) {
// case 1:
// rawData[alphaIndex-1] = 255;
// rawData[alphaIndex-2] = 0;
// rawData[alphaIndex-3] = 0;
// break;
// case 2:
// rawData[alphaIndex-1] = 0;
// rawData[alphaIndex-2] = 255;
// rawData[alphaIndex-3] = 0;
// break;
// case 3:
// rawData[alphaIndex-1] = 0;
// rawData[alphaIndex-2] = 0;
// rawData[alphaIndex-3] = 255;
// break;
// case 4:
// rawData[alphaIndex-1] = 127;
// rawData[alphaIndex-2] = 127;
// rawData[alphaIndex-3] = 0;
// break;
// case 5:
// rawData[alphaIndex-1] = 127;
// rawData[alphaIndex-2] = 0;
// rawData[alphaIndex-3] = 127;
// case 6:
// rawData[alphaIndex-1] = 0;
// rawData[alphaIndex-2] = 127;
// rawData[alphaIndex-3] = 127;
// break;
// default:
// break;
// }
break;
}
}
}
}
CGImageRef newCGImage = CGBitmapContextCreateImage(context);
UIImage *result = [UIImage imageWithCGImage:newCGImage scale:[self scale] orientation:UIImageOrientationUp];
CGImageRelease(newCGImage);
CGContextRelease(context);
free(rawData);
return result;
}
//==============================================================================
bool testBorderLayer(long byteIndex,
unsigned char *imageData,
long dataSize,
long pWidth,
long pHeight,
int border) {
int width = border * 2 + 1;
int height = width - 2;
// run thru border pixels
// |-|
// | |
// |-|
//top,bot - hor
for (int i = 1; i < width - 1; i++) {
long topIndex = byteIndex + 4 * ( - border * pWidth - border + i);
long botIndex = byteIndex + 4 * ( border * pWidth - border + i);
long destColl = byteIndex/4 % pWidth - border + i;
if (destColl > 1 && destColl < pWidth) {
if (testPoint(topIndex, imageData, dataSize) ||
testPoint(botIndex, imageData, dataSize)) {
return true;
}
}
}
//left,right - ver
if (byteIndex / 4 % pWidth < pWidth - border - 1) {
for (int k = 0; k < height; k++) {
long rightIndex = byteIndex + 4 * ( border - (border) * pWidth + pWidth * k);
if (testPoint(rightIndex, imageData, dataSize)) {
return true;
}
}
}
if (byteIndex / 4 % pWidth > border) {
for (int k = 0; k < height; k++) {
long leftIndex = byteIndex + 4 * ( - border - (border) * pWidth + pWidth * k);
if (testPoint(leftIndex, imageData, dataSize)) {
return true;
}
}
}
return false;
}
//==============================================================================
bool testPoint(long pointIndex, unsigned char *imageData, long dataSize) {
if (pointIndex >= 0 && pointIndex < dataSize * 4 - 1 &&
imageData[pointIndex] < 30) {
return true;
}
return false;
}
//==============================================================================
Sorry for the sparse comments ;)
I suggest looking at this CIFilter list put out by Apple; they have some pretty decent filters.
/reference/CoreImageFilterReference
And also check out GPUImage.
https://github.com/BradLarson/GPUImage
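As a rough illustration of the Core Image route (this is a sketch that applies a plain CIGaussianBlur to the whole image rather than a true edge-only feather; the radius value is arbitrary):
// Sketch only: blur a UIImage with Core Image.
CIImage *input = [CIImage imageWithCGImage:image.CGImage];
CIFilter *blur = [CIFilter filterWithName:@"CIGaussianBlur"];
[blur setValue:input forKey:kCIInputImageKey];
[blur setValue:@4.0 forKey:kCIInputRadiusKey]; // arbitrary radius
CIContext *ciContext = [CIContext contextWithOptions:nil];
CGImageRef cg = [ciContext createCGImage:blur.outputImage fromRect:input.extent];
UIImage *result = [UIImage imageWithCGImage:cg];
CGImageRelease(cg);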

OpenGL Polygon drawing anti aliasing (in cocos2d)

I'm using cocos2d as a game engine and I'm trying to draw nice anti-aliased polygons in it.
I wrote a simple polygon (for now, only triangles) drawing class similar to cocos2d's CCDrawNode.
How can I apply anti-aliasing to only these triangles, without applying AA to the whole system? :)
MLDrawNode.h
#import "CCNode.h"
#import "CCProgressTimer.h"
typedef struct {
NSUInteger indexInBuffer;
NSUInteger vertexCount;
MLVertexData *vertexData;
} mlPolygonData;
@interface MLDrawNode : CCNode
{
GLuint _vao;
GLuint _vbo;
NSUInteger _bufferCapacity;
GLsizei _bufferCount;
MLVertexData *_buffer;
ccBlendFunc _blendFunc;
NSUInteger _nextFreeVertexIndex;
BOOL _dirty;
}
@property(nonatomic, assign) ccBlendFunc blendFunc;
-(mlPolygonData) drawPolyWithVerts:(CGPoint *)verts count:(NSUInteger)count fillColor:(ccColor4F)fill borderWidth:(CGFloat)width borderColor:(ccColor4F)line;
-(void) clear;
@end
MLDrawNode.m
@implementation MLDrawNode
-(void)ensureCapacity:(NSUInteger)count
{
if(_bufferCount + count > _bufferCapacity){
_bufferCapacity += MAX(_bufferCapacity, count);
_buffer = realloc(_buffer, _bufferCapacity*sizeof(MLVertexData));
// NSLog(@"Resized vertex buffer to %d", _bufferCapacity);
}
}
-(id)init {
if((self = [super init])){
self.blendFunc = (ccBlendFunc){CC_BLEND_SRC, CC_BLEND_DST};
self.shaderProgram = [[CCShaderCache sharedShaderCache] programForKey:kCCShader_PositionLengthTexureColor];
[self ensureCapacity:512];
glGenVertexArrays(1, &_vao);
ccGLBindVAO(_vao);
glGenBuffers(1, &_vbo);
glBindBuffer(GL_ARRAY_BUFFER, _vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(ccV2F_C4B_T2F)*_bufferCapacity, _buffer, GL_STREAM_DRAW);
glEnableVertexAttribArray(kCCVertexAttrib_Position);
glVertexAttribPointer(kCCVertexAttrib_Position, 2, GL_FLOAT, GL_FALSE, sizeof(MLVertexData), (GLvoid *)offsetof(ccV2F_C4B_T2F, vertices));
glEnableVertexAttribArray(kCCVertexAttrib_Color);
glVertexAttribPointer(kCCVertexAttrib_Color, 4, GL_FLOAT, GL_FALSE, sizeof(MLVertexData), (GLvoid *)offsetof(MLVertexData, color));
glEnableVertexAttribArray(kCCVertexAttrib_TexCoords);
glVertexAttribPointer(kCCVertexAttrib_TexCoords, 2, GL_FLOAT, GL_FALSE, sizeof(MLVertexData), (GLvoid *)offsetof(MLVertexData, texCoord));
glBindBuffer(GL_ARRAY_BUFFER, 0);
ccGLBindVAO(0);
CHECK_GL_ERROR();
_dirty = YES;
_nextFreeVertexIndex = 0;
}
return self;
}
-(void)dealloc
{
#ifdef __CC_PLATFORM_IOS
NSAssert([EAGLContext currentContext], @"No GL context set!");
#endif
free(_buffer); _buffer = NULL;
glDeleteBuffers(1, &_vbo); _vbo = 0;
glDeleteVertexArrays(1, &_vao); _vao = 0;
}
-(void)render
{
if( _dirty ) {
glBindBuffer(GL_ARRAY_BUFFER, _vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(MLVertexData)*_bufferCapacity, _buffer, GL_STREAM_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
_dirty = NO;
}
ccGLBindVAO(_vao);
glDrawArrays(GL_TRIANGLES, 0, _bufferCount);
CC_INCREMENT_GL_DRAWS(1);
CHECK_GL_ERROR();
}
-(void)draw
{
_dirty = YES;
ccGLBlendFunc(_blendFunc.src, _blendFunc.dst);
[_shaderProgram use];
[_shaderProgram setUniformsForBuiltins];
[self render];
}
-(mlPolygonData)drawPolyWithVerts:(CGPoint *)verts count:(NSUInteger)count fillColor: (ccColor4F)fill borderWidth:(CGFloat)width borderColor:(ccColor4F)line;
{
[self ensureCapacity: count];
for (int i = 0; i < count; i++) {
MLVertexData newVertex = MLVertexDataMake(0.0, 0.0, verts[i].x, verts[i].y, mlColor4FMake(fill.r, fill.g, fill.b, fill.a));
_buffer[_nextFreeVertexIndex + i] = newVertex;
}
mlPolygonData polyData;
polyData.indexInBuffer = _nextFreeVertexIndex;
polyData.vertexCount = count;
polyData.vertexData = _buffer;
_nextFreeVertexIndex += count;
_bufferCount += count;
_dirty = YES;
return polyData;
}
-(void)clear
{
_bufferCount = 0;
_nextFreeVertexIndex = 0;
_dirty = YES;
}
@end

Hide parts of image in OpenGL ES

I'm using GLKView to render some sprites in an iOS app.
My question is: how can I remove/draw only parts of one image? For example, I have a background, and on top of it an image (both sprites). I want to cut some random rectangles out of the image on top, so the background is visible in those rectangles. Is that possible?
I'm creating my textures like this:
- (id)initWithFile:(NSString *)fileName effect:(GLKBaseEffect *)effect position:(GLKVector2)position{
if ((self = [super init])) {
self.effect = effect;
NSDictionary * options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES],
GLKTextureLoaderOriginBottomLeft,
nil];
NSError * error;
NSString *path = [[NSBundle mainBundle] pathForResource:fileName ofType:nil];
self.textureInfo = [GLKTextureLoader textureWithContentsOfFile:path options:options error:&error];
self.contentSize = CGSizeMake(self.textureInfo.width, self.textureInfo.height);
TexturedQuad newQuad;
newQuad.bl.geometryVertex = CGPointMake(0, 0);
newQuad.br.geometryVertex = CGPointMake(self.textureInfo.width, 0);
newQuad.tl.geometryVertex = CGPointMake(0, self.textureInfo.height);
newQuad.tr.geometryVertex = CGPointMake(self.textureInfo.width, self.textureInfo.height);
newQuad.bl.textureVertex = CGPointMake(0, 0);
newQuad.br.textureVertex = CGPointMake(1, 0);
newQuad.tl.textureVertex = CGPointMake(0, 1);
newQuad.tr.textureVertex = CGPointMake(1, 1);
self.quad = newQuad;
self.position = position;
self.frameHeight = self.textureInfo.height;
}
return self;
}
And then render them like this
- (void)render {
self.effect.texture2d0.name = self.textureInfo.name;
self.effect.texture2d0.enabled = YES;
self.effect.transform.modelviewMatrix = self.modelMatrix;
[self.effect prepareToDraw];
long offset = (long)&_quad;
glEnableVertexAttribArray(GLKVertexAttribPosition);
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, geometryVertex)));
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, textureVertex)));
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
Typically this is done with a second texture that is an alpha map. The alpha texture will have regions that are fully opaque and other regions that are fully transparent. In the shader, the alpha channel of the alpha texture is multiplied by the image texture's alpha to get the final color.
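With GLKBaseEffect specifically, a sketch of that multiply could use the second texture unit in modulate mode (maskInfo is an assumed GLKTextureInfo for the alpha map, loaded the same way as the sprite texture above):
self.effect.texture2d0.name = self.textureInfo.name; // sprite image
self.effect.texture2d0.enabled = YES;
self.effect.texture2d1.name = maskInfo.name; // alpha map (assumed)
self.effect.texture2d1.enabled = YES;
self.effect.texture2d1.envMode = GLKTextureEnvModeModulate; // multiply with unit 0
[self.effect prepareToDraw];
// The quad also needs a second set of texture coordinates bound to
// GLKVertexAttribTexCoord1 for the mask unit.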
