iOS - CIImage autoAdjustmentFilters leak

I'm using the auto-enhance feature to improve a lot of images, but it seems there is a big leak. Here's the code I'm using:
#import <CoreImage/CoreImage.h>
#import <ImageIO/ImageIO.h>

@implementation UIImage (AutoEnhanced)

- (UIImage *)autoEnhancedImage
{
    @autoreleasepool {
        CIImage *ciOriginal = self.CIImage;
        if (!ciOriginal) {
            ciOriginal = [[CIImage alloc] initWithCGImage:self.CGImage];
        }
        // Ask Core Image which auto-enhancement filters apply to this image
        NSDictionary *options = @{ CIDetectorImageOrientation : @(self.CGImagePropertyOrientation) };
        NSArray *adjustments = [ciOriginal autoAdjustmentFiltersWithOptions:options];
        for (CIFilter *filter in adjustments) {
            [filter setValue:ciOriginal forKey:kCIInputImageKey];
            ciOriginal = filter.outputImage;
        }
        // Render the filter chain into a CGImage
        CIContext *context = [CIContext contextWithOptions:nil];
        CGImageRef cgImage = [context createCGImage:ciOriginal fromRect:ciOriginal.extent];
        UIImage *enhancedImage = [[UIImage alloc] initWithCGImage:cgImage];
        CGImageRelease(cgImage);
        return enhancedImage;
    }
}

// Map UIImageOrientation to the EXIF orientation Core Image expects
- (CGImagePropertyOrientation)CGImagePropertyOrientation
{
    switch (self.imageOrientation) {
        case UIImageOrientationUp:            return kCGImagePropertyOrientationUp;
        case UIImageOrientationUpMirrored:    return kCGImagePropertyOrientationUpMirrored;
        case UIImageOrientationDown:          return kCGImagePropertyOrientationDown;
        case UIImageOrientationDownMirrored:  return kCGImagePropertyOrientationDownMirrored;
        case UIImageOrientationLeftMirrored:  return kCGImagePropertyOrientationLeftMirrored;
        case UIImageOrientationRight:         return kCGImagePropertyOrientationRight;
        case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
        case UIImageOrientationLeft:          return kCGImagePropertyOrientationLeft;
    }
}

@end
And here's the log from Instruments:
Even with an autoreleasepool, I can't fix it. Any idea would be very much appreciated! Thanks!
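One likely culprit, for what it's worth: creating a new CIContext on every call. A CIContext holds onto GPU caches that are expensive to build and slow to tear down, so allocating one per image tends to show up in Instruments as ever-growing memory. A minimal sketch of the usual fix, reusing a single context (the enhancementContext helper name is made up here):

static CIContext *enhancementContext(void)
{
    static CIContext *context;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        context = [CIContext contextWithOptions:nil]; // created once, reused for every image
    });
    return context;
}

In autoEnhancedImage, the local contextWithOptions: call would then be replaced with enhancementContext().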

Related

CIDetectorTypeQRCode fails to scan transparent images

I am trying to scan QR images that the user chooses from disk. I found a strange issue where all the libraries I tried failed (CIDetector, plus old ports of ZXing and ZBar).
I know there are ways to add a white background (e.g. redrawing the image, or using a CIFilter) so that the image gets scanned.
What is the correct way of scanning QR codes with a transparent background? Can CIContext or CIDetector be configured for it? The image below fails to scan on both iOS and macOS.
https://en.wikipedia.org/wiki/QR_code#/media/File:QR_code_for_mobile_English_Wikipedia.svg
- (void)scanImage:(CIImage *)image
{
    NSArray <CIFeature *> *features = [[self QRdetector] featuresInImage:image];
    NSLog(@"Number of features found: %lu", (unsigned long)[features count]);
}

- (CIDetector *)QRdetector
{
    CIContext *context = [CIContext contextWithOptions:@{ kCIContextWorkingColorSpace : [NSNull null] }]; // no difference using special options or nil as a context
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeQRCode context:context options:@{ CIDetectorAccuracy : CIDetectorAccuracyHigh, CIDetectorAspectRatio : @(1) }];
    return detector;
}

- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
    NSURL *URL = [[NSBundle mainBundle] URLForResource:@"transparentqrcode" withExtension:@"png"];
    CIImage *ciImage = [CIImage imageWithContentsOfURL:URL];

    // CUSTOM CODE TO ADD A WHITE BACKGROUND
    CIFilter *filter = [CIFilter filterWithName:@"CISourceAtopCompositing"];
    [filter setDefaults];
    CIColor *whiteColor = [[CIColor alloc] initWithColor:[UIColor whiteColor]];
    CIImage *colorImage = [CIImage imageWithColor:whiteColor];
    colorImage = [colorImage imageByCroppingToRect:ciImage.extent];
    [filter setValue:ciImage forKey:kCIInputImageKey];
    [filter setValue:colorImage forKey:kCIInputBackgroundImageKey];
    CIImage *newImage = [filter valueForKey:kCIOutputImageKey];

    // Scanning the original transparent image fails; scanning newImage (composited onto white) succeeds
    [self scanImage:ciImage];
    return YES;
}
As mentioned in the comments, it appears CIDetector treats the alpha channel as black. Replacing it with white works, unless the QR code itself is white on a transparent background.
I haven't done any profiling to see whether this is quicker, but it might be a better option:
- (IBAction)didTap:(id)sender
{
    NSURL *URL = [[NSBundle mainBundle] URLForResource:@"transparentqrcode" withExtension:@"png"];
    CIImage *ciImage = [CIImage imageWithContentsOfURL:URL];
    NSArray <CIFeature *> *features = [self getImageFeatures:ciImage];

    // If CIDetector failed to find / process a QR code in the image
    // (such as when the image has a transparent background),
    // invert the colors and try again
    if (features.count == 0) {
        CIFilter *filter = [CIFilter filterWithName:@"CIColorInvert"];
        [filter setValue:ciImage forKey:kCIInputImageKey];
        CIImage *newImage = [filter valueForKey:kCIOutputImageKey];
        features = [self getImageFeatures:newImage];
    }

    if (features.count > 0) {
        for (CIQRCodeFeature *qrFeature in features) {
            NSLog(@"QRFeature.messageString : %@", qrFeature.messageString);
        }
    } else {
        NSLog(@"Unable to decode image!");
    }
}

- (NSArray <CIFeature *> *)getImageFeatures:(CIImage *)image
{
    NSArray <CIFeature *> *features = [[self QRdetector] featuresInImage:image];
    NSLog(@"Number of features found: %lu", (unsigned long)[features count]);
    return features;
}

- (CIDetector *)QRdetector
{
    CIContext *context = [CIContext contextWithOptions:@{ kCIContextWorkingColorSpace : [NSNull null] }];
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeQRCode context:context options:@{ CIDetectorAccuracy : CIDetectorAccuracyHigh, CIDetectorAspectRatio : @(1) }];
    return detector;
}
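As a side note, the white-background composite from the question can be written more compactly with CIImage's built-in compositing method, avoiding the explicit CISourceAtopCompositing filter. A sketch, assuming the same ciImage as above (variable names are mine):

CIImage *white = [[CIImage imageWithColor:[CIColor colorWithRed:1 green:1 blue:1]] imageByCroppingToRect:ciImage.extent];
CIImage *flattened = [ciImage imageByCompositingOverImage:white]; // source-over onto opaque white

The same caveat as the invert trick applies: this only helps when the code itself is dark on a transparent background.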

CIFilter: how to turn a white imageView.image with transparency into any color

I am trying to turn a white imageView.image into different colors programmatically. The outlines are currently white.
I am trying to use this solution with CIFilter, but the images don't appear at all.
Here is the pasted code for reference:
- (UIImage *)filterImage:(CIImage *)beginImage color:(UIColor *)color
{
    CIImage *output = [CIFilter filterWithName:@"CIColorMonochrome" keysAndValues:
                          kCIInputImageKey, beginImage,
                          @"inputIntensity", [NSNumber numberWithFloat:1.0],
                          @"inputColor", [[CIColor alloc] initWithColor:color],
                          nil].outputImage;
    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef cgiimage = [context createCGImage:output fromRect:output.extent];
    UIImage *newImage = [UIImage imageWithCGImage:cgiimage];
    CGImageRelease(cgiimage);
    return newImage;
}
Solution .h file
#import <UIKit/UIKit.h>

@interface ViewController : UIViewController
@property (weak, readwrite, nonatomic) IBOutlet UIImageView *imgView;
@end
Solution .m file
#import "ViewController.h"
#interface ViewController ()
#end
#implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
CIImage *inputImage = [CIImage imageWithCGImage:[UIImage imageNamed:#"NJDLv"].CGImage];
self.imgView.image = [self filterImage:inputImage color:[UIColor yellowColor]];
}
- (UIImage *) filterImage: (CIImage *)beginImage color: (UIColor *) color{
CIColor *ciColor = [CIColor colorWithCGColor:color.CGColor];
CIImage *output = [CIFilter filterWithName:#"CIColorMonochrome"
keysAndValues:kCIInputImageKey, beginImage,
#"inputIntensity",#(1.0),
#"inputColor", ciColor, nil].outputImage;
CIContext *context = [CIContext contextWithOptions:nil];
CGImageRef cgiimage = [context createCGImage:output fromRect:output.extent];
UIImage *newImage = [UIImage imageWithCGImage:cgiimage];
CGImageRelease(cgiimage);
return newImage;
}
#end
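One small thing to double-check if the filtered image appears but looks oversized on Retina screens: imageWithCGImage: assumes a scale of 1.0. The scale-aware variant avoids that:

UIImage *newImage = [UIImage imageWithCGImage:cgiimage
                                        scale:[UIScreen mainScreen].scale
                                  orientation:UIImageOrientationUp];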

Rotation With Animation in GPUImage

I am using GPUImage.
I want to rotate an image (by 90, 180, 270, or 360 degrees) with animation, using a GPUImageFilter.
Please help. Thanks in advance.
Here is what I did for rotation. int tag is defined in the .h file:
- (IBAction)btnRotate_Clicked:(id)sender
{
    [self hideFilterView];
    [self hideFontView];
    [self hideEnhanceView];

    // Cycle through the four orientations on each tap
    if (tag == 0) {
        staticPictureOriginalOrientation = UIImageOrientationRight;
        tag = 1;
    } else if (tag == 1) {
        staticPictureOriginalOrientation = UIImageOrientationDown;
        tag = 2;
    } else if (tag == 2) {
        staticPictureOriginalOrientation = UIImageOrientationLeft;
        tag = 3;
    } else {
        staticPictureOriginalOrientation = UIImageOrientationUp;
        tag = 0;
    }

    // Pick a fill mode that suits the new orientation
    UIImageOrientation orientation = staticPictureOriginalOrientation;
    switch (orientation) {
        case UIImageOrientationUp:
        case UIImageOrientationDown:
            [self.MimageView setFillMode:kGPUImageFillModePreserveAspectRatioAndFill];
            break;
        case UIImageOrientationLeft:
        case UIImageOrientationRight:
            [self.MimageView setFillMode:kGPUImageFillModePreserveAspectRatio];
            break;
        default:
            break;
    }
    [self prepareStaticFilter];
}

- (void)prepareStaticFilter
{
    isImageProcessed = TRUE;
    [staticPicture addTarget:filter];
    if (hasBlur) {
        [filter addTarget:blurFilter];
        [blurFilter addTarget:self.MimageView];
    } else {
        [filter addTarget:self.MimageView];
    }

    // Translate the UIImage orientation into a GPUImage rotation mode
    GPUImageRotationMode imageViewRotationMode = kGPUImageNoRotation;
    switch (staticPictureOriginalOrientation) {
        case UIImageOrientationLeft:
            imageViewRotationMode = kGPUImageRotateLeft;
            break;
        case UIImageOrientationRight:
            imageViewRotationMode = kGPUImageRotateRight;
            break;
        case UIImageOrientationDown:
            imageViewRotationMode = kGPUImageRotate180;
            break;
        default:
            imageViewRotationMode = kGPUImageNoRotation;
            break;
    }
    [filter setInputRotation:imageViewRotationMode atIndex:0];
    [staticPicture processImage];
}
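Note that setInputRotation:atIndex: snaps to the new orientation immediately; GPUImage itself has no animated rotation. If the animation part of the question matters, one approach (a sketch, not tested against this exact setup) is to animate the GPUImageView with UIKit and only commit the filter rotation once the animation completes:

[UIView animateWithDuration:0.3 animations:^{
    // Visually rotate the on-screen view by 90 degrees
    self.MimageView.transform = CGAffineTransformRotate(self.MimageView.transform, M_PI_2);
} completion:^(BOOL finished) {
    // Reset the view transform, then bake the rotation into the filter chain
    self.MimageView.transform = CGAffineTransformIdentity;
    [filter setInputRotation:kGPUImageRotateRight atIndex:0];
    [staticPicture processImage];
}];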

iOS - Setting blurred image on top of other views, odd issues

So, I've got an odd scenario.
In my iOS app, I'm trying to blur the content area of the screen when a popover is opened.
I have this working with Core Image, but only with Gaussian blur; none of the other blurs work, which is odd.
I tried doing the same with GPUImage, and it blurs far faster, but it doesn't actually put the view on top of the other views!
To summarize: in the source below, setBlurOnView works properly; however, setBlurOnViewWithGPUImage does not appear to work. The blur view (tag 6110) is created, but the app doesn't actually blur.
Note: this is on iOS 6, in the simulator.
Here's the relevant source:
// ScreenBlur.m
#import <QuartzCore/QuartzCore.h>
#import <CoreImage/CoreImage.h>
#import <GPUImage/GPUImage.h>
#import "ScreenBlur.h"
#import "GlobalData.h"
#import "Logger.h"

@implementation ScreenBlur

+ (void)setBlurOnViewWithGPUImage:(UIView *)view {
    GPUImagePicture *imageSource = [[GPUImagePicture alloc] initWithImage:[self captureScreenInRect:view.frame inView:view]];
    GPUImageGaussianBlurFilter *blur = [[GPUImageGaussianBlurFilter alloc] init];
    [imageSource addTarget:blur];
    [imageSource processImage];
    // Note: pulling the image from imageSource (rather than from the blur filter)
    // likely returns the unfiltered picture, which would explain the missing blur
    [self setImage:[imageSource imageFromCurrentlyProcessedOutput] toView:view];
}

+ (void)setBlurOnView:(UIView *)view {
    // http://stackoverflow.com/questions/17041669/creating-a-blurring-overlay-view
    CIImage *inputImage = [CIImage imageWithCGImage:[self captureScreenInRect:view.frame inView:view].CGImage];
    if ([GlobalData getInstance].ciContext == nil) {
        [Logger Log:@"ciContext does not exist, creating..." fromClass:@"ScreenBlur"];
        // [GlobalData getInstance].ciContext = [CIContext contextWithOptions:nil]; // CPU context
        [GlobalData getInstance].ciContext = [CIContext contextWithEAGLContext:[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]];
    }
    // Set up the blur filter
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [filter setValue:inputImage forKey:kCIInputImageKey];
    [filter setValue:[NSNumber numberWithFloat:3.0f] forKey:@"inputRadius"];
    CIImage *result = [filter valueForKey:kCIOutputImageKey];
    // CIGaussianBlur has a tendency to shrink the image a little;
    // rendering from the input image's extent ensures it matches the original bounds
    CGImageRef cgImage = [[GlobalData getInstance].ciContext createCGImage:result fromRect:[inputImage extent]];
    [self setImage:[UIImage imageWithCGImage:cgImage] toView:view];
}

+ (void)setImage:(UIImage *)blurredImage toView:(UIView *)view {
    UIView *blurView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, blurredImage.size.width, blurredImage.size.height)];
    [blurView setBackgroundColor:[UIColor colorWithPatternImage:blurredImage]];
    [blurView setTag:6110];
    // Set the image as the foreground for the view
    [view addSubview:blurView];
    [view bringSubviewToFront:blurView];
}

// Same as the method above, but resizes the screenshot before applying the blur
// for increased performance at the expense of image quality.
+ (void)setBlurOnViewPerformanceMode:(UIView *)view {
    // http://stackoverflow.com/questions/17041669/creating-a-blurring-overlay-view
    UIImage *screenShot = [self imageWithImage:[self captureScreenInRect:view.frame inView:view] scaledToSize:CGSizeMake(view.frame.size.width / 2, view.frame.size.height / 2)];
    CIImage *inputImage = [CIImage imageWithCGImage:screenShot.CGImage];
    if ([GlobalData getInstance].ciContext == nil) {
        [Logger Log:@"ciContext does not exist, creating..." fromClass:@"ScreenBlur"];
        // [GlobalData getInstance].ciContext = [CIContext contextWithOptions:nil]; // CPU context
        [GlobalData getInstance].ciContext = [CIContext contextWithEAGLContext:[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]];
    }
    // Set up the blur filter
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [filter setValue:inputImage forKey:kCIInputImageKey];
    [filter setValue:[NSNumber numberWithFloat:3.0f] forKey:@"inputRadius"];
    CIImage *result = [filter valueForKey:kCIOutputImageKey];
    CGImageRef cgImage = [[GlobalData getInstance].ciContext createCGImage:result fromRect:[inputImage extent]];
    [self setImage:[self imageWithImage:[UIImage imageWithCGImage:cgImage] scaledToSize:view.frame.size] toView:view];
}

+ (UIImage *)imageWithImage:(UIImage *)image scaledToSize:(CGSize)newSize {
    UIGraphicsBeginImageContextWithOptions(newSize, NO, 0.0);
    [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}

+ (void)removeBlurFromView:(UIView *)view {
    for (UIView *subView in view.subviews) {
        if (subView.tag == 6110) {
            [subView removeFromSuperview];
        }
    }
}

+ (UIImage *)captureScreenInRect:(CGRect)captureFrame inView:(UIView *)view {
    CALayer *layer = view.layer;
    UIGraphicsBeginImageContext(view.bounds.size);
    CGContextClipToRect(UIGraphicsGetCurrentContext(), captureFrame);
    [layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *screenImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return screenImage;
}

@end
And then in my view controller, it's simply called with
[ScreenBlur setBlurOnView:self.view];
I found a workaround for this (or, who knows, maybe this is how it was supposed to be done).
// ScreenBlur.m
+ (GPUImageView *)getBlurredImageWithGPUImageFromView:(UIView *)view {
    GPUImagePicture *imageSource = [[GPUImagePicture alloc] initWithImage:[self captureScreenInRect:view.frame inView:view] smoothlyScaleOutput:true];
    GPUImageFastBlurFilter *blur = [[GPUImageFastBlurFilter alloc] init];
    [blur setBlurPasses:3];
    [imageSource addTarget:blur];
    GPUImageView *filteredView = [[GPUImageView alloc] initWithFrame:view.frame];
    [blur addTarget:filteredView];
    [imageSource processImage];
    return filteredView;
}
// ViewController.m
// Blur the main screen
GPUImageView *blurred = [ScreenBlur getBlurredImageWithGPUImageFromView:self.view];
[blurred setTag:6110];
[self.view addSubview:blurred];
[self.view bringSubviewToFront:blurred];
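Since the workaround reuses tag 6110, the overlay can be torn down later the same way removeBlurFromView: does it, for example:

// ViewController.m, when dismissing the popover
[[self.view viewWithTag:6110] removeFromSuperview];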

Custom camera iOS

I have an app where I tried to implement a custom camera. Here is the source code:
// session and stillImageOutput are assumed to be ivars created elsewhere
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];

AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
[session addInput:input];
[session startRunning];

stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
Then I tried to take a photo and send it to another view controller:
- (IBAction)captureNow
{
    // Find the video connection on the still image output
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }

    // NSLog(@"about to request a capture from: %@", stillImageOutput);
    AcceptingPhotoViewController *photo = [[AcceptingPhotoViewController alloc] initWithNibName:@"AcceptingPhotoViewController" bundle:nil];
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        photo.image = image;
        photo.photoFromCamera = YES;
        [self.navigationController pushViewController:photo animated:NO];
    }];
}
But in my target class the image is rotated 90 degrees to the left.
I tried to rotate it back:
float newSide = MAX([image size].width, [image size].height);
CGSize size = CGSizeMake(newSide, newSide);
UIGraphicsBeginImageContext(size);
CGContextRef ctx = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(ctx, newSide / 2, newSide / 2);
CGContextRotateCTM(ctx, M_PI_2); // 90 degrees
CGContextDrawImage(ctx, CGRectMake(-[image size].width / 2, -[image size].height / 2, size.width, size.height), image.CGImage);
UIImage *i = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
image = i;
The image is now rotated to the right, but it displays mirrored and stretched.
Any help? Can I rotate it another way, or maybe I should take the photo differently?
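One way to take the photo differently, for what it's worth: ask AVFoundation for an upright frame before capturing, by setting the connection's video orientation just before calling captureStillImageAsynchronouslyFromConnection: (a sketch; assumes a portrait-only UI):

if ([videoConnection isVideoOrientationSupported]) {
    // Ask the connection to deliver the still image already rotated for portrait
    videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
}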
I found a solution to this problem:
I rotate and transform the image this way.
// Step 1: rotate the image by 90 degrees
UIView *rotatedViewBox = [[UIView alloc] initWithFrame:CGRectMake(0, 0, image.size.width, image.size.height)];
float angleRadians = 90 * ((float)M_PI / 180.0f);
CGAffineTransform t = CGAffineTransformMakeRotation(angleRadians);
rotatedViewBox.transform = t;
CGSize rotatedSize = rotatedViewBox.frame.size;

UIGraphicsBeginImageContext(rotatedSize);
CGContextRef bitmap = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(bitmap, rotatedSize.width / 2, rotatedSize.height / 2);
CGContextRotateCTM(bitmap, angleRadians);
CGContextScaleCTM(bitmap, 1.0, -1.0); // flip to undo the mirroring
CGContextDrawImage(bitmap, CGRectMake(-image.size.width / 2, -image.size.height / 2, image.size.width, image.size.height), [image CGImage]);
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
image = newImage;

// Step 2: redraw with width and height swapped to fix the stretching
CGSize newSize = CGSizeMake(image.size.height, image.size.width);
UIGraphicsBeginImageContext(newSize);
// Tell the old image to draw in this new context, with the desired new size
[image drawInRect:CGRectMake(0, 0, image.size.height, image.size.width)];
// Get the new image from the context
UIImage *newImage2 = UIGraphicsGetImageFromCurrentImageContext();
// End the context
UIGraphicsEndImageContext();
image = newImage2;
I also had the same problem; just copy and paste.
Try this code, it will help:
- (UIImage *)fixrotation:(UIImage *)image
{
    // Nothing to do if the image is already upright
    if (image.imageOrientation == UIImageOrientationUp) return image;

    CGAffineTransform transform = CGAffineTransformIdentity;

    // First, undo the rotation implied by the orientation flag
    switch (image.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.width, 0);
            transform = CGAffineTransformRotate(transform, M_PI_2);
            break;
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, 0, image.size.height);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;
        case UIImageOrientationUp:
        case UIImageOrientationUpMirrored:
            break;
    }

    // Then undo any mirroring
    switch (image.imageOrientation) {
        case UIImageOrientationUpMirrored:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.height, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        case UIImageOrientationUp:
        case UIImageOrientationDown:
        case UIImageOrientationLeft:
        case UIImageOrientationRight:
            break;
    }

    // Now we draw the underlying CGImage into a new context, applying the transform
    // calculated above.
    CGContextRef ctx = CGBitmapContextCreate(NULL, image.size.width, image.size.height,
                                             CGImageGetBitsPerComponent(image.CGImage), 0,
                                             CGImageGetColorSpace(image.CGImage),
                                             CGImageGetBitmapInfo(image.CGImage));
    CGContextConcatCTM(ctx, transform);
    switch (image.imageOrientation) {
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // Width and height are swapped for the rotated orientations
            CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.height, image.size.width), image.CGImage);
            break;
        default:
            CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.width, image.size.height), image.CGImage);
            break;
    }

    // And now we just create a new UIImage from the drawing context
    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);
    return img;
}
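Usage from the capture completion handler above is then a single call:

photo.image = [self fixrotation:image];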
