Basically I have an image that gets drawn as the touch moves. When I toggle on a button called eraser, I want to detect whether the context I have drawn into is NOT black at the position of the touch. Is there any way to do this?
- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        hue = 0.0;
        [self initContext:frame.size];
        framsize = frame.size;
    }
    return self;
}
-(void)clear{
    // Release the old context and free its backing buffer before recreating
    // them; just nilling the pointers (as the original did) leaks both.
    CGContextRelease(cacheContext);
    free(cacheBitmap);
    cacheContext = nil;
    cacheBitmap = nil;
    [self initContext:framsize];
    [self setNeedsDisplay];
}
- (BOOL) initContext:(CGSize)size {
    float scaleFactor = [[UIScreen mainScreen] scale];
    int bitmapByteCount;
    int bitmapBytesPerRow;

    // Declare the number of bytes per row. Each pixel in the bitmap in this
    // example is represented by 4 bytes; 8 bits each of red, green, blue, and
    // alpha.
    bitmapBytesPerRow = (size.width * 4);
    bitmapByteCount = (bitmapBytesPerRow * size.height) * scaleFactor * scaleFactor;

    // Allocate memory for image data. This is the destination in memory
    // where any drawing to the bitmap context will be rendered.
    cacheBitmap = malloc(bitmapByteCount);
    if (cacheBitmap == NULL) {
        return NO;
    }

    CGBitmapInfo bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Big;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    cacheContext = CGBitmapContextCreate(cacheBitmap, size.width * scaleFactor, size.height * scaleFactor, 8, bitmapBytesPerRow * scaleFactor, colorSpace, bitmapInfo);
    CGContextScaleCTM(cacheContext, scaleFactor, scaleFactor);
    CGColorSpaceRelease(colorSpace);

    // The CTM is already scaled, so fill in point coordinates; the original
    // multiplied by the scale factor again and swapped width and height.
    CGContextSetRGBFillColor(cacheContext, 1, 0, 0, 0.0);
    CGContextFillRect(cacheContext, (CGRect){CGPointZero, size});
    return YES;
}
//-(float) alphaAtX:(int)x y:(int)y //get alpha component using the pointer 'pixelData'
//{
// float scaleFactor = [[UIScreen mainScreen] scale];
// return pixelData[(y * 320*scaleFactor + x) *4 + 3]; //+0 for red, +1 for green, +2 for blue, +3 for alpha
//}
-(UIColor *) colorOfPoint:(CGPoint)point
{
    unsigned char pixel[4] = {0};
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pixel, 1, 1, 8, 4, colorSpace, (CGBitmapInfo)kCGImageAlphaPremultipliedLast);
    CGContextTranslateCTM(context, -point.x, -point.y);
    [self.layer renderInContext:context];
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    UIColor *color = [UIColor colorWithRed:pixel[0]/255.0
                                     green:pixel[1]/255.0
                                      blue:pixel[2]/255.0
                                     alpha:pixel[3]/255.0];
    return color;
}
- (void) drawToCache:(CGPoint)currentpos andLastPos:(CGPoint)pos Color:(UIColor *)colors Thickness:(float)thickness{
    hue += 0.005;
    if (hue > 1.0) hue = 0.0;

    UIColor *color;
    if (!colors) {
        color = [UIColor colorWithHue:hue saturation:0.7 brightness:1.0 alpha:1.0];
    } else {
        color = colors;
    }

    CGContextSetStrokeColorWithColor(cacheContext, [color CGColor]);
    CGContextSetLineCap(cacheContext, kCGLineCapRound);
    CGContextSetLineWidth(cacheContext, 6 + thickness);

    // Stroke from the previous position to the current one; the original
    // assigned both points from currentpos and drew a zero-length line,
    // leaving the andLastPos: parameter unused.
    CGPoint lastPoint = pos;
    CGPoint newPoint = currentpos;
    CGContextMoveToPoint(cacheContext, lastPoint.x, lastPoint.y);
    CGContextAddLineToPoint(cacheContext, newPoint.x, newPoint.y);
    CGContextStrokePath(cacheContext);

    CGRect dirtyPoint1 = CGRectMake(lastPoint.x-10, lastPoint.y-10, 20, 20);
    CGRect dirtyPoint2 = CGRectMake(newPoint.x-10, newPoint.y-10, 20, 20);
    [self setNeedsDisplayInRect:CGRectUnion(dirtyPoint1, dirtyPoint2)];
}
- (void) drawRect:(CGRect)rect {
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGImageRef cacheImage = CGBitmapContextCreateImage(cacheContext);
    CGContextDrawImage(context, self.bounds, cacheImage);
    CGImageRelease(cacheImage);
}
@end
AFAIK there is no easy way to get the color of a point in a CGContext, but you can use the approach described here: http://www.markj.net/iphone-uiimage-pixel-color/
I have not tested this code, but I hope it works or is easy to fix:
// ctxSize - the size of the context, in pixels
- (UIColor*) getPixelColorForContext:(CGContextRef)cgctx size:(CGSize)ctxSize atLocation:(CGPoint)point
{
    UIColor* color = nil;
    if (cgctx == NULL) { return nil; /* error */ }
    size_t w = ctxSize.width;

    // Get a pointer to the image data associated with the bitmap context.
    unsigned char* data = CGBitmapContextGetData(cgctx);
    if (data != NULL) {
        // offset locates the pixel in the data from x,y.
        // 4 for 4 bytes of data per pixel, w is the width of one row of data.
        // The indexing assumes the ARGB layout of the question's context
        // (kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Big).
        int offset = 4*((w*round(point.y))+round(point.x));
        int alpha = data[offset];
        int red = data[offset+1];
        int green = data[offset+2];
        int blue = data[offset+3];
        NSLog(@"offset: %i colors: RGB A %i %i %i %i",offset,red,green,blue,alpha);
        color = [UIColor colorWithRed:(red/255.0f) green:(green/255.0f) blue:(blue/255.0f) alpha:(alpha/255.0f)];
    }
    // Do NOT free(data) here: the buffer still belongs to the live bitmap
    // context; the original freed it, which would leave the context pointing
    // at freed memory.
    return color;
}
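Wiring that into the eraser toggle from the question could look roughly like this. This is a sketch, not tested code: it assumes the cacheContext and framsize ivars from the question, and converts the touch point from points to pixels because the cache context is scale-factor pixels wide:
- (BOOL)isNonBlackAtPoint:(CGPoint)point {
    // Convert the touch point (and the size we pass) from points to pixels,
    // matching the scaled bitmap created in initContext:.
    CGFloat scale = [[UIScreen mainScreen] scale];
    CGSize pixelSize = CGSizeMake(framsize.width * scale, framsize.height * scale);
    CGPoint pixelPoint = CGPointMake(point.x * scale, point.y * scale);
    UIColor *c = [self getPixelColorForContext:cacheContext
                                          size:pixelSize
                                    atLocation:pixelPoint];
    CGFloat r = 0, g = 0, b = 0, a = 0;
    [c getRed:&r green:&g blue:&b alpha:&a];
    // "Not black" = something was drawn (alpha > 0) and at least one color
    // component is non-zero.
    return (a > 0 && (r > 0 || g > 0 || b > 0));
}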
I know similar questions have been asked before.
What I want is to get the RGB pixel value of the image inside the image view, so it can be any image whose pixel values we want to get.
This is what I have used to get the point where the image is clicked.
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    UITouch *touch = [touches anyObject];
    if ([touch tapCount] == 2) {
        //self.imageView.image = nil;
        return;
    }
    CGPoint lastPoint = [touch locationInView:self.imageViewGallery];
    NSLog(@"%f", lastPoint.x);
    NSLog(@"%f", lastPoint.y);
}
And to get the image I have pasted this code.
+ (NSArray*)getRGBAsFromImage:(UIImage *)image atX:(int)xx andY:(int)yy count:(int)count
{
    NSMutableArray *result = [NSMutableArray arrayWithCapacity:count];

    /** It requires to get the image into your data buffer, so how can we get the `ImageViewImage` **/
    CGImageRef imageRef = [image CGImage];
    NSUInteger width = CGImageGetWidth(imageRef);
    NSUInteger height = CGImageGetHeight(imageRef);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    unsigned char *rawData = (unsigned char*) calloc(height * width * 4, sizeof(unsigned char));
    NSUInteger bytesPerPixel = 4;
    NSUInteger bytesPerRow = bytesPerPixel * width;
    NSUInteger bitsPerComponent = 8;
    CGContextRef context = CGBitmapContextCreate(rawData, width, height,
                                                 bitsPerComponent, bytesPerRow, colorSpace,
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
    CGContextRelease(context);

    // Now your rawData contains the image data in the RGBA8888 pixel format.
    int byteIndex = (bytesPerRow * yy) + xx * bytesPerPixel;
    for (int ii = 0 ; ii < count ; ++ii)
    {
        CGFloat red   = (rawData[byteIndex] * 1.0) / 255.0;
        CGFloat green = (rawData[byteIndex + 1] * 1.0) / 255.0;
        CGFloat blue  = (rawData[byteIndex + 2] * 1.0) / 255.0;
        CGFloat alpha = (rawData[byteIndex + 3] * 1.0) / 255.0;
        byteIndex += 4;
        UIColor *acolor = [UIColor colorWithRed:red green:green blue:blue alpha:alpha];
        [result addObject:acolor];
    }
    free(rawData);
    return result;
}
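For what it's worth, here is a sketch of how the touch handler above could feed that method. MyImageHelper is a placeholder for whatever class declares the class method, and this assumes the image is displayed unscaled, so view coordinates map 1:1 onto image pixels:
CGPoint lastPoint = [touch locationInView:self.imageViewGallery];
NSArray *colors = [MyImageHelper getRGBAsFromImage:self.imageViewGallery.image
                                               atX:(int)lastPoint.x
                                              andY:(int)lastPoint.y
                                             count:1];
if (colors.count > 0) {
    UIColor *touched = [colors objectAtIndex:0];
    NSLog(@"color under touch: %@", touched);
}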
I am new to iOS, so please explain it; suggesting some tutorial would also be great.
Use this example article. It describes a color picker built on images, and you can pick up the required info from it very easily. It helped me in my app. Let me know if any help/suggestion is needed :)
EDIT:
Update your getPixelColorAtLocation: like this. It will give you the correct color then.
- (UIColor*) getPixelColorAtLocation:(CGPoint)point {
    UIColor* color = nil;
    CGImageRef inImage = self.image.CGImage;
    // Create an off screen bitmap context to draw the image into. Format ARGB is 4 bytes for each pixel: Alpha, Red, Green, Blue
    CGContextRef cgctx = [self createARGBBitmapContextFromImage:inImage];
    if (cgctx == NULL) { return nil; /* error */ }

    size_t w = CGImageGetWidth(inImage);
    size_t h = CGImageGetHeight(inImage);
    CGRect rect = {{0,0},{w,h}};

    /** Extra added code for resized images ****/
    float xscale = w / self.frame.size.width;
    float yscale = h / self.frame.size.height;
    point.x = point.x * xscale;
    point.y = point.y * yscale;
    /** ****************************************/

    /** Extra code added for resolution ***********/
    CGFloat x = 1.0;
    if ([self.image respondsToSelector:@selector(scale)]) x = self.image.scale;
    /*********************************************/

    // Draw the image to the bitmap context. Once we draw, the memory
    // allocated for the context for rendering will then contain the
    // raw image data in the specified color space.
    CGContextDrawImage(cgctx, rect, inImage);

    // Now we can get a pointer to the image data associated with the bitmap
    // context.
    unsigned char* data = CGBitmapContextGetData(cgctx);
    if (data != NULL) {
        // offset locates the pixel in the data from x,y.
        // 4 for 4 bytes of data per pixel, w is width of one row of data.
        // int offset = 4*((w*round(point.y))+round(point.x));
        int offset = 4*((w*round(point.y))+round(point.x))*x; // replacement for resolution
        int alpha = data[offset];
        int red = data[offset+1];
        int green = data[offset+2];
        int blue = data[offset+3];
        NSLog(@"offset: %i colors: RGB A %i %i %i %i",offset,red,green,blue,alpha);
        color = [UIColor colorWithRed:(red/255.0f) green:(green/255.0f) blue:(blue/255.0f) alpha:(alpha/255.0f)];
    }

    // When finished, release the context
    CGContextRelease(cgctx);
    // Free image data memory for the context
    if (data) { free(data); }
    return color;
}
Let me know if this fix does not work :)
Here is the GitHub for my code. Use it to implement the image picker. Let me know if more info is needed.
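Both versions of getPixelColorAtLocation: in this thread call a createARGBBitmapContextFromImage: helper that is not shown. Based on the markj.net article they derive from, and on the CreateARGBBitmapContext() function quoted later on this page, it looks roughly like this; a sketch reconstructed from those sources, not the answerer's exact code:
- (CGContextRef)createARGBBitmapContextFromImage:(CGImageRef)inImage {
    // Sizes come from the image itself; the caller frees the data buffer
    // after releasing the context (as the snippets above do).
    size_t pixelsWide = CGImageGetWidth(inImage);
    size_t pixelsHigh = CGImageGetHeight(inImage);
    size_t bitmapBytesPerRow = pixelsWide * 4;
    size_t bitmapByteCount = bitmapBytesPerRow * pixelsHigh;

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    if (colorSpace == NULL) { return NULL; }

    void *bitmapData = malloc(bitmapByteCount);
    if (bitmapData == NULL) {
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }

    // Premultiplied ARGB, 8 bits per component, matching the
    // data[offset] = alpha, data[offset+1] = red, ... indexing above.
    CGContextRef context = CGBitmapContextCreate(bitmapData,
                                                 pixelsWide,
                                                 pixelsHigh,
                                                 8,
                                                 bitmapBytesPerRow,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    if (context == NULL) {
        free(bitmapData);
    }
    CGColorSpaceRelease(colorSpace);
    return context;
}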
Just use this method, it works for me:
- (UIColor*) getPixelColorAtLocation:(CGPoint)point
{
    UIColor* color = nil;
    CGImageRef inImage;
    inImage = imgZoneWheel.image.CGImage;
    // Create an off screen bitmap context to draw the image into. Format ARGB is 4 bytes for each pixel: Alpha, Red, Green, Blue
    CGContextRef cgctx = [self createARGBBitmapContextFromImage:inImage];
    if (cgctx == NULL) { return nil; /* error */ }

    size_t w = CGImageGetWidth(inImage);
    size_t h = CGImageGetHeight(inImage);
    CGRect rect = {{0,0},{w,h}};

    // Draw the image to the bitmap context. Once we draw, the memory
    // allocated for the context for rendering will then contain the
    // raw image data in the specified color space.
    CGContextDrawImage(cgctx, rect, inImage);

    // Now we can get a pointer to the image data associated with the bitmap
    // context.
    unsigned char* data = CGBitmapContextGetData(cgctx);
    if (data != NULL) {
        // offset locates the pixel in the data from x,y.
        // 4 for 4 bytes of data per pixel, w is width of one row of data.
        int offset = 4*((w*round(point.y))+round(point.x));
        int alpha = data[offset]; // was an undeclared assignment in the original
        int red = data[offset+1];
        int green = data[offset+2];
        int blue = data[offset+3];
        color = [UIColor colorWithRed:(red/255.0f) green:(green/255.0f) blue:(blue/255.0f) alpha:(alpha/255.0f)];
    }

    // When finished, release the context (the original left this commented
    // out, which leaks the context).
    CGContextRelease(cgctx);
    // Free image data memory for the context
    if (data) { free(data); }
    return color;
}
Just use it like this:
UIColor *color = [self getPixelColorAtLocation:lastPoint];
If you try to get a color from an image through CGBitmapContextGetData and set it, for example, as the background of a view, you will get different colors on iPhone 6 and later. On iPhone 5 everything will be OK. More information about this: Getting the right colors in your iOS app
This solution, which goes through UIImage, gives you the right color:
- (UIColor *)getColorFromImage:(UIImage *)image pixelPoint:(CGPoint)pixelPoint {
    CGImageRef imageRef = CGImageCreateWithImageInRect([image CGImage], CGRectMake(pixelPoint.x, pixelPoint.y, 1.f, 1.f));
    UIImage *croppedImage = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    return [UIColor colorWithPatternImage:croppedImage];
}
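One caveat: colorWithPatternImage: returns a pattern color, which is fine for fills and backgrounds but will not give component values back via getRed:green:blue:alpha:. A hypothetical usage sketch (imageView and swatchView are placeholder names, not from the answer):
CGPoint p = [touch locationInView:self.imageView];
UIColor *picked = [self getColorFromImage:self.imageView.image pixelPoint:p];
self.swatchView.backgroundColor = picked; // pattern colors work for fills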
You want to know how to get the image from an image view?
UIImageView has a property, image. Simply use that property.
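That is, one line (using the image view named in the question):
UIImage *img = self.imageViewGallery.image; // the image currently displayed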
I am using the following code to create a bitmap image.
UIGraphicsBeginImageContextWithOptions(CGSizeMake(targetWidth, targetHeight), NO, 0.0);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextClearRect(context, CGRectMake(0, 0, targetWidth, targetHeight));
float red, green, blue, alpha;
for (int Row = 1; Row <= targetHeight; Row++)
{
    if (Row <= originalHeight) {
        for (int Col = 0; Col < targetWidth; Col++) // the original incremented an undeclared iCol here
        {
            if (Col < originalWidth) {
                UIColor *color = [originalImage colorAtPixel:CGPointMake(Col, Row) :originalImage];
                [color getRed:&red green:&green blue:&blue alpha:&alpha];
                if (red == 0.0 && green == 0.0 && blue == 0.0 && alpha == 0.0) {
                    CGContextSetRGBFillColor(context, 0, 0, 0, 0); // set transparent pixels
                }
                else {
                    CGContextSetRGBFillColor(context, red, green, blue, 1);
                }
                CGContextFillRect(context, CGRectMake(Col, Row, 1, 1));
            }
        }
    }
}
finalImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// The code below is used from another link
- (UIColor *)colorAtPixel:(CGPoint)point :(UIImage*)image {
    // Cancel if point is outside image coordinates
    if (!CGRectContainsPoint(CGRectMake(0.0f, 0.0f, image.size.width, image.size.height), point)) {
        return nil;
    }

    NSInteger pointX = trunc(point.x);
    NSInteger pointY = trunc(point.y);
    CGImageRef cgImage = image.CGImage;
    NSUInteger width = image.size.width;
    NSUInteger height = image.size.height;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    int bytesPerPixel = 4;
    int bytesPerRow = bytesPerPixel * 1;
    NSUInteger bitsPerComponent = 8;
    unsigned char pixelData[4] = { 0, 0, 0, 0 };
    CGContextRef context = CGBitmapContextCreate(pixelData,
                                                 1,
                                                 1,
                                                 bitsPerComponent,
                                                 bytesPerRow,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    CGContextSetBlendMode(context, kCGBlendModeCopy);

    // Draw the pixel we are interested in onto the bitmap context
    CGContextTranslateCTM(context, -pointX, pointY - (CGFloat)height);
    CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, (CGFloat)width, (CGFloat)height), cgImage);
    CGContextRelease(context);

    // Convert color values [0..255] to floats [0.0..1.0]
    CGFloat red   = (CGFloat)pixelData[0] / 255.0f;
    CGFloat green = (CGFloat)pixelData[1] / 255.0f;
    CGFloat blue  = (CGFloat)pixelData[2] / 255.0f;
    CGFloat alpha = (CGFloat)pixelData[3] / 255.0f;
    return [UIColor colorWithRed:red green:green blue:blue alpha:alpha];
}
I want the image to look transparent. I change the black background of the context to transparent using CGContextSetRGBFillColor(context, 0, 0, 0, 0); // set transparent
which works fine. But it still leaves a black border on the image, which I want to remove.
How can this be achieved? Any pointers?
One way would be to use a soft threshold instead of a hard limit: check whether the luminance of a given pixel is below some small but non-zero amount, and if so, set the alpha of the pixel to an interpolated value rather than fully opaque. For example:
const double epsilon = 0.1; // or some other small value
double luminance = (red * 0.2126) + (green * 0.7152) + (blue * 0.0722);
if (luminance < epsilon)
{
    CGContextSetRGBFillColor(context, red, green, blue, luminance / epsilon);
}
else
{
    CGContextSetRGBFillColor(context, red, green, blue, 1.0);
}
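Dropped into the loop from the question, this replaces the all-zeros test (red, green and blue are the floats already extracted there). As a worked example: with epsilon = 0.1, a pixel of luminance 0.05 gets alpha 0.5, so near-black edge pixels fade out instead of forming a hard border:
// Inside the Col loop, instead of the red == 0 && green == 0 && ... check:
const double epsilon = 0.1;
double luminance = (red * 0.2126) + (green * 0.7152) + (blue * 0.0722);
double outAlpha = (luminance < epsilon) ? (luminance / epsilon) : 1.0;
CGContextSetRGBFillColor(context, red, green, blue, outAlpha);
CGContextFillRect(context, CGRectMake(Col, Row, 1, 1));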
I want to change the color of a UIImage to another color: first I want to touch the image and detect the color of the touched pixel, and then replace the color of the touched pixel with another color.
I have the following code, in which I am trying to change the touched pixel's color, but it's returning a transparent image.
- (UIColor *)colorAtPixel:(CGPoint)point
{
    // Cancel if point is outside image coordinates
    if (!CGRectContainsPoint(CGRectMake(0.0f, 0.0f, self.size.width, self.size.height), point)) {
        return nil;
    }

    // Create a 1x1 pixel byte array and bitmap context to draw the pixel into.
    // Reference: http://stackoverflow.com/questions/1042830/retrieving-a-pixel-alpha-value-for-a-uiimage
    NSInteger pointX = trunc(point.x);
    NSInteger pointY = trunc(point.y);
    CGImageRef cgImage = self.CGImage;
    NSUInteger width = self.size.width;
    NSUInteger height = self.size.height;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    int bytesPerPixel = 4;
    int bytesPerRow = bytesPerPixel * 1;
    NSUInteger bitsPerComponent = 8;
    unsigned char pixelData[4] = { 0, 0, 0, 0 };
    CGContextRef context = CGBitmapContextCreate(pixelData,
                                                 1,
                                                 1,
                                                 bitsPerComponent,
                                                 bytesPerRow,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    CGContextSetBlendMode(context, kCGBlendModeCopy);

    // Draw the pixel we are interested in onto the bitmap context
    CGContextTranslateCTM(context, -pointX, pointY - (CGFloat)height);
    CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, (CGFloat)width, (CGFloat)height), cgImage);
    CGContextRelease(context);

    // Convert color values [0..255] to floats [0.0..1.0]
    // (red/green/blue/alpha are ivars, reused by changeWhiteColorTransparent:)
    red = (CGFloat)pixelData[0] / 255.0f;
    green = (CGFloat)pixelData[1] / 255.0f;
    blue = (CGFloat)pixelData[2] / 255.0f;
    alpha = (CGFloat)pixelData[3] / 255.0f;
    [self changeWhiteColorTransparent:imageview.img];
    return [UIColor colorWithRed:red green:green blue:blue alpha:alpha];
}
-(void)changeWhiteColorTransparent:(UIImage *)image{
    CGImageRef rawImageRef = image.CGImage;
    // CGImageCreateWithMaskingColors expects {min,max} pairs per component in
    // the image's component range (0-255 for 8-bit); the original passed
    // {red, 0, green, 0, blue, 0}, which is mis-ordered and in the 0..1 range.
    const float colorMasking[6] = { red * 255, red * 255, green * 255, green * 255, blue * 255, blue * 255 };
    UIGraphicsBeginImageContext(image.size);
    CGImageRef maskedImageRef = CGImageCreateWithMaskingColors(rawImageRef, colorMasking);
    {
        // flip the context so the image is not drawn upside down (iPhone)
        CGContextTranslateCTM(UIGraphicsGetCurrentContext(), 0.0, image.size.height);
        CGContextScaleCTM(UIGraphicsGetCurrentContext(), 1.0, -1.0);
    }
    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, image.size.width, image.size.height), maskedImageRef);
    UIImage *result = UIGraphicsGetImageFromCurrentImageContext();
    CGImageRelease(maskedImageRef);
    UIGraphicsEndImageContext();

    NSString *imagespath = [self createDirectoryInDocumentsFolderWithName:@"images"];
    NSFileManager *fileM = [NSFileManager defaultManager];
    NSArray *contents = [fileM contentsOfDirectoryAtPath:imagespath error:nil];
    NSString *savedImagePath = [imagespath stringByAppendingPathComponent:[NSString stringWithFormat:@"images%d.png", (int)([contents count] + 1)]];
    NSData *imageData = UIImagePNGRepresentation(result);
    [imageData writeToFile:savedImagePath atomically:NO];
}
Here I am trying to make the touched colour transparent, but if I want to change it to a yellow color instead, what do I need to change in the code?
Call this method from touchesMoved:...
-(void) getPixelColorAtLocation:(CGPoint)point
{
    unsigned char pixel[4] = {0};
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pixel, 1, 1, 8, 4, colorSpace, kCGImageAlphaPremultipliedLast);
    CGContextTranslateCTM(context, -point.x, -point.y);
    [self.layer renderInContext:context];
    // NSLog(@"x- %f y- %f", point.x, point.y);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    NSLog(@"RGB Color code :%d %d %d", pixel[0], pixel[1], pixel[2]);
}
Set this RGB value on your image view.
You can get the color code of the touch point as an RGB combination. Try it;
it will help you.
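As for the asker's follow-up (replacing the touched color with yellow instead of transparent): once CGImageCreateWithMaskingColors has knocked the touched color out, you can composite the masked image over a yellow fill, so the holes show yellow. A sketch reusing image and maskedImageRef from the question's changeWhiteColorTransparent: method; if the source image has transparency of its own, clip to its alpha first:
UIGraphicsBeginImageContext(image.size);
CGContextRef ctx = UIGraphicsGetCurrentContext();
// Fill with the replacement color first...
CGContextSetFillColorWithColor(ctx, [UIColor yellowColor].CGColor);
CGContextFillRect(ctx, CGRectMake(0, 0, image.size.width, image.size.height));
// ...then draw the masked image on top; the masked-out pixels let yellow through.
CGContextTranslateCTM(ctx, 0.0, image.size.height);
CGContextScaleCTM(ctx, 1.0, -1.0);
CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.width, image.size.height), maskedImageRef);
UIImage *recolored = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();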
I'm trying to change the color of an image on a background thread.
Apple's documentation says UIGraphicsBeginImageContext can only be called from the main thread, so I'm trying to use CGBitmapContextCreate:
context = CGBitmapContextCreate(bitmapData,
                                pixelsWide,
                                pixelsHigh,
                                8, // bits per component
                                bitmapBytesPerRow,
                                colorSpace,
                                kCGImageAlphaPremultipliedFirst);
I have two versions of changeColor:, the first using UIGraphicsBeginImageContext, the second using CGBitmapContextCreate.
The first one correctly changes the color, but the second one doesn't.
Why is that?
- (UIImage*) changeColor: (UIColor*) aColor
{
    if (aColor == nil)
        return self;

    UIGraphicsBeginImageContext(self.size);
    CGRect bounds;
    bounds.origin = CGPointMake(0, 0);
    bounds.size = self.size;

    [aColor set];
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGContextTranslateCTM(context, 0, self.size.height);
    CGContextScaleCTM(context, 1.0, -1.0);
    CGContextClipToMask(context, bounds, [self CGImage]);
    CGContextFillRect(context, bounds);

    UIImage *img = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return img;
}
- (UIImage*) changeColor: (UIColor*) aColor
{
    if (aColor == nil)
        return self;

    CGContextRef context = CreateARGBBitmapContext(self.size);
    CGRect bounds;
    bounds.origin = CGPointMake(0, 0);
    bounds.size = self.size;

    CGColorRef colorRef = aColor.CGColor;
    const CGFloat *components = CGColorGetComponents(colorRef);
    float red = components[0];
    float green = components[1];
    float blue = components[2];
    CGContextSetRGBFillColor(context, red, green, blue, 1);
    CGContextClipToMask(context, bounds, [self CGImage]);
    CGContextFillRect(context, bounds);

    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    UIImage* img = [UIImage imageWithCGImage: imageRef];
    unsigned char* data = (unsigned char*)CGBitmapContextGetData(context);
    CGContextRelease(context);
    free(data);
    return img;
}
CGContextRef CreateARGBBitmapContext(CGSize size)
{
    CGContextRef context = NULL;
    CGColorSpaceRef colorSpace;
    void *bitmapData;
    int bitmapByteCount;
    int bitmapBytesPerRow;

    // Get image width, height. We'll use the entire image.
    size_t pixelsWide = size.width;
    size_t pixelsHigh = size.height;

    // Declare the number of bytes per row. Each pixel in the bitmap in this
    // example is represented by 4 bytes; 8 bits each of red, green, blue, and
    // alpha.
    bitmapBytesPerRow = (pixelsWide * 4);
    bitmapByteCount = (bitmapBytesPerRow * pixelsHigh);

    // Use the generic RGB color space.
    colorSpace = CGColorSpaceCreateDeviceRGB();
    if (colorSpace == NULL)
    {
        fprintf(stderr, "Error allocating color space\n");
        return NULL;
    }

    // Allocate memory for image data. This is the destination in memory
    // where any drawing to the bitmap context will be rendered.
    bitmapData = malloc(bitmapByteCount);
    if (bitmapData == NULL)
    {
        fprintf(stderr, "Memory not allocated!");
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }

    // Create the bitmap context. We want pre-multiplied ARGB, 8-bits
    // per component. Regardless of what the source image format is
    // (CMYK, Grayscale, and so on) it will be converted over to the format
    // specified here by CGBitmapContextCreate.
    context = CGBitmapContextCreate(bitmapData,
                                    pixelsWide,
                                    pixelsHigh,
                                    8, // bits per component
                                    bitmapBytesPerRow,
                                    colorSpace,
                                    kCGImageAlphaPremultipliedFirst);
    if (context == NULL)
    {
        free(bitmapData);
        fprintf(stderr, "Context not created!");
    }

    // Make sure to release the colorspace before returning
    CGColorSpaceRelease(colorSpace);
    return context;
}
Your second method is doing work that the first never did. Here's an adjustment of the second method to more closely match the first one:
- (UIImage*) changeColor: (UIColor*) aColor
{
    if (aColor == nil)
        return self;

    CGContextRef context = CreateARGBBitmapContext(self.size);
    CGRect bounds;
    bounds.origin = CGPointMake(0, 0);
    bounds.size = self.size;

    CGContextSetFillColorWithColor(context, aColor.CGColor);
    CGContextClipToMask(context, bounds, [self CGImage]);
    CGContextFillRect(context, bounds);

    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    UIImage* img = [UIImage imageWithCGImage: imageRef];
    CGImageRelease(imageRef); // the UIImage retains it; release our reference
    CGContextRelease(context);
    return img;
}
The two changes I made: I converted this to use CGContextSetFillColorWithColor(), and I removed the dangerous and incorrect free() of the bitmap context's backing data. If this code snippet does not behave identically to the first one, then you will have to look at your implementation of CreateARGBBitmapContext() to verify that it is correct.
Of course, as Brad Larson mentioned in the comments, if you're targeting iOS 4.0 and above, the UIKit graphics methods are (according to the release notes) thread-safe and you should be able to use the first method just fine.
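And since the whole point was to run off the main thread, a usage sketch with GCD (sourceImage and imageView are placeholder names; the hop back to the main queue for the UIImageView update is the important part):
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    // Safe off the main thread: this path uses CGBitmapContextCreate, not UIGraphics*.
    UIImage *tinted = [sourceImage changeColor:[UIColor redColor]];
    dispatch_async(dispatch_get_main_queue(), ^{
        imageView.image = tinted; // UIKit view updates belong on the main thread
    });
});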