Handling touches in CALayer - iOS

I'm trying to detect touches in my view.
My code:
- (CALayer *)layerForTouch:(UITouch *)touch {
    UIView *view = self;
    CGPoint location = [touch locationInView:view];
    location = [view convertPoint:location toView:nil];
    CALayer *hitPresentationLayer = [view.layer.presentationLayer hitTest:location];
    if (hitPresentationLayer) {
        return hitPresentationLayer.modelLayer;
    }
    return nil;
}
But I found a problem: it only detects the first (topmost) layer in the tree.
Every figure in the screenshot is one layer; I draw them from the biggest to the smallest, as a tree.

If your topmost layer covers the whole screen (or the layers below it), then yes, you will get the topmost layer from the hitTest check. You can do a containsPoint check on all your layers if you want to test the layers further below. Sample code below:
@interface ViewController ()
@property (nonatomic, strong) CALayer *layer1;
@property (nonatomic, strong) CALayer *layer2;
@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];

    CALayer *layer = [[CALayer alloc] init];
    layer.frame = (CGRect){ 10, 10, 100, 100 };
    [self.view.layer addSublayer:layer];
    layer.backgroundColor = [UIColor redColor].CGColor;
    self.layer1 = layer;

    layer = [[CALayer alloc] init];
    layer.frame = (CGRect){ 60, 60, 100, 100 };
    [self.view.layer addSublayer:layer];
    layer.backgroundColor = [UIColor blueColor].CGColor;
    self.layer2 = layer;
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGPoint touchPoint = [(UITouch *)[touches anyObject] locationInView:self.view];

    CALayer *touchedLayer = [self.view.layer.presentationLayer hitTest:touchPoint];
    CALayer *originalLayer = [touchedLayer modelLayer];
    if (originalLayer == self.layer1) { NSLog(@"layer 1 touched"); }
    if (originalLayer == self.layer2) { NSLog(@"layer 2 touched"); }

    for (CALayer *layer in @[self.layer1, self.layer2])
    {
        CGPoint point = [layer convertPoint:touchPoint fromLayer:self.view.layer];
        NSLog(@"layer %@ containsPoint ? %@", layer, [layer containsPoint:point] ? @"YES" : @"NO");
        NSLog(@"original point : %@ | converted point : %@", NSStringFromCGPoint(touchPoint), NSStringFromCGPoint(point));
    }
}

@end
Or you can use CAShapeLayer and test against the shape layer's CGPath to see whether it was touched, like the example below:
@interface ViewController ()
@property (nonatomic, strong) CALayer *layer1;
@property (nonatomic, strong) CALayer *layer2;
@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];

    CAShapeLayer *shapeLayer = [[CAShapeLayer alloc] init];
    shapeLayer.path = [UIBezierPath bezierPathWithRect:(CGRect){ 0, 0, 100, 10 }].CGPath;
    shapeLayer.backgroundColor = [UIColor redColor].CGColor;
    shapeLayer.frame = (CGRect){ 20, 20, 100, 100 };
    [self.view.layer addSublayer:shapeLayer];
    self.layer1 = shapeLayer;

    shapeLayer = [[CAShapeLayer alloc] init];
    shapeLayer.path = [UIBezierPath bezierPathWithRect:(CGRect){ 0, 0, 10, 100 }].CGPath;
    shapeLayer.backgroundColor = [[UIColor blueColor] colorWithAlphaComponent:0.6].CGColor;
    shapeLayer.frame = (CGRect){ 60, 60, 100, 100 };
    [self.view.layer addSublayer:shapeLayer];
    self.layer2 = shapeLayer;
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGPoint touchPoint = [(UITouch *)[touches anyObject] locationInView:self.view];

    CALayer *touchedLayer = [self.view.layer.presentationLayer hitTest:touchPoint];
    CALayer *originalLayer = [touchedLayer modelLayer];

    for (CAShapeLayer *layer in @[self.layer1, self.layer2])
    {
        if (![layer isKindOfClass:[CAShapeLayer class]]) { continue; }
        CGPoint point = [layer convertPoint:touchPoint fromLayer:self.view.layer];
        if (CGPathContainsPoint(layer.path, NULL, point, false))
        {
            NSLog(@"match found");
            if (originalLayer == self.layer1) { NSLog(@"layer 1 touched"); }
            if (originalLayer == self.layer2) { NSLog(@"layer 2 touched"); }
            return;
        }
    }
}

@end

Related

How to optimize CPU and RAM usage on a UIView which uses `drawInRect:blendMode:`?

I'm developing a drawing app that lets the user draw a UIBezierPath over an image that is drawn into the UIView with drawInRect:blendMode: in drawRect:. It uses over 80% CPU and 55 MB of RAM, and when I traced it in the Time Profiler, drawInRect:blendMode: was causing most of the CPU usage. I have already done a few optimizations, but what further optimizations can I do, both for drawing the UIImage with [image drawInRect:Rect]; and for the UIBezierPath the user draws on top of it?
Thanks in advance.
#import <AVFoundation/AVFoundation.h> // for AVMakeRectWithAspectRatioInsideRect

@interface InsideView () {
    CGRect imageRect;
    CGRect targetBounds;
}
@property (strong, nonatomic) NSMutableArray *strokes;
@property (nonatomic, strong) UIImage *img;
@property (nonatomic, strong) UIBezierPath *drawingPath;
@end

@implementation InsideView

- (void)drawRect:(CGRect)rect {
    targetBounds = self.layer.bounds;
    imageRect = AVMakeRectWithAspectRatioInsideRect(self.img.size, targetBounds);
    [self.img drawInRect:imageRect];
    for (int i = 0; i < [self.strokes count]; i++) {
        UIBezierPath *tmp = (UIBezierPath *)[self.strokes objectAtIndex:i];
        [[UIColor whiteColor] setStroke];
        [tmp stroke];
    }
}

- (CGRect)segmentBoundsFrom:(CGPoint)point1 to:(CGPoint)point2
{
    CGRect dirtyPoint1 = CGRectMake(point1.x - 10, point1.y - 10, 50, 50);
    CGRect dirtyPoint2 = CGRectMake(point2.x - 10, point2.y - 10, 50, 50);
    return CGRectUnion(dirtyPoint1, dirtyPoint2);
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    self.drawingPath = [UIBezierPath bezierPath];
    self.drawingPath.lineWidth = 10;
    self.drawingPath.lineCapStyle = kCGLineCapRound;
    self.drawingPath.lineJoinStyle = kCGLineJoinRound;
    [self.drawingPath moveToPoint:[[touches anyObject] locationInView:self]];
    [self.strokes addObject:self.drawingPath];
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
    // Dirty rect calculations
    CGPoint prevPoint = CGPathGetCurrentPoint(self.drawingPath.CGPath);
    CGPoint point = [[touches anyObject] locationInView:self];
    CGRect dirty = [self segmentBoundsFrom:prevPoint to:point];
    [[self.strokes lastObject] addLineToPoint:point];
    [self setNeedsDisplayInRect:dirty];
}

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
    [[self.strokes lastObject] addLineToPoint:[[touches anyObject] locationInView:self]];
    [self setNeedsDisplay];
}

@end
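A minimal sketch of one commonly suggested direction, assuming the underlying photo does not change while the user draws: let a plain UIImageView composite the photo and keep InsideView as a transparent overlay, so its drawRect: only strokes the paths. The container and variable names below are illustrative assumptions, not code from the question:

// Sketch: the photo is composited by UIImageView/Core Animation,
// so InsideView's drawRect: no longer calls [self.img drawInRect:].
UIImageView *photoView = [[UIImageView alloc] initWithFrame:container.bounds];
photoView.contentMode = UIViewContentModeScaleAspectFit; // same fit as AVMakeRectWithAspectRatioInsideRect
photoView.image = photo;
[container addSubview:photoView];

InsideView *strokeView = [[InsideView alloc] initWithFrame:container.bounds];
strokeView.opaque = NO;
strokeView.backgroundColor = [UIColor clearColor]; // let the photo show through
[container addSubview:strokeView];

With that split, each dirty-rect pass only has to stroke the paths; restricting the stroke loop to paths whose bounds intersect the passed-in rect would cut the work further.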

iOS method containsPoint is not working

I have a view like this:
// .h file
@interface LayerBubbleView : UIView

/**
 *  click handler
 */
@property (nonatomic, copy) void (^clickHandler)(NSString *identifier, CAShapeLayer *layer);

- (instancetype)initWithFrame:(CGRect)frame withDataArray:(NSArray *)dataArray;

@end

// .m file
#import "BDPLayerBubbleView.h"

@interface BDPLayerBubbleView ()
@property (nonatomic, strong) NSMutableArray *paths;
@property (nonatomic, strong) NSMutableArray *identifiers;
@end

@implementation BDPLayerBubbleView

- (instancetype)initWithFrame:(CGRect)frame withDataArray:(NSArray *)dataArray {
    self = [self initWithFrame:frame];
    if (self) {
        _paths = [[NSMutableArray alloc] init];
        _identifiers = [[NSMutableArray alloc] init];
        for (int i = 0; i < dataArray.count; i++) {
            NSArray *data = dataArray[i];
            CGRect frame = [data[0] CGRectValue]; // please ignore here, just for demo
            CGFloat radius = [data[1] doubleValue];
            CGPoint center = [data[2] CGPointValue];
            UIColor *fillColor = data[3];
            UIColor *borderColor = data[4];
            NSString *identifier = data[5];

            CAShapeLayer *shapeLayer = [CAShapeLayer layer];
            UIBezierPath *bezierPath = [UIBezierPath bezierPathWithRoundedRect:frame cornerRadius:radius];
            [_paths addObject:bezierPath];
            [_identifiers addObject:identifier];
            shapeLayer.path = [bezierPath CGPath];
            shapeLayer.strokeColor = [borderColor CGColor];
            shapeLayer.fillColor = [fillColor CGColor];
            shapeLayer.lineWidth = 0.5;
            shapeLayer.bounds = frame;
            shapeLayer.position = center;
            [self.layer addSublayer:shapeLayer];
        }
    }
    return self;
}

#pragma mark - Touch handling

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touch = [touches anyObject];
    CGPoint touchPoint = [touch locationInView:self];
    for (int i = 0; i < [_paths count]; i++) {
        UIBezierPath *path = _paths[i];
        CAShapeLayer *shapeLayer = self.layer.sublayers[i];
        if ([path containsPoint:touchPoint] || [shapeLayer containsPoint:touchPoint])
        {
            NSString *identifier = _identifiers[i];
            if ([self.layer.sublayers[i] isKindOfClass:CAShapeLayer.class] && [identifier length] > 0) {
                CAShapeLayer *l = self.layer.sublayers[i];
                if (_clickHandler) {
                    _clickHandler(identifier, l);
                }
            }
        }
    }
}
The logic is: there are lots of bubble layers on the view, and when the user taps a bubble I want to detect which bubble layer was tapped, so I can trigger the corresponding clickHandler and highlight that bubble layer.
However, neither [path containsPoint:touchPoint] nor [shapeLayer containsPoint:touchPoint] returns YES for any of the sublayers. What is wrong with my code?
I was then advised to use CGPathContainsPoint and CGRectContainsPoint instead. I tested them: CGPathContainsPoint DOES NOT work, while CGRectContainsPoint DOES.
I am now confused about the differences between the Core Graphics APIs and the UIKit wrappers. Could an expert explain? I still hope containsPoint can work.
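For reference, a minimal sketch of the coordinate-space issue, assuming the layers are set up as in the code above: both -[CALayer containsPoint:] and the layer's path are defined in the layer's own coordinate space, while the touch point is in the view's coordinate space, so the point usually has to be converted before testing. The loop below is illustrative, not a confirmed fix for this exact setup:

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGPoint touchPoint = [[touches anyObject] locationInView:self];
    for (CALayer *sublayer in self.layer.sublayers) {
        if (![sublayer isKindOfClass:[CAShapeLayer class]]) { continue; }
        CAShapeLayer *shapeLayer = (CAShapeLayer *)sublayer;
        // Convert from the view's layer coordinates into the shape layer's own
        // coordinates, which is the space containsPoint: and the path are defined in.
        CGPoint pointInLayer = [shapeLayer convertPoint:touchPoint fromLayer:self.layer];
        if (CGPathContainsPoint(shapeLayer.path, NULL, pointInLayer, false)) {
            NSLog(@"tapped layer: %@", shapeLayer);
        }
    }
}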

touchesMoved drawing in CAShapeLayer slow/laggy

As was suggested to me in a prior Stack Overflow question, I'm trying to improve my drawing method for letting the user draw lines/dots into a UIView. I'm now drawing with a CAShapeLayer instead of dispatch_async. This all works correctly; however, drawing into the CAShapeLayer continuously while touches are moving becomes slow and the path lags behind, whereas my old (inefficient, I was told) code was beautifully smooth and fast. You can see my old code commented out below.
Is there any way to improve the performance for what I want to do? Maybe I'm overthinking something.
I'd appreciate any help offered.
Code:
@property (nonatomic, assign) NSInteger center;
@property (nonatomic, strong) CAShapeLayer *drawLayer;
@property (nonatomic, strong) UIBezierPath *drawPath;
@property (nonatomic, strong) UIView *drawView;
@property (nonatomic, strong) UIImageView *drawingImageView;

CGPoint points[4];

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touch = [touches anyObject];
    self.center = 0;
    points[0] = [touch locationInView:self.drawView];
    if (!self.drawLayer)
    {
        CAShapeLayer *layer = [CAShapeLayer layer];
        layer.lineWidth = 3.0;
        layer.lineCap = kCALineCapRound;
        layer.strokeColor = self.inkColor.CGColor;
        layer.fillColor = [[UIColor clearColor] CGColor];
        [self.drawView.layer addSublayer:layer];
        self.drawView.layer.masksToBounds = YES;
        self.drawLayer = layer;
    }
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touch = [touches anyObject];
    self.center++;
    points[self.center] = [touch locationInView:self.drawView];
    if (self.center == 3)
    {
        UIBezierPath *path = [UIBezierPath bezierPath];
        points[2] = CGPointMake((points[1].x + points[3].x)/2.0, (points[1].y + points[3].y)/2.0);
        [path moveToPoint:points[0]];
        [path addQuadCurveToPoint:points[2] controlPoint:points[1]];
        points[0] = points[2];
        points[1] = points[3];
        self.center = 1;
        [self drawWithPath:path];
    }
}

- (void)drawWithPath:(UIBezierPath *)path
{
    if (!self.drawPath)
    {
        self.drawPath = [UIBezierPath bezierPath];
    }
    [self.drawPath appendPath:path];
    self.drawLayer.path = self.drawPath.CGPath;
    [self.drawLayer setNeedsDisplay];

    // Below code worked faster and didn't lag behind at all really
    /*
    dispatch_async(dispatch_get_main_queue(),
    ^{
        UIGraphicsBeginImageContextWithOptions(self.drawingImageView.bounds.size, NO, 0.0);
        [self.drawingImageView.image drawAtPoint:CGPointZero];
        [self.inkColor setStroke];
        [path stroke];
        self.drawingImageView.image = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
    });
    */
}

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    if (self.center == 0)
    {
        UIBezierPath *path = [UIBezierPath bezierPath];
        [path moveToPoint:points[0]];
        [path addLineToPoint:points[0]];
        [self drawWithPath:path];
    }
    self.drawLayer = nil;
    self.drawPath = nil;
}
This problem intrigued me, as I've always found UIBezierPath/CAShapeLayer to be relatively fast.
It's important to note that in your code above you continue to add points to drawPath. As that path grows, the appendPath: call becomes a real resource burden. Similarly, there is no point in rendering the same points over and over.
As a side note, there is a visible performance difference when increasing lineWidth and adding a lineCap (regardless of approach). For the sake of comparing apples with apples, in the test below I've left both at their default values.
I took your code and changed it a little. The technique I've used is to add touch points to the bezier path up to a predetermined number before committing the current rendering to an image. This is similar to your original approach, but since it doesn't happen on every touch event it's far less CPU intensive. I tested both approaches on the slowest device I have (an iPhone 4S): CPU utilization with your initial implementation was consistently around 75-80% while drawing, whereas with the modified CAShapeLayer approach it was consistently around 10-15%. Memory usage also remained minimal with the second approach.
Below is the code I used:
@interface MDtestView () // a simple UIView subclass
@property (nonatomic, assign) NSInteger cPos;
@property (nonatomic, strong) CAShapeLayer *drawLayer;
@property (nonatomic, strong) UIBezierPath *drawPath;
@property (nonatomic, strong) NSMutableArray *bezierPoints;
@property (nonatomic, assign) NSInteger pointCount;
@property (nonatomic, strong) UIImageView *drawingImageView;
@end

@implementation MDtestView

CGPoint points[4];

- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        // Initialization code
    }
    return self;
}

- (id)initWithCoder:(NSCoder *)aDecoder
{
    self = [super initWithCoder:aDecoder];
    if (self) {
        //
    }
    return self;
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touch = [touches anyObject];
    self.cPos = 0;
    points[0] = [touch locationInView:self];
    if (!self.drawLayer)
    {
        // this should be elsewhere, but I kept it here to follow your code
        self.drawLayer = [CAShapeLayer layer];
        self.drawLayer.backgroundColor = [UIColor clearColor].CGColor;
        self.drawLayer.anchorPoint = CGPointZero;
        self.drawLayer.frame = self.frame;
        // self.drawLayer.lineWidth = 3.0;
        // self.drawLayer.lineCap = kCALineCapRound;
        self.drawLayer.strokeColor = [UIColor redColor].CGColor;
        self.drawLayer.fillColor = [[UIColor clearColor] CGColor];
        [self.layer insertSublayer:self.drawLayer above:self.layer];

        self.drawingImageView = [UIImageView new];
        self.drawingImageView.frame = self.frame;
        [self addSubview:self.drawingImageView];
    }
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touch = [touches anyObject];
    if (!self.drawPath)
    {
        self.drawPath = [UIBezierPath bezierPath];
        // self.drawPath.lineWidth = 3.0;
        // self.drawPath.lineCapStyle = kCGLineCapRound;
    }

    // grab the current time for testing path creation and appending
    CFAbsoluteTime cTime = CFAbsoluteTimeGetCurrent();

    self.cPos++;
    // points[self.cPos] = [touch locationInView:self.drawView];
    points[self.cPos] = [touch locationInView:self];
    if (self.cPos == 3)
    {
        /* uncomment this block to test old method
        UIBezierPath *path = [UIBezierPath bezierPath];
        [path moveToPoint:points[0]];
        points[2] = CGPointMake((points[1].x + points[3].x)/2.0, (points[1].y + points[3].y)/2.0);
        [path addQuadCurveToPoint:points[2] controlPoint:points[1]];
        points[0] = points[2];
        points[1] = points[3];
        self.cPos = 1;

        dispatch_async(dispatch_get_main_queue(),
        ^{
            UIGraphicsBeginImageContextWithOptions(self.drawingImageView.bounds.size, NO, 0.0);
            [self.drawingImageView.image drawAtPoint:CGPointZero];
            // path.lineWidth = 3.0;
            // path.lineCapStyle = kCGLineCapRound;
            [[UIColor redColor] setStroke];
            [path stroke];
            self.drawingImageView.image = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
            NSLog(@"it took %.2fms to draw via dispatchAsync", 1000.0*(CFAbsoluteTimeGetCurrent() - cTime));
        });
        */

        // I've kept the original structure in place whilst comparing apples for apples. We really don't need to create
        // a new bezier path and append it. We can simply add the points to the global drawPath and zero it at an
        // appropriate point. This also alleviates the need for appendPath:.
        // /*
        [self.drawPath moveToPoint:points[0]];
        points[2] = CGPointMake((points[1].x + points[3].x)/2.0, (points[1].y + points[3].y)/2.0);
        [self.drawPath addQuadCurveToPoint:points[2] controlPoint:points[1]];
        points[0] = points[2];
        points[1] = points[3];
        self.cPos = 1;

        self.drawLayer.path = self.drawPath.CGPath;
        NSLog(@"it took %.2fms to render %i bezier points", 1000.0*(CFAbsoluteTimeGetCurrent() - cTime), (int)self.pointCount);

        // 1 point for moveToPoint, and 2 points for addQuadCurveToPoint
        self.pointCount += 3;
        if (self.pointCount > 100) {
            self.pointCount = 0;
            [self commitCurrentRendering];
        }
        // */
    }
}

- (void)commitCurrentRendering
{
    CFAbsoluteTime cTime = CFAbsoluteTimeGetCurrent();
    @synchronized(self) {
        CGRect paintLayerBounds = self.drawLayer.frame;
        UIGraphicsBeginImageContextWithOptions(paintLayerBounds.size, NO, [[UIScreen mainScreen] scale]);
        CGContextRef context = UIGraphicsGetCurrentContext();
        CGContextSetBlendMode(context, kCGBlendModeCopy);
        [self.layer renderInContext:context];
        CGContextSetBlendMode(context, kCGBlendModeNormal);
        [self.drawLayer renderInContext:context];
        UIImage *previousPaint = UIGraphicsGetImageFromCurrentImageContext();
        self.layer.contents = (__bridge id)(previousPaint.CGImage);
        UIGraphicsEndImageContext();

        [self.drawPath removeAllPoints];
    }
    NSLog(@"it took %.2fms to save the context", 1000.0*(CFAbsoluteTimeGetCurrent() - cTime));
}

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    if (self.cPos == 0)
    {
        /* // not needed
        UIBezierPath *path = [UIBezierPath bezierPath];
        [path moveToPoint:points[0]];
        [path addLineToPoint:points[0]];
        [self drawWithPath:path];
        */
    }
    if (self.cPos == 2) {
        [self commitCurrentRendering];
    }
    // self.drawLayer = nil;
    [self.drawPath removeAllPoints];
}

@end

iOS hitTest withEvent

I'm attempting to get a reference to a UIImageView that is underneath a masked UIImageView, using hitTest:withEvent:. Here is what I have that is not working:
UIView A contains 3 UIImageViews as subviews: panelOne, panelTwo, and panelThree. panelThree takes up the entire frame but is masked to a triangle, revealing parts of panels one and two. So I need to detect when a user taps outside of that triangle and send the touch to the appropriate UIImageView.
Code: (CollagePanel is a subclass of UIImageView)
- (void)triangleInASquare
{
    CGSize size = self.frame.size;

    CollagePanel *panelOne = [[CollagePanel alloc] initWithFrame:CGRectMake(0, 0, size.width/2, size.height)];
    panelOne.panelScale = panelOne.frame.size.width / self.frame.size.width;
    panelOne.backgroundColor = [UIColor greenColor];

    CollagePanel *panelTwo = [[CollagePanel alloc] initWithFrame:CGRectMake(size.width/2, 0, size.width/2, size.height)];
    panelTwo.panelScale = panelOne.frame.size.width / self.frame.size.width;
    panelTwo.backgroundColor = [UIColor purpleColor];

    CollagePanel *panelThree = [[CollagePanel alloc] initWithFrame:CGRectMake(0, 0, size.width, size.height)];
    panelThree.backgroundColor = [UIColor orangeColor];

    UIBezierPath *trianglePath = [UIBezierPath bezierPath];
    [trianglePath moveToPoint:CGPointMake(0, panelThree.frame.size.height)];
    [trianglePath addLineToPoint:CGPointMake(panelThree.frame.size.width/2, 0)];
    [trianglePath addLineToPoint:CGPointMake(panelThree.frame.size.width, panelTwo.frame.size.height)];
    [trianglePath closePath];

    // Mask the panel's layer to the triangle.
    CAShapeLayer *triangleMaskLayer = [CAShapeLayer layer];
    [triangleMaskLayer setPath:trianglePath.CGPath];
    triangleMaskLayer.strokeColor = [[UIColor whiteColor] CGColor];
    panelThree.layer.mask = triangleMaskLayer;

    // Add border
    CAShapeLayer *borderLayer = [CAShapeLayer layer];
    borderLayer.strokeColor = [[UIColor whiteColor] CGColor];
    borderLayer.fillColor = [[UIColor clearColor] CGColor];
    borderLayer.lineWidth = 6;
    [borderLayer setPath:trianglePath.CGPath];
    [panelThree.layer addSublayer:borderLayer];

    NSMutableArray *tempArray = [[NSMutableArray alloc] init];
    [tempArray addObject:panelOne];
    [tempArray addObject:panelTwo];
    [tempArray addObject:panelThree];

    [self addGestureRecognizersToPanelsInArray:tempArray];
    [self addPanelsFromArray:tempArray];
    self.panelArray = tempArray;
}

- (void)handleTap:(UITapGestureRecognizer *)recognizer // coming from panel.imageView
{
    CGPoint tapPoint = [recognizer locationInView:self];
    NSLog(@"Location in self: %@", NSStringFromCGPoint(tapPoint));
    NSLog(@"self.subviews: %@", self.subviews);

    UIView *bottomView = [self hitTest:tapPoint withEvent:nil];
    NSLog(@"Bottom View: %@", bottomView);
}
The NSLog of bottomView is always panelThree (the topmost panel). From what I understand, the hit test should return the "bottom most" subview.
You understand wrong: it returns the view that recognizes itself as touched and is nearest to your finger, i.e. nearest to the top.
If a view should not recognize itself as touched for a certain point, you need to override
- (BOOL)pointInside:(CGPoint)point withEvent:(UIEvent *)event
for that view.
I think Ole Begemann's Shaped Button is a great example of how to do so.
In your project, this method could determine whether a point lies within the path, using CGPathContainsPoint.
Your pointInside:withEvent: might look like this:
#import "CollagePanel.h"
#import <QuartzCore/QuartzCore.h>
#implementation CollagePanel
//
// ...
//
- (BOOL)pointInside:(CGPoint)point withEvent:(UIEvent *)event
{
CGPoint p = [self convertPoint:point toView:[self superview]];
if(self.layer.mask){
if (CGPathContainsPoint([(CAShapeLayer *)self.layer.mask path], NULL, p, YES) )
return YES;
}else {
if(CGRectContainsPoint(self.layer.frame, p))
return YES;
}
return NO;
}
#end

Change UIBezierPath Colour when selected

I draw 3 squares in a LayoutView's - (void)drawRect:(CGRect)rect:

- (void)drawRect:(CGRect)rect
{
    self.room1 = [UIBezierPath bezierPathWithRect:CGRectMake(81, 10, 60, 60)];
    [self.normalColor setFill];
    [self.room1 fill];
    [[UIColor blackColor] setStroke];
    self.room1.lineWidth = 1;
    [self.room1 stroke];
}

Then I find the correct UIBezierPath with:
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    NSLog(@"touch here");
    UITouch *touch = [touches anyObject];
    CGPoint touchPoint = [touch locationInView:self];
    if ([self.room1 containsPoint:touchPoint])
    {
        // do stuff
        NSLog(@"room1 %@", self.room1);
        [[UIColor redColor] setFill];
        [self.room1 fill];
        [self setNeedsDisplay];
    }
}
This is working: I touch room 1 and the log prints "room1".
But how do I change the colour of room1?
At the moment I get an error:
CGContextSetFillColorWithColor: invalid context 0x0. This is a serious error. ...
Thanks for your help.
One way to accomplish this is to keep track of the selected state in the touchesBegan: method and keep all the fill and setFill calls inside drawRect: (the "invalid context" error occurs because fill is called outside drawRect:, where there is no current graphics context). In the following example, I toggle the selected state with each touch inside the square, which alternates the color between blue and red.
@interface RDView ()
@property (strong, nonatomic) UIBezierPath *room1;
@property (strong, nonatomic) UIColor *normalColor;
@property (strong, nonatomic) UIColor *selectedColor;
@property (nonatomic) BOOL isSelected;
@end

@implementation RDView

- (id)initWithCoder:(NSCoder *)aDecoder {
    if (self = [super initWithCoder:aDecoder]) {
        self.normalColor = [UIColor blueColor];
        self.selectedColor = [UIColor redColor];
        self.isSelected = NO;
    }
    return self;
}

- (void)drawRect:(CGRect)rect {
    self.room1 = [UIBezierPath bezierPathWithRect:CGRectMake(81, 10, 60, 60)];
    UIColor *colorToUse = (self.isSelected) ? self.selectedColor : self.normalColor;
    [colorToUse setFill];
    [self.room1 fill];
    [[UIColor blackColor] setStroke];
    self.room1.lineWidth = 1;
    [self.room1 stroke];
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    CGPoint touchPoint = [touches.anyObject locationInView:self];
    if ([self.room1 containsPoint:touchPoint]) {
        self.isSelected = !self.isSelected;
        [self setNeedsDisplay];
    }
}

@end
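The question mentions three squares, and the same pattern extends naturally to several rooms. A sketch of one way to do that, where the rooms array, the extra rects, and the selectedIndex property are assumptions rather than part of the original answer:

@interface RDView ()
@property (strong, nonatomic) NSArray<UIBezierPath *> *rooms; // room1, room2, room3
@property (nonatomic) NSInteger selectedIndex;                // -1 means nothing is selected
@end

@implementation RDView

- (id)initWithCoder:(NSCoder *)aDecoder {
    if (self = [super initWithCoder:aDecoder]) {
        self.selectedIndex = -1; // nothing selected initially
    }
    return self;
}

- (void)drawRect:(CGRect)rect {
    self.rooms = @[[UIBezierPath bezierPathWithRect:CGRectMake(81, 10, 60, 60)],
                   [UIBezierPath bezierPathWithRect:CGRectMake(81, 80, 60, 60)],
                   [UIBezierPath bezierPathWithRect:CGRectMake(81, 150, 60, 60)]];
    [self.rooms enumerateObjectsUsingBlock:^(UIBezierPath *room, NSUInteger idx, BOOL *stop) {
        // The selected room fills red, the others blue; all get a black outline.
        UIColor *fill = ((NSInteger)idx == self.selectedIndex) ? [UIColor redColor] : [UIColor blueColor];
        [fill setFill];
        [room fill];
        [[UIColor blackColor] setStroke];
        [room stroke];
    }];
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    CGPoint touchPoint = [touches.anyObject locationInView:self];
    [self.rooms enumerateObjectsUsingBlock:^(UIBezierPath *room, NSUInteger idx, BOOL *stop) {
        if ([room containsPoint:touchPoint]) {
            // Tapping the selected room deselects it; tapping another room selects it instead.
            self.selectedIndex = (self.selectedIndex == (NSInteger)idx) ? -1 : (NSInteger)idx;
            [self setNeedsDisplay];
            *stop = YES;
        }
    }];
}

@end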
