iOS method containsPoint is not working

I have a view like this:
// .h file
@interface BDPLayerBubbleView : UIView
/**
 * Click handler, invoked with the identifier and layer of the tapped bubble.
 */
@property (nonatomic, copy) void (^clickHandler)(NSString *identifier, CAShapeLayer *layer);
- (instancetype)initWithFrame:(CGRect)frame withDataArray:(NSArray *)dataArray;
@end
// .m file
#import "BDPLayerBubbleView.h"

@interface BDPLayerBubbleView ()
@property (nonatomic, strong) NSMutableArray *paths;
@property (nonatomic, strong) NSMutableArray *identifiers;
@end

@implementation BDPLayerBubbleView
- (instancetype)initWithFrame:(CGRect)frame withDataArray:(NSArray *)dataArray {
    self = [self initWithFrame:frame];
    if (self) {
        _paths = [[NSMutableArray alloc] init];
        _identifiers = [[NSMutableArray alloc] init];
        for (int i = 0; i < dataArray.count; i++) {
            NSArray *data = dataArray[i];
            CGRect frame = [data[0] CGRectValue]; // please ignore here, just for demo
            CGFloat radius = [data[1] doubleValue];
            CGPoint center = [data[2] CGPointValue];
            UIColor *fillColor = data[3];
            UIColor *borderColor = data[4];
            NSString *identifier = data[5];

            CAShapeLayer *shapeLayer = [CAShapeLayer layer];
            UIBezierPath *bezierPath = [UIBezierPath bezierPathWithRoundedRect:frame cornerRadius:radius];
            [_paths addObject:bezierPath];
            [_identifiers addObject:identifier];
            shapeLayer.path = [bezierPath CGPath];
            shapeLayer.strokeColor = [borderColor CGColor];
            shapeLayer.fillColor = [fillColor CGColor];
            shapeLayer.lineWidth = 0.5;
            shapeLayer.bounds = frame;
            shapeLayer.position = center;
            [self.layer addSublayer:shapeLayer];
        }
    }
    return self;
}
#pragma mark - Touch handling

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touch = [touches anyObject];
    CGPoint touchPoint = [touch locationInView:self];
    for (int i = 0; i < [_paths count]; i++) {
        UIBezierPath *path = _paths[i];
        CAShapeLayer *shapeLayer = self.layer.sublayers[i];
        if ([path containsPoint:touchPoint] || [shapeLayer containsPoint:touchPoint]) {
            NSString *identifier = _identifiers[i];
            if ([self.layer.sublayers[i] isKindOfClass:CAShapeLayer.class] && [identifier length] > 0) {
                CAShapeLayer *l = self.layer.sublayers[i];
                if (_clickHandler) {
                    _clickHandler(identifier, l);
                }
            }
        }
    }
}
@end
The logic is: there are many bubble layers on the view, and when the user taps a bubble I want to detect which bubble layer was tapped, so I can trigger the corresponding clickHandler and highlight that layer.
However, neither [path containsPoint:touchPoint] nor [shapeLayer containsPoint:touchPoint] returns YES for any of the sublayers. What is wrong with my code?
I was advised to use CGPathContainsPoint and CGRectContainsPoint instead. I tested both: CGPathContainsPoint DOES NOT work, while CGRectContainsPoint DID work.
I am now confused about the differences between the CG APIs and their UIKit wrappers. Could an expert explain? I would still like containsPoint to work.
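As the "Handling touches in CALayer" answer further down this page shows, the likely cause is coordinate spaces: each path above is built in the rect held in frame, while each shape layer defines its own coordinate space via bounds/position, so the view-space touchPoint is tested against the wrong space. A minimal sketch of converting the point first, assuming the setup above:

// Sketch: convert the view-space touch point into each shape layer's own
// coordinate space before hit-testing the path, e.g. inside touchesBegan:withEvent:.
CGPoint touchPoint = [[touches anyObject] locationInView:self];
for (NSUInteger i = 0; i < [_paths count]; i++) {
    CAShapeLayer *shapeLayer = (CAShapeLayer *)self.layer.sublayers[i];
    CGPoint layerPoint = [self.layer convertPoint:touchPoint toLayer:shapeLayer];
    if (CGPathContainsPoint(shapeLayer.path, NULL, layerPoint, false)) {
        // the i-th bubble was hit; fire _clickHandler(_identifiers[i], shapeLayer)
    }
}

With the converted point, [path containsPoint:layerPoint] should behave the same way, since the path's coordinates match the layer's space.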

Related

diagonal lines from the top right trying to draw on a UIView

I've got a tablet called a Slate that I can read x,y points from. I pass these x,y points to my "drawShape" function, the equivalent of a touchesMoved function. I have a custom UIView subclass designed for this purpose with four main functions: the equivalents of touchesBegan/Moved/Ended, plus drawRect.
Here's the source to the UIView subclass...
//
//  DrawingLayerView.m
//
//  Created by Monica Kachlon on 12/3/17.
//

#import "DrawingLayerView.h"
#include "UIBezierPath+Interpolation.h"

@interface StrokeA : NSObject
@property (assign, nonatomic) CGPoint startPoint;
@property (strong, nonatomic) NSMutableArray *points;
- (id)initWithStartPoint:(CGPoint)point;
@end

@implementation StrokeA
- (id)initWithStartPoint:(CGPoint)point {
    self = [super init];
    self.startPoint = point;
    self.points = [NSMutableArray array];
    return self;
}
@end
@implementation DrawingLayerView

NSMutableArray *strokes;
NSMutableArray *points;
StrokeA *currentStroke;
UIBezierPath *currentPath;
UIBezierPath *path;
UIBezierPath *pathTwo;
UIBezierPath *Newpath;
CAShapeLayer *currentLayer;

- (void)noodlez
{
    path = [UIBezierPath bezierPath];
    currentPath = [UIBezierPath bezierPath];
    pathTwo = [UIBezierPath bezierPath];
    Newpath = [UIBezierPath bezierPath];
    strokes = [NSMutableArray array];
    points = [NSMutableArray array];
}
- (void)startTouch:(CGPoint)point
{
    currentStroke = [[StrokeA alloc] initWithStartPoint:point];
    [strokes addObject:currentStroke];
}

- (UIBezierPath *)createPath {
    UIBezierPath *bezierPath = [UIBezierPath bezierPath];
    for (StrokeA *stroke in strokes) {
        [bezierPath moveToPoint:stroke.startPoint];
        for (NSValue *value in stroke.points) {
            [bezierPath addLineToPoint:[value CGPointValue]];
        }
    }
    return bezierPath;
}

- (void)endTouch:(CGPoint)point
{
    [points removeAllObjects];
    [self setNeedsDisplay];
}

- (void)drawShape:(CGPoint)point
{
    if (!currentStroke) {
        [self startTouch:point];
    }
    [currentStroke.points addObject:[NSValue valueWithCGPoint:point]];
    pathTwo = [self createPath];
    [self setNeedsDisplay];
}

- (void)drawRect:(CGRect)rect {
    [[UIColor blackColor] setStroke];
    [pathTwo stroke];
}

@end
And here's the screenshot.
I tried writing "Ok?" just to illustrate the issue.
Despite the code lacking some detail and the question being poorly worded, the screenshot clearly shows that the initial point of the path is (0,0) in each case. Dump the points in the paths out to the console and you'll see it. Figure out how those (0,0)s are getting in there and you'll solve the problem.
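One way to do that dump, as a minimal sketch using CGPathApply (the applier function below is illustrative, not from the answer):

// Sketch: log every moveTo/lineTo element of a UIBezierPath.
static void LogPathElement(void *info, const CGPathElement *element)
{
    if (element->type == kCGPathElementMoveToPoint) {
        NSLog(@"moveTo %@", NSStringFromCGPoint(element->points[0]));
    } else if (element->type == kCGPathElementAddLineToPoint) {
        NSLog(@"lineTo %@", NSStringFromCGPoint(element->points[0]));
    }
}

// Usage, e.g. at the end of drawShape:
// CGPathApply(pathTwo.CGPath, NULL, LogPathElement);

Any stray moveTo (0, 0) entries will show up immediately.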

How to optimize CPU and RAM usage on a UIView which uses `drawInRect:blendMode:`?

I'm developing a drawing app that lets the user draw a UIBezierPath over an image, which is drawn into the UIView by drawInRect:blendMode: inside drawRect:. It uses over 80% CPU and 55 MB of RAM, and when I traced it in the Time Profiler, the drawInRect:blendMode: method was causing most of the CPU usage. I have already done a few optimizations, but what further optimizations can I do, both for drawing the UIImage via [image drawInRect:rect]; and for the UIBezierPath the user draws on top of it?
Thanks in advance.
#import <AVFoundation/AVFoundation.h> // needed for AVMakeRectWithAspectRatioInsideRect

@interface InsideView () {
    CGRect imageRect;
    CGRect targetBounds;
}
@property (strong, nonatomic) NSMutableArray *strokes;
@property (nonatomic, strong) UIImage *img;
@property (nonatomic, strong) UIBezierPath *drawingPath;
@end

@implementation InsideView
- (void)drawRect:(CGRect)rect {
    targetBounds = self.layer.bounds;
    imageRect = AVMakeRectWithAspectRatioInsideRect(self.img.size, targetBounds);
    [self.img drawInRect:imageRect];
    for (int i = 0; i < [self.strokes count]; i++) {
        UIBezierPath *tmp = (UIBezierPath *)[self.strokes objectAtIndex:i];
        [[UIColor whiteColor] setStroke];
        [tmp stroke];
    }
}

- (CGRect)segmentBoundsFrom:(CGPoint)point1 to:(CGPoint)point2
{
    CGRect dirtyPoint1 = CGRectMake(point1.x - 10, point1.y - 10, 50, 50);
    CGRect dirtyPoint2 = CGRectMake(point2.x - 10, point2.y - 10, 50, 50);
    return CGRectUnion(dirtyPoint1, dirtyPoint2);
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    self.drawingPath = [UIBezierPath bezierPath];
    self.drawingPath.lineWidth = 10;
    self.drawingPath.lineCapStyle = kCGLineCapRound;
    self.drawingPath.lineJoinStyle = kCGLineJoinRound;
    [self.drawingPath moveToPoint:[[touches anyObject] locationInView:self]];
    [self.strokes addObject:self.drawingPath];
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
    // Dirty rect calculations
    CGPoint prevPoint = CGPathGetCurrentPoint(self.drawingPath.CGPath);
    CGPoint point = [[touches anyObject] locationInView:self];
    CGRect dirty = [self segmentBoundsFrom:prevPoint to:point];
    [[self.strokes lastObject] addLineToPoint:point];
    [self setNeedsDisplayInRect:dirty];
}

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
    [[self.strokes lastObject] addLineToPoint:[[touches anyObject] locationInView:self]];
    [self setNeedsDisplay];
}

@end

iOS: Why UIView's drawRect has better performance than CALayer's drawInContext

I am currently learning to draw with UIView and CALayer. I quickly put together a drawing app to experiment with the two classes, and noticed that CALayer had worse performance than UIView. Here is the code:
myView.m
@interface myView ()
@property (nonatomic, strong) UIImageView *background;
#ifdef USE_LAYER
@property (nonatomic, strong) drawingLayer *drawCanvas;
#else
@property (nonatomic, strong) drawingView *drawCanvas;
#endif
@end
@implementation myView

- (instancetype)initWithFrame:(CGRect)frame
{
    if (self = [super initWithFrame:frame])
    {
        self.background = [[UIImageView alloc] initWithFrame:frame];
        [self addSubview:self.background];
#ifdef USE_LAYER
        self.drawCanvas = [[drawingLayer alloc] init];
        self.drawCanvas.frame = frame;
        [self.layer addSublayer:self.drawCanvas];
#else
        self.drawCanvas = [[drawingView alloc] initWithFrame:frame];
        self.drawCanvas.backgroundColor = [UIColor clearColor];
        [self addSubview:self.drawCanvas];
#endif
    }
    return self;
}

- (CGPoint)getPoint:(NSSet<UITouch *> *)touches
{
    NSArray *touchesArray = [touches allObjects];
    UITouch *touch = (UITouch *)[touchesArray objectAtIndex:0];
    return [touch locationInView:self];
}

- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
    CGPoint point = [self getPoint:touches];
    self.drawCanvas.points = [[NSMutableArray alloc] init];
    self.drawCanvas.startPoint = point;
}

- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
    CGPoint point = [self getPoint:touches];
    [self.drawCanvas.points addObject:[NSValue valueWithCGPoint:point]];
    self.background.image = [self imageFromLayer:self.layer];
    self.drawCanvas.points = nil;
    [self.drawCanvas setNeedsDisplay];
}

- (void)touchesMoved:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
    CGPoint point = [self getPoint:touches];
    [self.drawCanvas.points addObject:[NSValue valueWithCGPoint:point]];
    [self.drawCanvas setNeedsDisplay];
}

- (UIImage *)imageFromLayer:(CALayer *)layer
{
    UIGraphicsBeginImageContext([layer frame].size);
    [layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return outputImage;
}

@end
drawingView.h

@interface drawingView : UIView
@property (nonatomic, assign) CGPoint startPoint;
@property (nonatomic, strong) NSMutableArray *points;
@end

drawingView.m

@implementation drawingView

- (void)drawRect:(CGRect)rect {
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGContextSetStrokeColorWithColor(context, [UIColor redColor].CGColor);
    CGContextSetLineWidth(context, 2.0f);
    CGContextMoveToPoint(context, self.startPoint.x, self.startPoint.y);
    for (NSValue *point in self.points)
    {
        CGPoint location = [point CGPointValue];
        CGContextAddLineToPoint(context, location.x, location.y);
    }
    CGContextStrokePath(context);
}

@end
I also have "drawingLayer", which is essentially the same thing as "drawingView", except it is a subclass of CALayer and does the same drawing in "drawInContext".
What I found is that drawRect has much better performance: the drawing is almost in sync with the touch position, with very minor lag.
Why is this the case? I expected the CALayer drawing to be at least as fast, if not faster. In fact, someone told me today that drawing in CALayer is hardware-accelerated, and is the preferred way if performance is a concern. That is certainly NOT the case in my little experiment. Why?
It might be caused by CALayer's default implicit animations.
Try overriding actionForKey: (action(forKey:) in Swift) to always return nil in your CALayer subclass.
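A minimal Objective-C sketch of that suggestion (the subclass name is illustrative):

// Sketch: suppress implicit animations in a CALayer subclass.
@interface DrawingCanvasLayer : CALayer
@end

@implementation DrawingCanvasLayer
- (id<CAAction>)actionForKey:(NSString *)event
{
    return nil; // no implicit animation for any property change
}
@end

With implicit actions disabled, each drawInContext: update appears immediately instead of being interpolated over the default 0.25 s transaction.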

Handling touches in CALayer

I'm trying to detect touches in my view.
My code:
- (CALayer *)layerForTouch:(UITouch *)touch {
    UIView *view = self;
    CGPoint location = [touch locationInView:view];
    location = [view convertPoint:location toView:nil];
    CALayer *hitPresentationLayer = [view.layer.presentationLayer hitTest:location];
    if (hitPresentationLayer) {
        return hitPresentationLayer.modelLayer;
    }
    return nil;
}
But I found a problem: it detects only the first layer in the tree.
Every figure in the screenshot is one layer. I draw them from the biggest to the smallest, as a tree.
If your topmost layer covers the whole screen (or the layers below it), then yes, you will get the topmost layer from the hitTest check. You can do a containsPoint check on all your layers if you want to look further down. You'll find some sample code below:
@interface ViewController ()
@property (nonatomic, strong) CALayer *layer1;
@property (nonatomic, strong) CALayer *layer2;
@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    CALayer *layer = [[CALayer alloc] init];
    layer.frame = (CGRect){ 10, 10, 100, 100 };
    [self.view.layer addSublayer:layer];
    layer.backgroundColor = [UIColor redColor].CGColor;
    self.layer1 = layer;

    layer = [[CALayer alloc] init];
    layer.frame = (CGRect){ 60, 60, 100, 100 };
    [self.view.layer addSublayer:layer];
    layer.backgroundColor = [UIColor blueColor].CGColor;
    self.layer2 = layer;
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGPoint touchPoint = [(UITouch *)[touches anyObject] locationInView:self.view];
    CALayer *touchedLayer = [self.view.layer.presentationLayer hitTest:touchPoint];
    CALayer *originalLayer = [touchedLayer modelLayer];
    if (originalLayer == self.layer1) { NSLog(@"layer 1 touched"); }
    if (originalLayer == self.layer2) { NSLog(@"layer 2 touched"); }
    for (CALayer *layer in @[self.layer1, self.layer2])
    {
        CGPoint point = [layer convertPoint:touchPoint fromLayer:self.view.layer];
        NSLog(@"layer %@ containsPoint ? %@", layer, [layer containsPoint:point] ? @"YES" : @"NO");
        NSLog(@"original point : %@ | converted point : %@", NSStringFromCGPoint(touchPoint), NSStringFromCGPoint(point));
    }
}

@end
Or you can try using CAShapeLayers and test against each shape layer's CGPath to see if it's touched, like the example below:
@interface ViewController ()
@property (nonatomic, strong) CALayer *layer1;
@property (nonatomic, strong) CALayer *layer2;
@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    CAShapeLayer *shapeLayer = [[CAShapeLayer alloc] init];
    shapeLayer.path = [UIBezierPath bezierPathWithRect:(CGRect){ 0, 0, 100, 10 }].CGPath;
    shapeLayer.backgroundColor = [UIColor redColor].CGColor;
    shapeLayer.frame = (CGRect){ 20, 20, 100, 100 };
    [self.view.layer addSublayer:shapeLayer];
    self.layer1 = shapeLayer;

    shapeLayer = [[CAShapeLayer alloc] init];
    shapeLayer.path = [UIBezierPath bezierPathWithRect:(CGRect){ 0, 0, 10, 100 }].CGPath;
    shapeLayer.backgroundColor = [[UIColor blueColor] colorWithAlphaComponent:0.6].CGColor;
    shapeLayer.frame = (CGRect){ 60, 60, 100, 100 };
    [self.view.layer addSublayer:shapeLayer];
    self.layer2 = shapeLayer;
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGPoint touchPoint = [(UITouch *)[touches anyObject] locationInView:self.view];
    CALayer *touchedLayer = [self.view.layer.presentationLayer hitTest:touchPoint];
    CALayer *originalLayer = [touchedLayer modelLayer];
    for (CAShapeLayer *layer in @[self.layer1, self.layer2])
    {
        if (![layer isKindOfClass:[CAShapeLayer class]]) { continue; }
        CGPoint point = [layer convertPoint:touchPoint fromLayer:self.view.layer];
        if (CGPathContainsPoint(layer.path, NULL, point, false))
        {
            NSLog(@"match found");
            if (originalLayer == self.layer1) { NSLog(@"layer 1 touched"); }
            if (originalLayer == self.layer2) { NSLog(@"layer 2 touched"); }
            return;
        }
    }
}
@end

touchesMoved drawing in CAShapeLayer slow/laggy

As suggested to me in a prior StackOverflow question, I'm trying to improve my drawing method for letting the user draw lines/dots in a UIView. I'm now drawing into a CAShapeLayer instead of using dispatch_async. This all works correctly; however, drawing into the CAShapeLayer continuously while touches are moving becomes slow, and the path lags behind, whereas my old code (inefficient, I was told) worked beautifully smooth and fast. You can see my old code commented out below.
Is there any way to improve the performance for what I want to do? Maybe I'm overthinking something.
I'd appreciate any help offered.
Code:
@property (nonatomic, assign) NSInteger center;
@property (nonatomic, strong) CAShapeLayer *drawLayer;
@property (nonatomic, strong) UIBezierPath *drawPath;
@property (nonatomic, strong) UIView *drawView;
@property (nonatomic, strong) UIImageView *drawingImageView;

CGPoint points[4];
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touch = [touches anyObject];
    self.center = 0;
    points[0] = [touch locationInView:self.drawView];
    if (!self.drawLayer)
    {
        CAShapeLayer *layer = [CAShapeLayer layer];
        layer.lineWidth = 3.0;
        layer.lineCap = kCALineCapRound;
        layer.strokeColor = self.inkColor.CGColor;
        layer.fillColor = [[UIColor clearColor] CGColor];
        [self.drawView.layer addSublayer:layer];
        self.drawView.layer.masksToBounds = YES;
        self.drawLayer = layer;
    }
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touch = [touches anyObject];
    self.center++;
    points[self.center] = [touch locationInView:self.drawView];
    if (self.center == 3)
    {
        UIBezierPath *path = [UIBezierPath bezierPath];
        points[2] = CGPointMake((points[1].x + points[3].x) / 2.0, (points[1].y + points[3].y) / 2.0);
        [path moveToPoint:points[0]];
        [path addQuadCurveToPoint:points[2] controlPoint:points[1]];
        points[0] = points[2];
        points[1] = points[3];
        self.center = 1;
        [self drawWithPath:path];
    }
}

- (void)drawWithPath:(UIBezierPath *)path
{
    if (!self.drawPath)
    {
        self.drawPath = [UIBezierPath bezierPath];
    }
    [self.drawPath appendPath:path];
    self.drawLayer.path = self.drawPath.CGPath;
    [self.drawLayer setNeedsDisplay];

    // Below code worked faster and didn't lag behind at all really
    /*
    dispatch_async(dispatch_get_main_queue(),
    ^{
        UIGraphicsBeginImageContextWithOptions(self.drawingImageView.bounds.size, NO, 0.0);
        [self.drawingImageView.image drawAtPoint:CGPointZero];
        [self.inkColor setStroke];
        [path stroke];
        self.drawingImageView.image = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
    });
    */
}

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    if (self.center == 0)
    {
        UIBezierPath *path = [UIBezierPath bezierPath];
        [path moveToPoint:points[0]];
        [path addLineToPoint:points[0]];
        [self drawWithPath:path];
    }
    self.drawLayer = nil;
    self.drawPath = nil;
}
This problem intrigued me, as I've always found UIBezierPath/CAShapeLayer to be relatively fast.
It's important to note that in your code above, you continue to add points to drawPath. As it grows, the appendPath: method becomes a real resource burden. Similarly, there is no point in rendering the same points over and over.
As a side note, there is a visible performance difference when increasing lineWidth and adding a lineCap (regardless of approach). For the sake of comparing apples with apples, in the test below I've left both at their default values.
I took your code and changed it a little. The technique I've used is to add touch points to the bezier path up to a pre-determined number before committing the current rendering to an image. This is similar to your original approach, but given that it's not happening on every touch event, it's far less CPU intensive. I tested both approaches on the slowest device I have (iPhone 4S) and noted that CPU utilization with your initial implementation was consistently around 75-80% whilst drawing, whereas with the modified CAShapeLayer approach it was consistently around 10-15%. Memory usage also remained minimal with the second approach.
Below is the code I used:
@interface MDtestView () // a simple UIView subclass
@property (nonatomic, assign) NSInteger cPos;
@property (nonatomic, strong) CAShapeLayer *drawLayer;
@property (nonatomic, strong) UIBezierPath *drawPath;
@property (nonatomic, strong) NSMutableArray *bezierPoints;
@property (nonatomic, assign) NSInteger pointCount;
@property (nonatomic, strong) UIImageView *drawingImageView;
@end

@implementation MDtestView

CGPoint points[4];

- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        // Initialization code
    }
    return self;
}

- (id)initWithCoder:(NSCoder *)aDecoder {
    self = [super initWithCoder:aDecoder];
    if (self) {
        //
    }
    return self;
}
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touch = [touches anyObject];
    self.cPos = 0;
    points[0] = [touch locationInView:self];
    if (!self.drawLayer)
    {
        // this should be elsewhere, but kept here to follow your code
        self.drawLayer = [CAShapeLayer layer];
        self.drawLayer.backgroundColor = [UIColor clearColor].CGColor;
        self.drawLayer.anchorPoint = CGPointZero;
        self.drawLayer.frame = self.frame;
        //self.drawLayer.lineWidth = 3.0;
        //self.drawLayer.lineCap = kCALineCapRound;
        self.drawLayer.strokeColor = [UIColor redColor].CGColor;
        self.drawLayer.fillColor = [[UIColor clearColor] CGColor];
        [self.layer insertSublayer:self.drawLayer above:self.layer];

        self.drawingImageView = [UIImageView new];
        self.drawingImageView.frame = self.frame;
        [self addSubview:self.drawingImageView];
    }
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    UITouch *touch = [touches anyObject];
    if (!self.drawPath)
    {
        self.drawPath = [UIBezierPath bezierPath];
        //self.drawPath.lineWidth = 3.0;
        //self.drawPath.lineCapStyle = kCGLineCapRound;
    }
    // grab the current time for testing path creation and appending
    CFAbsoluteTime cTime = CFAbsoluteTimeGetCurrent();
    self.cPos++;
    //points[self.cPos] = [touch locationInView:self.drawView];
    points[self.cPos] = [touch locationInView:self];
    if (self.cPos == 3)
    {
        /* uncomment this block to test the old method
        UIBezierPath *path = [UIBezierPath bezierPath];
        [path moveToPoint:points[0]];
        points[2] = CGPointMake((points[1].x + points[3].x)/2.0, (points[1].y + points[3].y)/2.0);
        [path addQuadCurveToPoint:points[2] controlPoint:points[1]];
        points[0] = points[2];
        points[1] = points[3];
        self.cPos = 1;
        dispatch_async(dispatch_get_main_queue(),
        ^{
            UIGraphicsBeginImageContextWithOptions(self.drawingImageView.bounds.size, NO, 0.0);
            [self.drawingImageView.image drawAtPoint:CGPointZero];
            //path.lineWidth = 3.0;
            //path.lineCapStyle = kCGLineCapRound;
            [[UIColor redColor] setStroke];
            [path stroke];
            self.drawingImageView.image = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
            NSLog(@"it took %.2fms to draw via dispatch_async", 1000.0 * (CFAbsoluteTimeGetCurrent() - cTime));
        });
        */

        // I've kept the original structure in place while comparing apples with apples. We really don't need
        // to create a new bezier path and append it. We can simply add the points to the global drawPath and
        // zero it at an appropriate point. This also alleviates the need for appendPath.
        [self.drawPath moveToPoint:points[0]];
        points[2] = CGPointMake((points[1].x + points[3].x) / 2.0, (points[1].y + points[3].y) / 2.0);
        [self.drawPath addQuadCurveToPoint:points[2] controlPoint:points[1]];
        points[0] = points[2];
        points[1] = points[3];
        self.cPos = 1;
        self.drawLayer.path = self.drawPath.CGPath;
        NSLog(@"it took %.2fms to render %i bezier points", 1000.0 * (CFAbsoluteTimeGetCurrent() - cTime), (int)self.pointCount);
        // 1 point for moveToPoint, and 2 points for addQuadCurveToPoint
        self.pointCount += 3;
        if (self.pointCount > 100) {
            self.pointCount = 0;
            [self commitCurrentRendering];
        }
    }
}
- (void)commitCurrentRendering {
    CFAbsoluteTime cTime = CFAbsoluteTimeGetCurrent();
    @synchronized (self) {
        CGRect paintLayerBounds = self.drawLayer.frame;
        UIGraphicsBeginImageContextWithOptions(paintLayerBounds.size, NO, [[UIScreen mainScreen] scale]);
        CGContextRef context = UIGraphicsGetCurrentContext();
        CGContextSetBlendMode(context, kCGBlendModeCopy);
        [self.layer renderInContext:context];
        CGContextSetBlendMode(context, kCGBlendModeNormal);
        [self.drawLayer renderInContext:context];
        UIImage *previousPaint = UIGraphicsGetImageFromCurrentImageContext();
        self.layer.contents = (__bridge id)(previousPaint.CGImage);
        UIGraphicsEndImageContext();
        [self.drawPath removeAllPoints];
    }
    NSLog(@"it took %.2fms to save the context", 1000.0 * (CFAbsoluteTimeGetCurrent() - cTime));
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    if (self.cPos == 0)
    {
        /* // not needed
        UIBezierPath *path = [UIBezierPath bezierPath];
        [path moveToPoint:points[0]];
        [path addLineToPoint:points[0]];
        [self drawWithPath:path];
        */
    }
    if (self.cPos == 2) {
        [self commitCurrentRendering];
    }
    //self.drawLayer = nil;
    [self.drawPath removeAllPoints];
}

@end
