Overriding drawRect for a drawing app - iOS

I am building a demo drawing application. I am using touchesMoved:withEvent to collect my points and add them to a CGMutablePathRef. To stroke the path, I override drawRect, add the path to the context, and stroke the path:
override func drawRect(rect: CGRect) {
    self.backgroundColor?.set()
    UIRectFill(rect)
    let context : CGContextRef = UIGraphicsGetCurrentContext()
    for line in pathArray {
        CGContextAddPath(context, line.structPath)
        CGContextSetLineWidth(context, line.structLineWidth)
        CGContextSetStrokeColorWithColor(context, line.structLineColor.CGColor)
        CGContextSetAlpha(context, lineOpacity)
    }
    CGContextSetLineCap(context, kCGLineCapRound)
    CGContextStrokePath(context)
    self.empty = false
}
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
    if let touch = touches.first as UITouch! {
        previousPoint = touch.previousLocationInView(self)
        previousPreviousPoint = touch.previousLocationInView(self)
        currentPoint = touch.locationInView(self)
    }
    self.touchesMoved(touches, withEvent: event)
}
override func touchesMoved(touches: Set<UITouch>, withEvent event: UIEvent?) {
    if let touch = touches.first as UITouch! {
        previousPreviousPoint = previousPoint
        previousPoint = touch.previousLocationInView(self)
        currentPoint = touch.locationInView(self)
        let mid1 : CGPoint = getMidPoint(previousPoint, p2: previousPreviousPoint)
        let mid2 : CGPoint = getMidPoint(currentPoint, p2: previousPoint)
        let subpath : CGMutablePathRef = CGPathCreateMutable()
        CGPathMoveToPoint(subpath, nil, mid1.x, mid1.y)
        CGPathAddQuadCurveToPoint(subpath, nil, previousPoint.x, previousPoint.y, mid2.x, mid2.y)
        let bounds : CGRect = CGPathGetBoundingBox(subpath)
        let drawBox : CGRect = CGRectInset(bounds, -2.0 * lineWidth, -2.0 * lineWidth)
        let newLine = line(newPath: subpath)
        pathArray.append(newLine)
        self.setNeedsDisplayInRect(drawBox)
    }
}
The above code works as expected, except that I am seeing one unexpected result: the "draw box", which takes the bounding box of the subpath and applies a CGRectInset, changes the lineColor of other paths already drawn.
I understand (sort of) why this is happening, but cannot find a solution to this problem. Any suggestions would be most appreciated!

You want to stroke each path separately, like this:
override func drawRect(rect: CGRect) {
    let context : CGContextRef = UIGraphicsGetCurrentContext()
    // Set parameters in the context that are the same for all lines
    CGContextSetLineCap(context, kCGLineCapRound)
    CGContextSetAlpha(context, lineOpacity)
    for line in pathArray {
        // Set parameters in the context that are specific to this line
        CGContextSetLineWidth(context, line.structLineWidth)
        CGContextSetStrokeColorWithColor(context, line.structLineColor.CGColor)
        // Add the line's path to the context's current path:
        CGContextAddPath(context, line.structPath)
        // And stroke it.
        CGContextStrokePath(context)
        // (This has the side effect of clearing the context's current path.)
    }
}
The CGContext keeps track of a current path. Think of it as a CGMutablePath that you don't have direct access to. You can affect it by calling functions like CGContextAddPath or many others.
The CGContext's current path is not actually visible until you tell the context to draw it, by calling a function like CGContextStrokePath. At that time, the context uses the current path and the other values in the context (stroke color, alpha, line width, etc.) to figure out what to draw, and draws it.
Your code was adding each line's path into the current path, so you ended up with the current path containing several disconnected subpaths. Then you called CGContextStrokePath once at the end, and all of those subpaths were drawn using the values of the parameters that you most recently set. So you saw only the last line's width and color.
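For comparison, here is a minimal sketch of the same per-line stroking written with UIBezierPath, in current Swift syntax (pathArray, lineOpacity and the structPath / structLineWidth / structLineColor properties are assumed from the question). Each path object carries its own stroke settings, so one line's parameters cannot bleed into another:
override func draw(_ rect: CGRect) {
    backgroundColor?.set()
    UIRectFill(rect)
    for line in pathArray {
        // Wrap the stored CGPath in a UIBezierPath and give it this line's settings.
        let bezier = UIBezierPath(cgPath: line.structPath)
        bezier.lineWidth = line.structLineWidth
        bezier.lineCapStyle = .round
        line.structLineColor.setStroke()
        // Stroking here uses only this path's settings plus the current stroke color.
        bezier.stroke(with: .normal, alpha: lineOpacity)
    }
}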

Related

Cannot find CGPoint in CGPath

I have the following function for drawing a line between two points:
override func draw(from fromPoint: CGPoint, to toPoint: CGPoint) {
    let path = UIBezierPath()
    path.move(to: fromPoint)
    path.addLine(to: toPoint)
    path.close()
    layer.path = path.cgPath
    layer.strokeColor = pencil.color.cgColor
    layer.lineWidth = pencil.strokeSize
}
Which is being called in touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?)
This draws a line and works fine; the line appears at the correct screen position.
Then I have another feature where the user can erase previous drawings, and I'm trying to determine whether a touch position is contained by any of the drawn paths:
private func findLayer(in touch: UITouch) -> CAShapeLayer? {
    let point = touch.location(in: view)
    // All the drawn paths are contained in CAShapeLayers.
    // Circle and rectangle layers have a corresponding frame that contains the UIBezierPath.
    if let hitLayer = view?.layer.hitTest(point) as? CAShapeLayer,
        hitLayer.path?.contains(point) == true || hitLayer.frame.contains(point) {
        return hitLayer
    }
    guard let sublayers = view?.layer.sublayers else { return nil }
    // This extra check is for layers that don't have a frame (lines, angles and free drawing).
    for layer in sublayers {
        if let shapeLayer = layer as? CAShapeLayer,
            shapeLayer.path?.contains(point) == true || shapeLayer.frame.contains(point) {
            return shapeLayer
        }
    }
    return nil
}
The problem is that when the user draws a line, the findLayer function never returns the layer with the line, although it works perfectly when the user draws a circle or a rectangle.
I don't want to give Line drawings a frame, because then the hit box could be too big, and the user could delete the drawing even if the touch isn't near the real path.
How can I find whether a touch point is part of a CAShapeLayer's path?
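One way to make line layers hittable without giving them a frame (a sketch, not the poster's code; it reuses the stroked-copy trick described in the next answer) is to test the point against a thickened outline of each layer's path:
// Hypothetical helper for findLayer; hit-tests a stroked path instead of its interior.
private func strokedPathContains(_ shapeLayer: CAShapeLayer, point: CGPoint) -> Bool {
    guard let path = shapeLayer.path else { return false }
    // Widen the line a little so finger-sized touches still register.
    let hitWidth = max(shapeLayer.lineWidth, 10)
    let outline = path.copy(strokingWithWidth: hitWidth,
                            lineCap: .round,
                            lineJoin: .round,
                            miterLimit: 0)
    return outline.contains(point)
}
In the loop above, strokedPathContains(shapeLayer, point: point) could then replace the shapeLayer.path?.contains(point) == true check for layers that only stroke a line.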

CAShapeLayer. Does the line pass through the point?

I use CAShapeLayer to draw a line on the screen. In touchesEnded I want to check whether the line passes through a given point. In my code, when I press any part of the screen, the contains method always returns true. Perhaps I have a problem with line.frame = (view?.bounds)!. How can I fix it?
Sorry for my bad English.
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    let touch = touches.first
    let firstPosition = touch?.location(in: self)
    if atPoint(firstPosition!) == lvl1 {
        let firstPositionX = firstPosition?.x
        let firstPositionY = frame.size.height - (firstPosition?.y)!
        view?.layer.addSublayer(line)
        line.lineWidth = 8
        let color = #colorLiteral(red: 0.8078431487, green: 0.02745098062, blue: 0.3333333433, alpha: 1).cgColor
        line.strokeColor = color
        line.fillColor = nil
        line.frame = (view?.bounds)!
        path.move(to: CGPoint(x: firstPositionX!, y: firstPositionY))
    }
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
    let touch = touches.first
    let firstPosition = touch?.location(in: self)
    if atPoint(firstPosition!) == lvl1 {
        let firstPositionX = firstPosition?.x
        let firstPositionY = frame.size.height - (firstPosition?.y)!
        path.addLine(to: CGPoint(x: firstPositionX!, y: firstPositionY))
        line.path = path.cgPath
    }
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
    if line.contains(screenCenterPoint) {
        print("ok")
    }
}
Problem
The method func contains(_ p: CGPoint) -> Bool of CAShapeLayer returns true if the bounds of the layer contains the point. (see documentation)
So you cannot use this to check if the line contains a point.
There is, however, another method with the same name in the class CGPath that returns whether the specified point is interior to the path. But since you only stroke your path and don't fill the interior, this method will not give the desired result either.
Solution
The trick is to create an outline of your path using:
let outline = path.cgPath.copy(strokingWithWidth: line.lineWidth, lineCap: .butt, lineJoin: .round, miterLimit: 0)
And then check if the interior of the outline contains your screenCenterPoint
if outline.contains(screenCenterPoint) {
print("ok")
}
Performance considerations
Since you are checking the containment only when touches end, I think that creating an outline of the path does not add too much overhead.
When you want to check containment in realtime, for example inside touchesMoved, calculating an outline may produce some overhead, because that method is called many times per second. Also, the longer the path becomes, the longer it takes to calculate the outline.
So in realtime it is better to generate only the outline of the last drawn segment and then check if that outline contains your point.
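As a sketch of that last point (lastSegmentPath is a hypothetical CGPath holding only the most recently drawn segment):
let segmentOutline = lastSegmentPath.copy(strokingWithWidth: line.lineWidth,
                                          lineCap: .round,
                                          lineJoin: .round,
                                          miterLimit: 0)
// Only the newest segment is thickened and tested, so the cost stays constant per touch.
if segmentOutline.contains(screenCenterPoint) {
    print("ok")
}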
If you want to reduce overhead seriously, you can write your own containment function. Containment of a point in a straight line is fairly simple and can be reduced to the following formula:
Given a line from start to end with a stroke width, and a point p, calculate:
dx = start.x - end.x
dy = start.y - end.y
a = dy * p.x - dx * p.y + end.x * start.y - end.y * start.x
b = hypot(dy, dx)
Here abs(a / b) is the perpendicular distance from p to the infinite line through start and end. The line contains point p if:
abs(a / b) < width / 2 and p is in the bounding box of the line.
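A minimal Swift sketch of that check (the function name and the half-width padding on the bounding box are my own additions; the math is the formula above):
import CoreGraphics

// Returns true if point p lies on the stroked segment from start to end.
func segment(from start: CGPoint, to end: CGPoint, width: CGFloat, contains p: CGPoint) -> Bool {
    let dx = start.x - end.x
    let dy = start.y - end.y
    // abs(a / b) is the distance from p to the infinite line through start and end.
    let a = dy * p.x - dx * p.y + end.x * start.y - end.y * start.x
    let b = hypot(dy, dx)
    guard b > 0 else {
        // Degenerate segment: start and end coincide, so just check the distance to that point.
        return hypot(p.x - start.x, p.y - start.y) <= width / 2
    }
    // Bounding box of the segment, padded by half the width so the ends of a thick line stay hittable.
    let box = CGRect(x: min(start.x, end.x), y: min(start.y, end.y),
                     width: abs(dx), height: abs(dy))
        .insetBy(dx: -width / 2, dy: -width / 2)
    return abs(a / b) < width / 2 && box.contains(p)
}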

Drawing on UIImageView within UIScrollView

About my app: The user can view a PDF file within a UIWebView. I have an option for the user to choose whether they want to scroll through the PDF or take notes on it. When they take notes, scrolling is disabled, and vice versa. However, when the user is drawing, the lines move up and become hazy, as shown:
(The red boxes are text within the PDF.)
Here is my code:
Switching between the pen and scroll:
var usingPen = false
@IBAction func usePen(sender: AnyObject) {
    usingPen = true
    webView.userInteractionEnabled = false
    UIView.animateWithDuration(0.3) { () -> Void in
        self.popUpView.alpha = 0
    }
}
@IBAction func useScroll(sender: AnyObject) {
    usingPen = false
    webView.userInteractionEnabled = true
    UIView.animateWithDuration(0.3) { () -> Void in
        self.popUpView.alpha = 0
    }
}
The imageView the user draws on (objectView):
var objectView = UIImageView()
override func viewDidAppear(animated: Bool) {
    objectView.frame.size = webView.scrollView.contentSize
    webView.scrollView.addSubview(objectView)
}
Drawing on the image view:
var start = CGPoint()
let size: CGFloat = 3
var color = UIColor.blackColor()
func draw(start: CGPoint, end: CGPoint) {
    if usingPen == true {
        UIGraphicsBeginImageContext(self.objectView.frame.size)
        let context = UIGraphicsGetCurrentContext()
        objectView.image?.drawInRect(CGRect(x: 0, y: 0, width: objectView.frame.width, height: objectView.frame.height))
        CGContextSetFillColorWithColor(context, color.CGColor)
        CGContextSetStrokeColorWithColor(context, color.CGColor)
        CGContextSetLineWidth(context, size)
        CGContextBeginPath(context)
        CGContextMoveToPoint(context, start.x, start.y)
        CGContextAddLineToPoint(context, end.x, end.y)
        CGContextStrokePath(context)
        let newImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        objectView.image = newImage
    }
}
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
    start = (touches.first?.locationInView(self.objectView))!
}
override func touchesMoved(touches: Set<UITouch>, withEvent event: UIEvent?) {
    draw(start, end: (touches.first?.locationInView(self.objectView))!)
    start = (touches.first?.locationInView(self.objectView))!
}
How can I prevent the haziness and movement of the drawings? Thanks for your help.
This probably occurs due to an image scaling issue. Since you're drawing into an image with a scale factor of 1 (which is the default) and your screen has a scale factor of 2 or 3, lines will continue to blur slightly every time they're copied and drawn. The solution is to specify the screen's scale when you create your image context:
UIGraphicsBeginImageContextWithOptions(self.objectView.frame.size, false, UIScreen.mainScreen().scale)
Note that the way you're drawing the line is fairly inefficient. Instead, you probably want to create a CGBitmapContext and continually draw to that; it'd be much faster and would also eliminate the "generation loss" problem you have.
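As a rough sketch of that suggestion, in current Swift syntax: create one bitmap CGContext (matched to the screen scale), stroke each new segment into it, and only convert it to a UIImage for display. objectView is assumed from the question; the stroke color and width are hard-coded placeholders:
var drawingContext: CGContext?   // created once and kept around

func makeDrawingContext() {
    let scale = UIScreen.main.scale
    let size = objectView.bounds.size
    let context = CGContext(data: nil,
                            width: Int(size.width * scale),
                            height: Int(size.height * scale),
                            bitsPerComponent: 8,
                            bytesPerRow: 0,
                            space: CGColorSpaceCreateDeviceRGB(),
                            bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
    // Flip into UIKit's top-left coordinate system and work in points.
    context?.scaleBy(x: scale, y: scale)
    context?.translateBy(x: 0, y: size.height)
    context?.scaleBy(x: 1, y: -1)
    drawingContext = context
}

func drawSegment(from start: CGPoint, to end: CGPoint) {
    guard let context = drawingContext else { return }
    // Stroke only the new segment; earlier strokes are already in the bitmap.
    context.setStrokeColor(UIColor.black.cgColor)
    context.setLineWidth(3)
    context.setLineCap(.round)
    context.move(to: start)
    context.addLine(to: end)
    context.strokePath()
    // Hand the accumulated bitmap to the image view.
    if let cgImage = context.makeImage() {
        objectView.image = UIImage(cgImage: cgImage, scale: UIScreen.main.scale, orientation: .up)
    }
}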

Calling a Draw function in Swift

I have the following code in my Draw2D class, which is attached to the view in the view controller.
In the view controller I have a var Drawing = Draw2D(). I have a button hooked up that calls the function Check with self.Drawing.Check(). The problem is that I can't get the line on the screen. The ellipse draws fine, and Check is being called, as I can see with println().
What is wrong?
import UIKit
class Draw2D: UIView {
    override func drawRect(rect: CGRect) {
        let context = UIGraphicsGetCurrentContext()
        CGContextSetLineWidth(context, 4.0)
        CGContextSetStrokeColorWithColor(context, UIColor.blueColor().CGColor)
        let rectangle = CGRectMake(60, 170, 200, 80)
        CGContextAddEllipseInRect(context, rectangle)
        CGContextStrokePath(context)
    }
    func Check() {
        let context = UIGraphicsGetCurrentContext()
        CGContextSetLineWidth(context, 2.0)
        CGContextMoveToPoint(context, CGFloat(100), CGFloat(300))
        CGContextAddLineToPoint(context, CGFloat(100 + 200), CGFloat(300 + 100))
        CGContextStrokePath(context)
    }
}
Your call to Check() is outside of the regular rendering loop - I do not really know what UIGraphicsGetCurrentContext() actually returns in that case. I am guessing it will return nil:
The current graphics context is nil by default. Prior to calling its drawRect: method, view objects push a valid context onto the stack, making it current.
Anyway, that would not change the fact that you cannot just call Check() and expect the line to be rendered. Assume for a second that the line was actually rendered: in the next rendering pass you would again only do what is written inside drawRect, and would therefore not display the line again.
What you have to do is add some kind of logic inside drawRect to determine whether to call Check() or not.
A possible way to do this would be the following:
class Draw2D: UIView {
    var callCheck: Bool? // changing it to Bool! or Bool will change the required check a few lines below
    override func drawRect(rect: CGRect) {
        let context = UIGraphicsGetCurrentContext()
        CGContextSetLineWidth(context, 4.0)
        CGContextSetStrokeColorWithColor(context, UIColor.blueColor().CGColor)
        let rectangle = CGRectMake(60, 170, 200, 80)
        CGContextAddEllipseInRect(context, rectangle)
        CGContextStrokePath(context)
        if callCheck != nil && callCheck! {
            Check()
        }
    }
    func Check() {
        let context = UIGraphicsGetCurrentContext()
        CGContextSetLineWidth(context, 2.0)
        CGContextMoveToPoint(context, CGFloat(100), CGFloat(300))
        CGContextAddLineToPoint(context, CGFloat(100 + 200), CGFloat(300 + 100))
        CGContextStrokePath(context)
    }
}
Alter your IBAction:
@IBAction func Button1(sender: AnyObject) {
    self.Drawing.callCheck = true
    self.Drawing.setNeedsDisplay()
}

Allowing users to draw rect on a UIImage, with the intention of cropping the image

I'm sure this has been asked a number of times from various different perspectives, but I'm unable to find an answer on here as yet.
What I want to achieve
What I would like to do is to display a UIImage, and allow the user to draw a rectangle on the image, and eventually crop their selection.
Research so far
I've found previous questions here on SO that handle the cropping; however, they often deal with static cropping areas that don't change, which leads to the following constraints of such a mechanism:
The area of the image you're interested in may be positioned anywhere. For example, if you're trying to crop a road sign, it may be centered in one image but aligned left in another, so you can't predict which area to crop.
The size and scale of the area of interest may change. For example, one image may be a close-up of that road sign, so the cropping area would be larger, but another image may have been taken from a distance, meaning the cropping area would be smaller.
With the combination of the above two variables, it's almost impossible to accurately predict where the area of interest in the image will be, so I'm relying on the user to define it by being able to "draw" a box around the area we're interested in, in this case a road sign.
This is all peachy on Android, since you can delegate all the hard work out with a nice intent, such as:
Intent intent = new Intent("com.android.camera.action.CROP");
However, I can't find an equivalent for iOS.
I've found this bit of code from this source:
- (UIImage *)imageByDrawingCircleOnImage:(UIImage *)image
{
    // begin a graphics context of sufficient size
    UIGraphicsBeginImageContext(image.size);
    // draw original image into the context
    [image drawAtPoint:CGPointZero];
    // get the context for CoreGraphics
    CGContextRef ctx = UIGraphicsGetCurrentContext();
    // set stroking color and draw circle
    [[UIColor redColor] setStroke];
    // make circle rect 5 px from border
    CGRect circleRect = CGRectMake(0, 0,
                                   image.size.width,
                                   image.size.height);
    circleRect = CGRectInset(circleRect, 5, 5);
    // draw circle
    CGContextStrokeEllipseInRect(ctx, circleRect);
    // make image out of bitmap context
    UIImage *retImage = UIGraphicsGetImageFromCurrentImageContext();
    // free the context
    UIGraphicsEndImageContext();
    return retImage;
}
I believe this is a good starting point (although it draws a circle rather than cropping), but it relies on predefining the area you want when calling CGRectMake.
This previous question also details how to do the actual cropping.
I'm assuming that to allow the user to draw the rect, I'd need to integrate with Gestures?
The Question :
How can I allow the user, to draw a rect over an image view, with the intent of cropping that area?
You could give BJImageCropper a try:
A simple UIView subclass that allows a user to crop an image. If you use it, I'd love to know! Twitter: @barrettjacobsen
This post is already 5 years old, but for future reference, this is how I managed to get it done. The following code is a combination of Rob's answer and some image cropping.
Xcode 9 and Swift 4 are being used here.
Add 2 view controllers.
Add an image view and 2 buttons to the first view controller, and another image view to the last view controller.
Link all views to the source file.
View controller
import UIKit

extension UIView {
    func snapshot(afterScreenUpdates: Bool = false) -> UIImage {
        UIGraphicsBeginImageContextWithOptions(bounds.size, isOpaque, 0)
        drawHierarchy(in: bounds, afterScreenUpdates: afterScreenUpdates)
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()
        return image
    }
}

extension UIImage {
    func crop(rect: CGRect) -> UIImage {
        var rect = rect
        rect.origin.x *= self.scale
        rect.origin.y *= self.scale
        rect.size.width *= self.scale
        rect.size.height *= self.scale
        let imageRef = self.cgImage!.cropping(to: rect)
        let image = UIImage(cgImage: imageRef!, scale: self.scale, orientation: self.imageOrientation)
        return image
    }
}

class ViewController: UIViewController {
    var rec: CGRect!
    var cropImage: UIImage!
    @IBOutlet weak var imageView: UIImageView!

    private let shapeLayer: CAShapeLayer = {
        let _shapeLayer = CAShapeLayer()
        _shapeLayer.fillColor = UIColor.clear.cgColor
        _shapeLayer.strokeColor = UIColor.green.cgColor
        _shapeLayer.lineWidth = 2
        return _shapeLayer
    }()

    private var startPoint: CGPoint!

    override func viewDidLoad() {
        super.viewDidLoad()
        imageView.layer.addSublayer(shapeLayer)
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        clear()
        startPoint = touches.first?.location(in: imageView)
    }

    func clear() {
        imageView.layer.sublayers = nil
        imageView.image = UIImage(named: "aa")
        imageView.layer.addSublayer(shapeLayer)
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let startPoint = startPoint, let touch = touches.first else { return }
        let point: CGPoint
        if let predictedTouch = event?.predictedTouches(for: touch)?.last {
            point = predictedTouch.location(in: imageView)
        } else {
            point = touch.location(in: imageView)
        }
        updatePath(from: startPoint, to: point)
    }

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let startPoint = startPoint, let touch = touches.first else { return }
        let point = touch.location(in: imageView)
        updatePath(from: startPoint, to: point)
        imageView.image = imageView.snapshot(afterScreenUpdates: true)
        shapeLayer.path = nil
    }

    override func touchesCancelled(_ touches: Set<UITouch>, with event: UIEvent?) {
        shapeLayer.path = nil
    }

    private func updatePath(from startPoint: CGPoint, to point: CGPoint) {
        let size = CGSize(width: point.x - startPoint.x, height: point.y - startPoint.y)
        rec = CGRect(origin: startPoint, size: size)
        shapeLayer.path = UIBezierPath(rect: rec).cgPath
    }

    @IBAction func btnTapped(_ sender: Any) {
        clear()
    }

    @IBAction func btnCropTapped(_ sender: Any) {
        let newRec = CGRect(origin: CGPoint(x: rec.origin.x, y: rec.origin.y), size: rec.size)
        cropImage = imageView.image?.crop(rect: newRec)
        print(rec)
        print(newRec)
        self.performSegue(withIdentifier: "toImage", sender: nil)
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        if segue.identifier == "toImage" {
            if let destination = segue.destination as? ImageViewController {
                destination.croppedImage = cropImage
            }
        }
    }
}
