Changing the size of a CALayer also changes the size of previously added layers - UIView

I'm trying to draw over a UIImageView with this custom class, and I would like to be able to change the size and color of the lines drawn by the user. But every time I change the size or color of the CALayer, the color and size of the previously added layers change as well.
class DrawingImageView: UIImageView {
    private lazy var path = UIBezierPath()
    private lazy var previousTouchPoint = CGPoint.zero
    var strokeColor: CGColor!
    var strokeWidth: CGFloat!
    override init(frame: CGRect) {
        super.init(frame: frame)
    }
    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
    }
    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesBegan(touches, with: event)
        let shapeLayer = CAShapeLayer()
        shapeLayer.lineWidth = strokeWidth ?? 4
        shapeLayer.strokeColor = strokeColor ?? UIColor.black.cgColor
        shapeLayer.lineCap = .round
        shapeLayer.lineJoin = .round
        layer.addSublayer(shapeLayer)
        if let location = touches.first?.location(in: self) { previousTouchPoint = location }
    }
    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesMoved(touches, with: event)
        if let location = touches.first?.location(in: self) {
            path.move(to: location)
            path.addLine(to: previousTouchPoint)
            previousTouchPoint = location
            let layerA = layer.sublayers?.last as! CAShapeLayer
            layerA.path = path.cgPath
        }
    }
}
This is how I assign that class to tempImgView:
let mainImageView = UIImageView()
var tempImgView: DrawingImageView?
mainImageView.frame = CGRect(x: 0, y: self.view.bounds.height / 7, width: imageWidth, height: imageHieght)
mainImageView.contentMode = .scaleAspectFit
mainImageView.layer.zPosition = 2
mainImageView.isOpaque = false
mainImageView.backgroundColor = .clear
mainImageView.isUserInteractionEnabled = true
mainImageView.tintColor = .clear
self.view.addSubview(mainImageView)
func DrawOverImage() {
    mainImageView.isHidden = true
    let drawnImageView = DrawingImageView(frame: mainImageView.frame)
    drawnImageView.image = mainImageView.image
    drawnImageView.contentMode = .scaleAspectFit
    drawnImageView.isUserInteractionEnabled = true
    drawnImageView.layer.zPosition = 3
    self.tempImgView?.isUserInteractionEnabled = true
    self.tempImgView = drawnImageView
    self.view.addSubview(tempImgView!)
}
I also tried creating an array of CAShapeLayers, adding the layers to that array, and then replacing sublayers like view.layer.sublayers[0] = layers[0], but that also didn't work, and I couldn't find any useful resource online.
Any help or suggestion would be really appreciated, thanks.

That is because you are continuing the same path each time, so the latest stroke color and width get applied to the whole path.
Without looking at much of your other logic, I believe you should initialize a new path each time in your touchesBegan, and this new path should be used in touchesMoved.
So in touchesBegan, try adding path = UIBezierPath() after layer.addSublayer(shapeLayer) and see if this gives you what you are looking for.
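A minimal sketch of that change, keeping the rest of DrawingImageView as posted (the clear fillColor is an addition, not part of the original code):
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    super.touchesBegan(touches, with: event)
    let shapeLayer = CAShapeLayer()
    shapeLayer.lineWidth = strokeWidth ?? 4
    shapeLayer.strokeColor = strokeColor ?? UIColor.black.cgColor
    shapeLayer.fillColor = UIColor.clear.cgColor // assumption: avoids the default black fill on the open stroke
    shapeLayer.lineCap = .round
    shapeLayer.lineJoin = .round
    layer.addSublayer(shapeLayer)
    // Start a fresh path for this stroke so earlier strokes keep their own layer's width and color.
    path = UIBezierPath()
    if let location = touches.first?.location(in: self) { previousTouchPoint = location }
}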

Related

CALayer drawing outside of view

I have this behaviour with this code:
import UIKit
class Page: UIView {
var bezierMemory = [BezierRecord]()
var currentBezier: UIBezierPath = UIBezierPath()
var firstPoint: CGPoint = CGPoint()
var previousPoint: CGPoint = CGPoint()
var morePreviousPoint: CGPoint = CGPoint()
var previousCALayer: CALayer = CALayer()
var pointCounter = 0
var selectedPen: Pen = Pen(width: 3.0, strokeOpacity: 1, strokeColor: .red, fillColor: .init(gray: 0, alpha: 0.5), isPencil: true, connectsToStart: true, fillPencil: true)
enum StandardPageSizes {
case A4, LEGAL, LETTER
}
var firstCALayer = true
var pointsTotal = 0
override init(frame: CGRect) {
super.init(frame: .zero)
}
required init?(coder: NSCoder) {
super.init(coder: coder)
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
super.touchesBegan(touches, with: event)
guard let touch = touches.first else { return }
let point = touch.location(in: self)
firstPoint = point
pointCounter = 1
currentBezier = UIBezierPath()
currentBezier.lineWidth = selectedPen.width
selectedPen.getStroke().setStroke()
currentBezier.move(to: point)
previousPoint = point
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
super.touchesMoved(touches, with: event)
guard let touch = touches.first else { return }
let point = touch.location(in: self)
pointCounter += 1
if (pointCounter == 3) {
let midpoint = CGPoint(x: (morePreviousPoint.x + point.x)/2.0, y: (morePreviousPoint.y + point.y)/2.0)
currentBezier.addQuadCurve(to: midpoint, controlPoint: morePreviousPoint)
let updatedCALayer = CAShapeLayer()
updatedCALayer.path = currentBezier.cgPath
updatedCALayer.lineWidth = selectedPen.width
updatedCALayer.opacity = selectedPen.strokeOpacity
updatedCALayer.strokeColor = selectedPen.getStroke().cgColor
updatedCALayer.fillColor = selectedPen.getFill()
if (firstCALayer) {
layer.addSublayer(updatedCALayer)
firstCALayer = false
} else {
layer.replaceSublayer(previousCALayer, with: updatedCALayer)
}
previousCALayer = updatedCALayer
pointCounter = 1
}
morePreviousPoint = previousPoint
previousPoint = point
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
super.touchesEnded(touches, with: event)
guard let touch = touches.first else { return }
let point = touch.location(in: self)
if (pointCounter != 3) {
if (selectedPen.connectsToStart) {
currentBezier.addQuadCurve(to: firstPoint, controlPoint: previousPoint)
} else {
currentBezier.addQuadCurve(to: point, controlPoint: previousPoint)
}
let updatedCALayer = CAShapeLayer()
updatedCALayer.path = currentBezier.cgPath
updatedCALayer.lineWidth = selectedPen.width
updatedCALayer.opacity = selectedPen.strokeOpacity
updatedCALayer.strokeColor = selectedPen.getStroke().cgColor
updatedCALayer.fillColor = selectedPen.getFill()
if (firstCALayer) {
layer.addSublayer(updatedCALayer)
firstCALayer = false
} else {
// layer.setNeedsDisplay()
layer.replaceSublayer(previousCALayer, with: updatedCALayer)
}
}
firstCALayer = true
let bezierRecord = BezierRecord(bezier: currentBezier, strokeColor: selectedPen.getStroke(), fillColor: selectedPen.getFill(), strokeWidth: selectedPen.width)
bezierMemory.append(bezierRecord)
}
private func normPoint(point: CGPoint) -> CGPoint {
return CGPoint(x: point.x/frame.width, y: point.y/frame.height)
}
public class BezierRecord {
var bezier: UIBezierPath
var strokeColor: UIColor
var strokeWidth: CGFloat
var fillColor: CGColor
init(bezier: UIBezierPath, strokeColor: UIColor, fillColor: CGColor, strokeWidth: CGFloat) {
self.bezier = bezier
self.strokeColor = strokeColor
self.strokeWidth = strokeWidth
self.fillColor = fillColor
}
}
}
Really the only relevant parts are touchesMoved and touchesEnded, where the CALayers are dealt with. As you can see from the gif, I can draw outside the bounds of the Page (UIView) as long as I start drawing inside the bounds. I do not want this: what I would like is behaviour where you can continue a stroke outside the bounds of the Page (as long as you start it on the Page), but the stroke won't appear outside of the Page. Any ideas?
EDIT: I should add that for these UIBezierPaths, (0, 0) is considered to be the top left of the Page (white), and not the entire view. Thus, for example, Béziers that start on the Page and continue off it end up with coordinates outside the Page (e.g. negative).
All you should need to do is set .clipsToBounds = true on the view.
One approach:
override init(frame: CGRect) {
    super.init(frame: .zero)
    commonInit()
}
required init?(coder: NSCoder) {
    super.init(coder: coder)
    commonInit()
}
func commonInit() {
    self.clipsToBounds = true
}
You could put self.clipsToBounds = true in both of the init funcs, but it is common practice (no pun intended) to add a "common init" func like this (it can be named whatever... this is how I do it). Frequently we have other "initial setup" code that we want called, and this avoids duplicating the code.

Delay when adding lines to a UIBezierPath in CAShapeLayer

I am trying to implement functionality to allow a user to draw in a UIImageView. My current code is as follows:
class DrawImageView: UIImageView {
// MARK: - Properties
private var lastPoint = CGPoint.zero
private var lineColor = UIColor.black
private var lineWidth: CGFloat = 10.0
private var opacity: CGFloat = 1.0
private var path: UIBezierPath?
private var swiped = false
private var tempShapeLayer: CAShapeLayer?
// MARK: - Initializers
override init(image: UIImage?) {
super.init(image: image)
isUserInteractionEnabled = true
clipsToBounds = true
}
@available(*, unavailable)
private init() {
fatalError("init() has not been implemented")
}
@available(*, unavailable)
private override init(frame: CGRect) {
fatalError("init(frame:) has not been implemented")
}
@available(*, unavailable)
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
// MARK: - Draw Methods
extension DrawImageView {
private func drawLine(from fromPoint: CGPoint, to toPoint: CGPoint) {
guard let tempShapeLayer = tempShapeLayer,
let cgPath = tempShapeLayer.path
else { return }
let path = UIBezierPath(cgPath: cgPath)
path.move(to: fromPoint)
path.addLine(to: toPoint)
tempShapeLayer.path = path.cgPath
setNeedsDisplay()
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
guard let touch = touches.first else { return }
swiped = false
lastPoint = touch.location(in: self)
let shapeLayer = CAShapeLayer()
shapeLayer.lineCap = .round
shapeLayer.path = path?.cgPath
shapeLayer.strokeColor = lineColor.cgColor
shapeLayer.lineWidth = lineWidth
shapeLayer.fillColor = UIColor.clear.cgColor
shapeLayer.frame = bounds
let path = UIBezierPath()
shapeLayer.path = path.cgPath
layer.addSublayer(shapeLayer)
shapeLayer.fillColor = UIColor.red.cgColor
tempShapeLayer = shapeLayer
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
guard let touch = touches.first else { return }
swiped = true
let currentPoint = touch.location(in: self)
drawLine(from: lastPoint, to: currentPoint)
lastPoint = currentPoint
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
if !swiped {
// Draw a single point
drawLine(from: lastPoint, to: lastPoint)
}
}
}
This image view is embedded inside a scroll view:
class ZoomImageVC: UIViewController {
// MARK: - Properties
private let bag = DisposeBag()
// MARK: - Views
private let scrollView = UIScrollView()
// Important this is init with image immediately
private let backgroundImageView = DrawImageView(image: UIImage(named: "test.jpg"))
// MARK: - View Lifecycle
override func viewDidLoad() {
super.viewDidLoad()
setupViews()
}
override func viewWillLayoutSubviews() {
super.viewWillLayoutSubviews()
// This will update on rotate too
updateMinZoomScaleForSize(view.bounds.size)
}
// MARK: - Setup
private func setupViews() {
view.backgroundColor = .white
scrollView.delegate = self
view.addSubview(scrollView)
scrollView.snp.makeConstraints {
$0.edges.equalToSuperview()
}
scrollView.rx.didZoom
.subscribe(with: self, onNext: { `self`, _ in
self.updateConstraintsForSize(self.view.bounds.size)
})
.disposed(by: bag)
scrollView.addSubview(backgroundImageView)
// This is DrawingImageView
backgroundImageView.snp.makeConstraints {
$0.edges.equalToSuperview()
}
}
private func updateMinZoomScaleForSize(_ size: CGSize) {
let widthScale = size.width / backgroundImageView.bounds.width
let heightScale = size.height / backgroundImageView.bounds.height
let minScale = min(widthScale, heightScale)
scrollView.minimumZoomScale = minScale
scrollView.zoomScale = minScale
}
private func updateConstraintsForSize(_ size: CGSize) {
let yOffset = max(0, (size.height - backgroundImageView.frame.height) / 2.0)
let xOffset = max(0, (size.width - backgroundImageView.frame.width) / 2.0)
backgroundImageView.snp.remakeConstraints {
$0.leading.trailing.equalToSuperview().offset(xOffset)
$0.top.bottom.equalToSuperview().offset(yOffset)
}
view.layoutIfNeeded()
}
}
// MARK: - UIScrollViewDelegate
extension ZoomImageVC: UIScrollViewDelegate {
func viewForZooming(in scrollView: UIScrollView) -> UIView? {
backgroundImageView
}
}
The code works fine; however, there seems to be a small delay. For example, if I start stroking a path there is a slight delay until the path appears; continuing to draw the path then seems smooth. I can scribble the path very fast and it keeps up. It's just the start of the drawing that lags.
If I just paint a single dot on the image view without dragging, there also seems to be a small delay between me tapping and it appearing on the screen.
If I remove DrawImageView from the scroll view there seems to be no delay. Why would there be a delay when this is in a scroll view?
Ran your code and not sure what "delay" you're seeing.
However, you're doing a few things that you don't need to do...
You can use a single path and add lines as the touch moves... no need to moveTo / addLineTo every time.
Also, if you want the initial "dot" to show up as soon as the touch begins, you can moveTo / addLineTo the touch point on touchesBegan.
Here's your class, edited to (hopefully) get rid of the delay you're experiencing:
class DrawImageView: UIImageView {
// MARK: - Properties
private var lineColor = UIColor.black
private var lineWidth: CGFloat = 10.0
private var opacity: CGFloat = 1.0
// let's make these non-optional
private var drawingPath: UIBezierPath!
private var drawingShapeLayer: CAShapeLayer!
// MARK: - Initializers
override init(image: UIImage?) {
super.init(image: image)
isUserInteractionEnabled = true
clipsToBounds = true
}
@available(*, unavailable)
private init() {
fatalError("init() has not been implemented")
}
@available(*, unavailable)
private override init(frame: CGRect) {
fatalError("init(frame:) has not been implemented")
}
@available(*, unavailable)
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
// MARK: - Draw Methods
extension DrawImageView {
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
guard let touch = touches.first else { return }
let thisPoint = touch.location(in: self)
// create new shape layer
drawingShapeLayer = CAShapeLayer()
drawingShapeLayer.lineCap = .round
drawingShapeLayer.strokeColor = lineColor.cgColor
drawingShapeLayer.lineWidth = lineWidth
drawingShapeLayer.fillColor = UIColor.clear.cgColor
drawingShapeLayer.frame = bounds
// create new path
drawingPath = UIBezierPath()
// move to touch point
drawingPath.move(to: thisPoint)
// if we want the line (an initial "dot")
// to show up immediately
// add line to same touch point
drawingPath.addLine(to: thisPoint)
// assign the shape layer path
drawingShapeLayer.path = drawingPath.cgPath
// add the layer
layer.addSublayer(drawingShapeLayer)
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
guard let touch = touches.first else { return }
let thisPoint = touch.location(in: self)
// add line to existing path
drawingPath.addLine(to: thisPoint)
// update path of shape layer
drawingShapeLayer.path = drawingPath.cgPath
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
// don't really need to do anything here
// unless you're taking some other action when
// touch ends
}
}

How to draw / doodle a line on a UIImage in Swift?

I need to draw / doodle a line on a UIImage like the picture above. I see a lot of tutorials for doodling a line on a UIView, but not on a UIImage.
After the user doodles on the image, I want to save it as a new image (an image that has the lines). How do I do that in Swift?
I can only find how to draw a line on a UIView, not on a UIImage.
The code for drawing on a UIView is like the one in this YouTube tutorial http://youtube.com/watch?v=gbFHqHHApC4 ; I changed it so my DrawView inherits from UIImageView:
struct Stroke {
let startPoint : CGPoint
let endPoint : CGPoint
let strokeColor : CGColor
}
class DrawView : UIImageView {
var isDrawing = false
var lastPoint : CGPoint!
var strokeColor : CGColor! = UIColor.black.cgColor
var strokes = [Stroke]()
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
guard !isDrawing else { return }
isDrawing = true
guard let touch = touches.first else {return}
let currentPoint = touch.location(in: self)
lastPoint = currentPoint
print(currentPoint)
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
guard isDrawing else { return}
guard let touch = touches.first else {return}
let currentPoint = touch.location(in: self)
let stroke = Stroke(startPoint: lastPoint, endPoint: currentPoint, strokeColor: strokeColor)
strokes.append(stroke)
lastPoint = currentPoint
setNeedsDisplay()
print(currentPoint)
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
guard isDrawing else { return}
isDrawing = false
guard let touch = touches.first else {return}
let currentPoint = touch.location(in: self)
let stroke = Stroke(startPoint: lastPoint, endPoint: currentPoint, strokeColor: strokeColor)
strokes.append(stroke)
setNeedsDisplay()
lastPoint = nil
print(currentPoint)
}
override func draw(_ rect: CGRect) {
let context = UIGraphicsGetCurrentContext()
context?.setLineWidth(5)
context?.setLineCap(.round)
for stroke in strokes {
context?.beginPath()
context?.move(to: stroke.startPoint)
context?.addLine(to: stroke.endPoint)
context?.setStrokeColor(stroke.strokeColor)
context?.strokePath()
}
}
func erase() {
strokes = []
strokeColor = UIColor.black.cgColor
setNeedsDisplay()
}
}
I have assigned the UIImageView in the storyboard to use the custom class "DrawView" as in my code above, but I don't know why the lines don't appear on my UIImage.
Details
Xcode 10.2.1 (10E1001), Swift 5
Solution
import UIKit
// https://stackoverflow.com/a/41009006/4488252
class DrawnImageView: UIImageView {
private lazy var path = UIBezierPath()
private lazy var previousTouchPoint = CGPoint.zero
private lazy var shapeLayer = CAShapeLayer()
override func awakeFromNib() {
super.awakeFromNib()
setupView()
}
override init(frame: CGRect) {
super.init(frame: frame)
setupView()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
}
func setupView(){
layer.addSublayer(shapeLayer)
shapeLayer.lineWidth = 4
shapeLayer.strokeColor = UIColor.white.cgColor
isUserInteractionEnabled = true
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
super.touchesBegan(touches, with: event)
if let location = touches.first?.location(in: self) { previousTouchPoint = location }
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
super.touchesMoved(touches, with: event)
if let location = touches.first?.location(in: self) {
path.move(to: location)
path.addLine(to: previousTouchPoint)
previousTouchPoint = location
shapeLayer.path = path.cgPath
}
}
}
// https://stackoverflow.com/a/40953026/4488252
extension UIView {
var screenShot: UIImage? {
let scale = UIScreen.main.scale
UIGraphicsBeginImageContextWithOptions(layer.frame.size, false, scale)
if let context = UIGraphicsGetCurrentContext() {
layer.render(in: context)
let screenshot = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return screenshot
}
return nil
}
}
Usage
Add DrawnImageView to your root (parent) view (drawing by touch will be enabled automatically)
To save the UIImage, use drawnImageView.screenShot
Full sample
Do not forget to include the solution code above.
import UIKit
class ViewController: UIViewController {
fileprivate weak var savedImageView: UIImageView?
fileprivate weak var drawnImageView: UIImageView?
override func viewDidLoad() {
super.viewDidLoad()
let drawnImageView = addImageView(image: UIImage(named: "swift")) as DrawnImageView
drawnImageView.topAnchor.constraint(equalTo: view.topAnchor).isActive = true
drawnImageView.heightAnchor.constraint(equalToConstant: UIScreen.main.bounds.height/3).isActive = true
self.drawnImageView = drawnImageView
let button = UIButton()
button.setTitle("Save Image", for: .normal)
button.translatesAutoresizingMaskIntoConstraints = false
button.setTitleColor(.blue, for: .normal)
view.addSubview(button)
button.topAnchor.constraint(equalTo: drawnImageView.bottomAnchor, constant: 60).isActive = true
button.centerXAnchor.constraint(equalTo: view.centerXAnchor).isActive = true
button.heightAnchor.constraint(equalToConstant: 44).isActive = true
button.addTarget(self, action: #selector(saveImageButtonTouchUpInside), for: .touchUpInside)
let savedImageView = addImageView()
savedImageView.topAnchor.constraint(equalTo: button.bottomAnchor, constant: 60).isActive = true
savedImageView.bottomAnchor.constraint(equalTo: view.bottomAnchor).isActive = true
self.savedImageView = savedImageView
}
private func addImageView<T: UIImageView>(image: UIImage? = nil) -> T {
let imageView = T(frame: .zero)
imageView.contentMode = .scaleAspectFit
imageView.image = image
view.addSubview(imageView)
imageView.translatesAutoresizingMaskIntoConstraints = false
imageView.leadingAnchor.constraint(equalTo: view.leadingAnchor).isActive = true
imageView.trailingAnchor.constraint(equalTo: view.trailingAnchor).isActive = true
return imageView
}
@objc func saveImageButtonTouchUpInside(sender: UIButton) {
savedImageView?.image = drawnImageView?.screenShot
}
}
Results
You can put your UIImageView in the background with a UIView on top of it, then capture the UIView as an image and merge the two.
Refer to this to capture the UIView: How to capture UIView to UIImage without loss of quality on retina display
And then merge them using:
func mergeImages (forgroundImage : UIImage, backgroundImage : UIImage, size : CGSize) -> UIImage {
    let bottomImage = backgroundImage
    UIGraphicsBeginImageContext(size)
    let areaSize = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    bottomImage.draw(in: areaSize)
    let topImage = forgroundImage
    topImage.draw(in: areaSize, blendMode: .normal, alpha: 1.0)
    let newImage:UIImage = UIGraphicsGetImageFromCurrentImageContext()!
    UIGraphicsEndImageContext()
    return newImage
}
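A possible way to tie the capture and merge steps together; overlayView and photoImageView are hypothetical names for the transparent drawing view and the image view underneath it, and screenShot is assumed to be a capture helper like the UIView extension shown earlier in this thread:
if let drawing = overlayView.screenShot, let photo = photoImageView.image {
    // Merge the doodle on top of the photo at the photo's native size.
    let combined = mergeImages(forgroundImage: drawing, backgroundImage: photo, size: photo.size)
    // For example, save the result to the photo library (requires the appropriate permission).
    UIImageWriteToSavedPhotosAlbum(combined, nil, nil, nil)
}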
This may help you. Refer to the link:
How do I draw on an image in Swift?
Make an image graphics context. (Before iOS 10, you would do this by calling UIGraphicsBeginImageContextWithOptions. In iOS 10 there's another way, UIGraphicsImageRenderer, but you don't have to use it if you don't want to.)
Draw (i.e. copy) the image into the context. (UIImage actually has draw... methods for this very purpose.)
Draw your line into the context. (There are CGContext functions for this.)
Extract the resulting image from the context. (For example, if you used UIGraphicsBeginImageContextWithOptions, you would use UIGraphicsGetImageFromCurrentImageContext.) Then close the context.
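A minimal sketch of those four steps, assuming a baseImage: UIImage and two endpoints for the line (all names here are illustrative, not from the original answer):
func drawLine(from startPoint: CGPoint, to endPoint: CGPoint, on baseImage: UIImage) -> UIImage? {
    // 1. Make an image graphics context the size of the image.
    UIGraphicsBeginImageContextWithOptions(baseImage.size, false, baseImage.scale)
    defer { UIGraphicsEndImageContext() }
    // 2. Draw (copy) the image into the context.
    baseImage.draw(in: CGRect(origin: .zero, size: baseImage.size))
    // 3. Draw the line into the same context.
    guard let context = UIGraphicsGetCurrentContext() else { return nil }
    context.setLineWidth(5)
    context.setLineCap(.round)
    context.setStrokeColor(UIColor.red.cgColor)
    context.move(to: startPoint)
    context.addLine(to: endPoint)
    context.strokePath()
    // 4. Extract the resulting image; the context is closed by the defer above.
    return UIGraphicsGetImageFromCurrentImageContext()
}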

CAShapeLayer (layer): move from one place to another using a gesture in Swift

Below is my code. I want to move the shapeLayer (layer) using a gesture.
let shapeLayer = CAShapeLayer()
let ellipsePath = UIBezierPath(ovalInRect: CGRectMake(100, 200, width , height))
shapeLayer.path = ellipsePath.CGPath
shapeLayer.fillColor = UIColor.clearColor().CGColor
shapeLayer.strokeColor = UIColor.blueColor().CGColor
shapeLayer.lineWidth = 1
shapeLayer.sublayers?.removeAll()
self.view.layer.addSublayer(shapeLayer)
I am using the touch function below to identify the layer, but it did not work.
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?)
{
    let point = (touches.first! as UITouch).locationInView(self.view) // Where you pressed
    if let layer = self.view.layer.hitTest(point) as? CAShapeLayer { // If you hit a layer and if its a Shapelayer
        if CGPathContainsPoint(layer.path, nil, point, false) { // Optional, if you are inside its content path
            print("Hit shapeLayer") // Do something
        }
    }
}
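One common approach, sketched here in current Swift syntax (this is not from the question; draggedLayer and handlePan are illustrative names): keep a reference to the layer found by hitTest, then move it with a UIPanGestureRecognizer, wrapping the position change in a CATransaction so it is not implicitly animated.
var draggedLayer: CAShapeLayer?

@objc func handlePan(_ pan: UIPanGestureRecognizer) {
    let point = pan.location(in: view)
    switch pan.state {
    case .began:
        // Same hit test as in the question, used here to pick up the layer under the finger.
        draggedLayer = view.layer.hitTest(point) as? CAShapeLayer
    case .changed:
        guard let layer = draggedLayer else { return }
        let translation = pan.translation(in: view)
        CATransaction.begin()
        CATransaction.setDisableActions(true) // suppress the implicit 0.25 s animation
        layer.position = CGPoint(x: layer.position.x + translation.x,
                                 y: layer.position.y + translation.y)
        CATransaction.commit()
        pan.setTranslation(.zero, in: view)
    default:
        draggedLayer = nil
    }
}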

Recognizing touch events only inside the masked view

I am creating this structure through masks:
Each hexagon should be clickable. This is the code I used:
// To create one masked hexagun
let hex = UIImage(named: "hexum")
let mask = CALayer()
mask.contents = hex!.CGImage
mask.frame = CGRectMake(0, 0, hex!.size.width, hex!.size.height)
let img = UIImageView(image: UIImage(named: "img"))
img.layer.mask = mask
img.layer.masksToBounds = true
// Gesture Recognizer
let singleTap = UITapGestureRecognizer(target: self, action: "tapDetected")
singleTap.numberOfTapsRequired = 1
img.addGestureRecognizer(singleTap)
img.userInteractionEnabled = true
func tapDetected() {
print("Clicked!")
}
The problem is that the clickable region is larger than the mask, which causes the regions to overlap each other. Something like this:
The yellow border shows the clickable region (not actually visible)
I am a beginner and it may be a trivial problem, but can you help me solve it? Thank you.
If you want to do this perfectly, use the UIGestureRecognizerDelegate method gestureRecognizer(gesture, shouldReceiveTouch: touch) -> Bool. You will need to map the given gesture recogniser to a particular hexagon and then do pixel-precise hit-testing on the image for that hexagon. This latter part is achieved by rendering the mask image to a graphics context and finding the pixel at the point corresponding to the touch location.
However, this is likely overkill. You can simplify the problem by hit-testing each shape as a circle, not a hexagon. The circle roughly approximates the hexagon, so it will work almost the same for a user and avoids messy pixel-level alpha checks. The inaccuracy of touch input will cover up the inaccurate regions.
Another option is to rework your views to be based on CAShapeLayer masks. CAShapeLayer includes a path property, and Bézier paths in UIKit include their own path-contains-point method, so you can just use that for this purpose.
You can adopt the UIGestureRecognizerDelegate protocol, and implement the gestureRecognizer(_:shouldReceiveTouch:) method to further constrain whether or not a gesture should fire. The link suggested by @tnylee would be a good place to start in terms of figuring out how to do such hit testing.
@Benjamin Mayo gave great options to resolve the issue. I ended up choosing the simplest yet efficient one: hit-testing each shape as a circle.
I'm posting the code in case it helps someone else:
class hexum: UIImageView {
var maskFrame: CGRect?
convenience init(mask: String, inside: String) {
// Mask things:
let masked = CALayer()
let img = UIImage(named: mask)
masked.contents = img?.CGImage
masked.frame = CGRectMake(x, y, img!.size.width, img!.size.height)
self.init(image: UIImage(named: inside))
self.layer.mask = masked
self.layer.masksToBounds = true
maskFrame = masked.frame
}
// The touch event things
// Here, I got help from @Matt in (http://stackoverflow.com/a/21081518/3462518):
override func pointInside(point: CGPoint, withEvent event: UIEvent?) -> Bool {
let p = UIBezierPath(ovalInRect: maskFrame!)
return p.containsPoint(point)
}
}
let firstOne = hexum(mask: "img1", inside: "img2")
let tap = UITapGestureRecognizer(target: self, action: "clicked")
tap.numberOfTapsRequired = 1
firstOne.userInteractionEnabled = true
firstOne.addGestureRecognizer(tap)
func clicked() {
...
}
Result:
Here is a Swift 3 HexagonImageView, tappable just within the hexagon:
First, create a UIBezierPath subclass:
final class HexagonPath: UIBezierPath {
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
var sideLength: CGFloat = 100 {
didSet {
redrawPath()
}
}
override init() {
super.init()
redrawPath()
}
private func redrawPath() {
removeAllPoints()
let yDrop = sideLength / 2
move(to: CGPoint(x: sideLength, y: 0))
addLine(to: CGPoint(x: sideLength * 2, y: yDrop))
addLine(to: CGPoint(x: sideLength * 2, y: yDrop + sideLength))
addLine(to: CGPoint(x: sideLength, y: (yDrop * 2) + sideLength))
addLine(to: CGPoint(x: 0, y: yDrop + sideLength))
addLine(to: CGPoint(x: 0, y: yDrop ))
//addLine(to: CGPoint(x: sideLength, y: 0))
close()
}
}
Then create a Hexagon UIImageView:
class HexagonImageView: UIImageView {
let hexagonPath = HexagonPath()
var sideLength: CGFloat = 100 {
didSet {
hexagonPath.sideLength = sideLength
initilize()
}
}
init() {
super.init(frame: CGRect())
initilize()
}
override init(frame: CGRect) {
super.init(frame: frame)
initilize()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
initilize()
}
private func initilize() {
self.frame.size.width = sideLength * 2
self.frame.size.height = sideLength * 2
contentMode = .scaleAspectFill
mask(withPath: hexagonPath)
}
// MAKE THE TAP-HIT POINT JUST THE INNER PATH
override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
return hexagonPath.contains(point)
}
}
Using this Extension:
extension UIView {
func mask(withRect rect: CGRect, inverse: Bool = false) {
let path = UIBezierPath(rect: rect)
let maskLayer = CAShapeLayer()
if inverse {
path.append(UIBezierPath(rect: self.bounds))
maskLayer.fillRule = kCAFillRuleEvenOdd
}
maskLayer.path = path.cgPath
self.layer.mask = maskLayer
}
func mask(withPath path: UIBezierPath, inverse: Bool = false) {
let path = path
let maskLayer = CAShapeLayer()
if inverse {
path.append(UIBezierPath(rect: self.bounds))
maskLayer.fillRule = kCAFillRuleEvenOdd
}
maskLayer.path = path.cgPath
self.layer.mask = maskLayer
}
}
Finally, you can use it like this in the view controller's viewDidLoad:
override func viewDidLoad() {
super.viewDidLoad()
let hexImageView = HexagonImageView()
hexImageView.image = UIImage(named: "hotcube")
hexImageView.sideLength = 100
let tap = UITapGestureRecognizer(target: self, action: #selector(imageTapped))
tap.numberOfTapsRequired = 1
hexImageView.isUserInteractionEnabled = true
hexImageView.addGestureRecognizer(tap)
view.addSubview(hexImageView)
}
func imageTapped() {
print("tapped")
}
I'm not sure it's the simplest or most correct way, but I'd check the location of the user's tap by overriding touchesBegan(touches:withEvent:).
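A rough sketch of that idea in current Swift syntax (hexagonPath is an assumed reference to the same UIBezierPath used for the view's mask, as in the HexagonImageView answer above):
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    super.touchesBegan(touches, with: event)
    guard let point = touches.first?.location(in: self) else { return }
    // Only treat the touch as a hit if it falls inside the hexagon's path.
    if hexagonPath.contains(point) {
        print("tapped inside the hexagon")
    }
}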

Resources