Recognizing touch events only inside the masked view - ios

I am creating this structure using masks:
Each hexagon should be clickable. This is the code I used:
// To create one masked hexagon
let hex = UIImage(named: "hexum")
let mask = CALayer()
mask.contents = hex!.CGImage
mask.frame = CGRectMake(0, 0, hex!.size.width, hex!.size.height)
let img = UIImageView(image: UIImage(named: "img"))
img.layer.mask = mask
img.layer.masksToBounds = true

// Gesture recognizer
let singleTap = UITapGestureRecognizer(target: self, action: "tapDetected")
singleTap.numberOfTapsRequired = 1
img.addGestureRecognizer(singleTap)
img.userInteractionEnabled = true

func tapDetected() {
    print("Clicked!")
}
The problem is that the tappable region is larger than the mask, so the regions of neighbouring hexagons overlap each other. Something like this:
The yellow border shows the clickable region (not actually visible)
I am a beginner, so this may be a trivial problem, but can you help me solve it? Thank you.

If you want to do this perfectly, use the UIGestureRecognizerDelegate method gestureRecognizer(_:shouldReceiveTouch:). You will need to map the given gesture recogniser to a particular hexagon and then do pixel-precise hit-testing on the image for that hexagon. The latter part is achieved by rendering the mask image into a graphics context and inspecting the pixel at the point corresponding to the touch location.
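For reference, a minimal sketch of that pixel-level test, assuming the touch point is already in the mask image's coordinate space (the helper name maskAlpha(at:in:) is illustrative, not an existing API):

import UIKit

// Render the mask into a 1x1 bitmap, offset so the touch point lands on that
// single pixel, then read back its alpha component.
func maskAlpha(at point: CGPoint, in image: UIImage) -> CGFloat {
    guard let cgImage = image.cgImage else { return 0 }
    var pixel: [UInt8] = [0, 0, 0, 0]
    pixel.withUnsafeMutableBytes { buffer in
        guard let context = CGContext(data: buffer.baseAddress,
                                      width: 1, height: 1,
                                      bitsPerComponent: 8, bytesPerRow: 4,
                                      space: CGColorSpaceCreateDeviceRGB(),
                                      bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
        else { return }
        // Core Graphics uses a bottom-left origin, so flip y while translating.
        context.translateBy(x: -point.x, y: point.y - image.size.height)
        context.draw(cgImage, in: CGRect(origin: .zero, size: image.size))
    }
    return CGFloat(pixel[3]) / 255.0 // alpha of the pixel under the touch
}

// In gestureRecognizer(_:shouldReceiveTouch:) you would then return
// maskAlpha(at: touchLocation, in: hexMaskImage) > 0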
However, this is likely overkill. You can simplify the problem by hit-testing each shape as a circle rather than a hexagon. A circle roughly approximates the hexagon, so it will feel almost the same to the user while avoiding messy pixel-level alpha testing; the natural imprecision of touch input covers up the inaccurate edges.
Another option is to rework your views to be based on CAShapeLayer masks. CAShapeLayer exposes a path property, and UIKit's Bezier paths have their own contains-point methods, so you can hit-test against the mask's path directly.
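A minimal sketch of that idea, assuming the view's layer.mask is a CAShapeLayer whose path outlines the hexagon in the view's own coordinates:

// Inside a UIImageView (or UIView) subclass whose mask is a CAShapeLayer:
override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
    guard let shapeMask = layer.mask as? CAShapeLayer,
          let maskPath = shapeMask.path else {
        return super.point(inside: point, with: event)
    }
    // Touches outside the hexagon outline are ignored, so gestures only
    // fire inside the visible shape.
    return maskPath.contains(point)
}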

You can adopt the UIGestureRecognizerDelegate protocol, and implement the gestureRecognizer(_:shouldReceiveTouch:) method to further constrain whether or not a gesture should fire. The link suggested by #tnylee would be a good place to start in terms of figuring out how to do such hit testing.
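A sketch of wiring that up, using the current Swift spelling gestureRecognizer(_:shouldReceive:); the hexagonPath(for:) helper is an assumption standing in for however you map a recogniser to its hexagon:

class HexGridViewController: UIViewController, UIGestureRecognizerDelegate {

    // Assumed helper: returns the UIBezierPath outlining the hexagon shown by
    // the recogniser's view.
    func hexagonPath(for gestureRecognizer: UIGestureRecognizer) -> UIBezierPath? {
        // ... look up the path for gestureRecognizer.view ...
        return nil
    }

    func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer,
                           shouldReceive touch: UITouch) -> Bool {
        guard let view = gestureRecognizer.view,
              let path = hexagonPath(for: gestureRecognizer) else { return true }
        // Only let the tap through when the touch lands inside the hexagon.
        return path.contains(touch.location(in: view))
    }
}

Remember to set each recogniser's delegate (e.g. singleTap.delegate = self) so this method is actually consulted.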

@Benjamin Mayo gave great options to resolve the issue. I ended up choosing the simplest yet still effective one: hit-testing each shape as a circle.
Here is the code, in case it helps someone else:
class hexum: UIImageView {
    var maskFrame: CGRect?

    convenience init(mask: String, inside: String) {
        // Mask things:
        let masked = CALayer()
        let img = UIImage(named: mask)
        masked.contents = img?.CGImage
        masked.frame = CGRectMake(0, 0, img!.size.width, img!.size.height)
        self.init(image: UIImage(named: inside))
        self.layer.mask = masked
        self.layer.masksToBounds = true
        maskFrame = masked.frame
    }

    // The touch event things.
    // Here, I got help from @Matt in (http://stackoverflow.com/a/21081518/3462518):
    override func pointInside(point: CGPoint, withEvent event: UIEvent?) -> Bool {
        let p = UIBezierPath(ovalInRect: maskFrame!)
        return p.containsPoint(point)
    }
}
let firstOne = hexum(mask: "img1", inside: "img2")
let tap = UITapGestureRecognizer(target: self, action: "clicked")
tap.numberOfTapsRequired = 1
firstOne.userInteractionEnabled = true
firstOne.addGestureRecognizer(tap)
func clicked() {
    ...
}
Result:

Here is a Swift 3 HexagonImageView, tappable just within the hexagon:
First create a UIBezierPath subclass:
final class HexagonPath: UIBezierPath {

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    var sideLength: CGFloat = 100 {
        didSet {
            redrawPath()
        }
    }

    override init() {
        super.init()
        redrawPath()
    }

    private func redrawPath() {
        removeAllPoints()
        let yDrop = sideLength / 2
        move(to: CGPoint(x: sideLength, y: 0))
        addLine(to: CGPoint(x: sideLength * 2, y: yDrop))
        addLine(to: CGPoint(x: sideLength * 2, y: yDrop + sideLength))
        addLine(to: CGPoint(x: sideLength, y: (yDrop * 2) + sideLength))
        addLine(to: CGPoint(x: 0, y: yDrop + sideLength))
        addLine(to: CGPoint(x: 0, y: yDrop))
        // addLine(to: CGPoint(x: sideLength, y: 0))
        close()
    }
}
Then create a Hexagon UIImageView:
class HexagonImageView: UIImageView {

    let hexagonPath = HexagonPath()

    var sideLength: CGFloat = 100 {
        didSet {
            hexagonPath.sideLength = sideLength
            initialize()
        }
    }

    init() {
        super.init(frame: CGRect())
        initialize()
    }

    override init(frame: CGRect) {
        super.init(frame: frame)
        initialize()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        initialize()
    }

    private func initialize() {
        self.frame.size.width = sideLength * 2
        self.frame.size.height = sideLength * 2
        contentMode = .scaleAspectFill
        mask(withPath: hexagonPath)
    }

    // MAKE THE TAP-HIT POINT JUST THE INNER PATH
    override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
        return hexagonPath.contains(point)
    }
}
Using this Extension:
extension UIView {

    func mask(withRect rect: CGRect, inverse: Bool = false) {
        let path = UIBezierPath(rect: rect)
        let maskLayer = CAShapeLayer()
        if inverse {
            path.append(UIBezierPath(rect: self.bounds))
            maskLayer.fillRule = kCAFillRuleEvenOdd
        }
        maskLayer.path = path.cgPath
        self.layer.mask = maskLayer
    }

    func mask(withPath path: UIBezierPath, inverse: Bool = false) {
        let maskLayer = CAShapeLayer()
        if inverse {
            path.append(UIBezierPath(rect: self.bounds))
            maskLayer.fillRule = kCAFillRuleEvenOdd
        }
        maskLayer.path = path.cgPath
        self.layer.mask = maskLayer
    }
}
Finally, you can use it like this in your view controller's viewDidLoad:
override func viewDidLoad() {
    super.viewDidLoad()
    let hexImageView = HexagonImageView()
    hexImageView.image = UIImage(named: "hotcube")
    hexImageView.sideLength = 100
    let tap = UITapGestureRecognizer(target: self, action: #selector(imageTapped))
    tap.numberOfTapsRequired = 1
    hexImageView.isUserInteractionEnabled = true
    hexImageView.addGestureRecognizer(tap)
    view.addSubview(hexImageView)
}

func imageTapped() {
    print("tapped")
}

I'm not sure it's the simplest or the most correct way, but I would override touchesBegan(_:with:) and check the location of the user's tap.
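A minimal sketch of that approach, assuming the view keeps a hexagonPath (UIBezierPath) describing its masked region:

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    guard let location = touches.first?.location(in: self) else { return }
    // Only react when the touch lands inside the hexagon outline.
    if hexagonPath.contains(location) {
        print("Tapped inside the hexagon")
    }
}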

Related

Non-regular shaped UIView with hit testing (workable with Gestures) on iOS

I want to be able to create a view that I can assign a gesture to, that is irregular in shape and have the gesture only trigger when touched inside the irregular shape of the view.
My use case: we show some slide-in options in a tableView cell when a button in the cell is touched, and while those options are active we want to block touches on anything except them. Touching anywhere else should just close the options and restore normal access. So this irregular view is basically the whole screen with a box cut out of it where the active tableView cell (with the slide-in options) sits; only touches inside that cell should go through, and everything else should be trapped by the gesture, which closes the options and dismisses this special irregular touch-catching view.
It appears I need to make the view's hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? function return correctly and it should work. The problem is that my version of this function does not seem to work correctly and I am not sure why. Based on the even/odd test described in the docs, my debugging shows it should work.
The fill with the view's background color shows the correct shape, so it is just the hit test that seems to be off. When I touch, my gesture ends up calling hitTest, which returns a hit when it seems that it should not; inspecting the path in the debugger shows that a line drawn from the point to the outside would cross two path lines.
My class drew inspiration from UIView with cut out segment and shadow and https://github.com/hackiftekhar/IQIrregularView
My class:
class CutView: UIView {
    var rectsArray: [CGRect]?

    convenience init(frame: CGRect, rectsArray: [CGRect]) {
        self.init(frame: frame)
        self.rectsArray = rectsArray
        let path = UIBezierPath(rect: frame)
        for holeRect in rectsArray {
            path.append(UIBezierPath(rect: holeRect))
        }
        let maskLayer = CAShapeLayer()
        maskLayer.frame = self.bounds
        maskLayer.masksToBounds = false
        path.usesEvenOddFillRule = true
        maskLayer.path = path.cgPath
        maskLayer.fillRule = .evenOdd
        self.layer.mask = maskLayer
    }

    override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        if let shapeLayer = self.layer.mask as? CAShapeLayer, let contains = shapeLayer.path?.contains(point), contains {
            return super.hitTest(point, with: event)
        } else {
            return nil
        }
    }
}
Specifically, in the debugger, the .contains(point) is returning true.
Thanks for any insight on this (or alternative ways of achieving what I want to achieve).
Getting the path from a CAShapeLayer gives you a CGPath, so you need to use contains(_:using:transform:) with the even-odd rule (see the docs page).
Here's a modified version of your class, along with an example controller:
class CutView: UIView {
    var rectsArray: [CGRect]?

    convenience init(frame: CGRect, rectsArray: [CGRect]) {
        self.init(frame: frame)
        self.rectsArray = rectsArray
        let path = UIBezierPath(rect: frame)
        for holeRect in rectsArray {
            path.append(UIBezierPath(rect: holeRect))
        }
        let maskLayer = CAShapeLayer()
        maskLayer.frame = self.bounds
        maskLayer.masksToBounds = false
        path.usesEvenOddFillRule = true
        maskLayer.path = path.cgPath
        maskLayer.fillRule = .evenOdd
        self.layer.mask = maskLayer
    }

    override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        if !self.frame.contains(point) {
            print("point is outside view frame")
            return nil
        }
        if let shapeLayer = self.layer.mask as? CAShapeLayer {
            if let path = shapeLayer.path {
                let pointInHole = !path.contains(point, using: .evenOdd, transform: .identity)
                print("Point is in a \"hole\"?", pointInHole)
                if pointInHole {
                    return super.hitTest(point, with: event)
                }
            }
        }
        return nil
    }
}
class ViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        let r: [CGRect] = [
            CGRect(x: 80, y: 120, width: 100, height: 50),
            CGRect(x: 120, y: 240, width: 60, height: 120),
        ]
        let v = CutView(frame: CGRect(x: 40, y: 80, width: 240, height: 400), rectsArray: r)
        v.backgroundColor = .systemBlue
        view.addSubview(v)
    }
}
I might have the return super.hitTest(point, with: event) and return nil flipped - not sure which way you want it returned.
But, this example controller will print to the debug console, letting you know if the point is outside the view itself, or true/false for "in a hole."

Is there any way to implement before/after view functionality in Swift (iOS) for side-by-side image comparison as we move a slider left or right?

Video attached here.
I want to create this before-and-after view for side-by-side image comparison (image here).
You can do this with a layer mask.
Essentially:
private let maskLayer = CALayer()
maskLayer.backgroundColor = UIColor.black.cgColor
leftImage.layer.mask = maskLayer
var r = bounds
r.size.width = bounds.width * pct
maskLayer.frame = r
That will "clip" the image view and show only the portion covered by the mask layer.
Here's a complete example...
Using these "before" (left-side) and "after" (right-side) images:
With this sample custom view:
class RevealImageView: UIImageView {

    public var leftImage: UIImage? {
        didSet {
            if let img = leftImage {
                leftImageLayer.contents = img.cgImage
            }
        }
    }
    public var rightImage: UIImage? {
        didSet {
            if let img = rightImage {
                self.image = img
            }
        }
    }

    // private properties
    private let leftImageLayer = CALayer()
    private let maskLayer = CALayer()
    private let lineView = UIView()

    private var pct: CGFloat = 0.5 {
        didSet {
            updateView()
        }
    }

    convenience init() {
        self.init(frame: .zero)
    }
    override init(frame: CGRect) {
        super.init(frame: frame)
        commonInit()
    }
    required init?(coder: NSCoder) {
        super.init(coder: coder)
        commonInit()
    }

    private func commonInit() {
        // any opaque color
        maskLayer.backgroundColor = UIColor.black.cgColor
        leftImageLayer.mask = maskLayer
        // the "reveal" image layer
        layer.addSublayer(leftImageLayer)
        // the vertical line
        lineView.backgroundColor = .lightGray
        addSubview(lineView)
        isUserInteractionEnabled = true
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let t = touches.first
        else { return }
        let loc = t.location(in: self)
        pct = loc.x / bounds.width
    }
    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let t = touches.first
        else { return }
        let loc = t.location(in: self)
        pct = loc.x / bounds.width
    }

    private func updateView() {
        // move the vertical line to the touch point
        lineView.frame = CGRect(x: bounds.width * pct, y: bounds.minY, width: 1, height: bounds.height)
        // update the "left image" mask to the touch point
        var r = bounds
        r.size.width = bounds.width * pct
        // disable layer animation
        CATransaction.begin()
        CATransaction.setDisableActions(true)
        maskLayer.frame = r
        CATransaction.commit()
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        leftImageLayer.frame = bounds
        updateView()
    }
}
and this simple example controller:
class RevealImageTestVC: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        guard let leftIMG = UIImage(named: "before"),
              let rightIMG = UIImage(named: "after")
        else {
            return
        }
        let revealView = RevealImageView()
        revealView.translatesAutoresizingMaskIntoConstraints = false
        view.addSubview(revealView)
        let g = view.safeAreaLayoutGuide
        NSLayoutConstraint.activate([
            revealView.leadingAnchor.constraint(equalTo: g.leadingAnchor, constant: 20.0),
            revealView.trailingAnchor.constraint(equalTo: g.trailingAnchor, constant: -20.0),
            revealView.centerYAnchor.constraint(equalTo: g.centerYAnchor),
            // give reveal view the same aspect ratio as the image
            revealView.heightAnchor.constraint(equalTo: revealView.widthAnchor, multiplier: leftIMG.size.height / leftIMG.size.width),
        ])
        revealView.leftImage = leftIMG
        revealView.rightImage = rightIMG
    }
}
We get this - the vertical line and the "viewable" portion of the images will move as you touch-and-drag:

Changing the size of a CALayer also changes the size of previously added layers

I'm trying to draw over a UIImageView with this custom class, and I would like to be able to change the size and color of the lines being drawn by the user. But every time I change the size or color, the previously added layers' color and size also change.
class DrawingImageView: UIImageView {

    private lazy var path = UIBezierPath()
    private lazy var previousTouchPoint = CGPoint.zero
    var strokeColor: CGColor!
    var strokeWidth: CGFloat!

    override init(frame: CGRect) {
        super.init(frame: frame)
    }
    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesBegan(touches, with: event)
        let shapeLayer = CAShapeLayer()
        shapeLayer.lineWidth = strokeWidth ?? 4
        shapeLayer.strokeColor = strokeColor ?? UIColor.black.cgColor
        shapeLayer.lineCap = .round
        shapeLayer.lineJoin = .round
        layer.addSublayer(shapeLayer)
        if let location = touches.first?.location(in: self) { previousTouchPoint = location }
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesMoved(touches, with: event)
        if let location = touches.first?.location(in: self) {
            path.move(to: location)
            path.addLine(to: previousTouchPoint)
            previousTouchPoint = location
            let layerA = layer.sublayers?.last as! CAShapeLayer
            layerA.path = path.cgPath
        }
    }
}
This is how I assign that class to tempImgView:
let mainImageView = UIImageView()
var tempImgView: DrawingImageView?
mainImageView.frame = CGRect(x: 0, y: self.view.bounds.height / 7, width: imageWidth, height: imageHieght)
mainImageView.contentMode = .scaleAspectFit
mainImageView.layer.zPosition = 2
mainImageView.isOpaque = false
mainImageView.backgroundColor = .clear
mainImageView.isUserInteractionEnabled = true
mainImageView.tintColor = .clear
self.view.addSubview(mainImageView)
func DrawOverImage() {
    mainImageView.isHidden = true
    let drawnImageView = DrawingImageView(frame: mainImageView.frame)
    drawnImageView.image = mainImageView.image
    drawnImageView.contentMode = .scaleAspectFit
    drawnImageView.isUserInteractionEnabled = true
    drawnImageView.layer.zPosition = 3
    self.tempImgView?.isUserInteractionEnabled = true
    self.tempImgView = drawnImageView
    self.view.addSubview(tempImgView!)
}
I also tried creating an array of CAShapeLayers, adding layers to that array, and then replacing a sublayer like view.layer.sublayers[0] = layers[0], but that also didn't work and I couldn't find any useful resource online.
Any help or suggestion would be really appreciated, thanks.
That is because you are continuing the same path each time, so the latest stroke color and width get applied to the whole path.
Without looking at much of your other logic, I believe you should initialize a new path each time in touchesBegan, and this new path should then be used in touchesMoved.
So in touchesBegan, try adding path = UIBezierPath() after layer.addSublayer(shapeLayer) and see if this gives you what you are looking for.
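A minimal sketch of touchesBegan with that one-line change applied (the rest of the class stays as posted):

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    super.touchesBegan(touches, with: event)
    let shapeLayer = CAShapeLayer()
    shapeLayer.lineWidth = strokeWidth ?? 4
    shapeLayer.strokeColor = strokeColor ?? UIColor.black.cgColor
    shapeLayer.lineCap = .round
    shapeLayer.lineJoin = .round
    layer.addSublayer(shapeLayer)
    // Start a fresh path so earlier strokes keep their own color and width.
    path = UIBezierPath()
    if let location = touches.first?.location(in: self) { previousTouchPoint = location }
}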

Change color of a custom UIButton on .selected state

I have designed a radial gradient button for my app's home screen:
class RadialGradientButton: UIButton {
    var insideColor: UIColor = ColorScheme.limeGreen
    var outsideColor: UIColor = UIColor.white
    var gradientLocation: CGFloat = 0.9

    override func draw(_ rect: CGRect) {
        let colors = [insideColor.cgColor, outsideColor.cgColor] as CFArray
        let center = CGPoint(x: rect.size.width / 2, y: rect.size.height / 2)
        let endRadius = rect.size.width / 2
        let gradient = CGGradient(colorsSpace: nil, colors: colors, locations: [gradientLocation, 1.0])
        UIGraphicsGetCurrentContext()!.drawRadialGradient(gradient!, startCenter: center, startRadius: 0.0, endCenter: center, endRadius: endRadius, options: .drawsBeforeStartLocation)
    }
}
Now I've run into a user experience issue with this button: when it is tapped, its appearance does not change in any way because of the gradient color settings. I want it to behave like any system button, that is, insideColor should get darker in the .selected state.
I've tried playing around with setNeedsDisplay() when the Touch Down and Touch Up Inside events fire, but that method does not perform immediate redrawing and can be harsh on performance.
Maybe someone here can point me in the right direction.
The draw() method is called when a view is first displayed or when an event invalidates a visible part of the view. When you override it, UIKit creates and configures a graphics context and your gradient is rendered into it; the drawing is not tied to the control's state, which is why your selected background is not working.
You can achieve this behaviour by inserting two layers into your view, one for the highlighted state and another for the normal state, and then toggling them on touch events.
import UIKit
import QuartzCore

class RoundedButton: UIButton {

    var gradientBackgroundLayer: CAGradientLayer?
    var selectedLayer: CALayer?

    var insideColor: UIColor = UIColor.green
    var outsideColor: UIColor = UIColor.white
    var gradientLocation: NSNumber = 0.9
    let kAnimationLength: CGFloat = 0.15

    override init(frame: CGRect) {
        super.init(frame: frame)
        commonInit()
    }
    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        commonInit()
    }
    override func awakeFromNib() {
        super.awakeFromNib()
        commonInit()
    }

    func commonInit() {
        self.backgroundColor = nil
        self.clipsToBounds = true
        self.setTitleColor(UIColor.white, for: UIControlState.normal)

        let colors: [CGColor] = [insideColor.cgColor, outsideColor.cgColor]
        let locations: [NSNumber] = [0, gradientLocation]
        let layer: CAGradientLayer = CAGradientLayer()
        layer.frame = self.bounds
        layer.colors = colors
        layer.locations = locations
        self.gradientBackgroundLayer = layer

        let selectedLayer: CALayer = CALayer()
        selectedLayer.frame = self.bounds
        selectedLayer.backgroundColor = UIColor.lightGray.cgColor
        self.selectedLayer = selectedLayer
    }

    func performLayout() {
        self.gradientBackgroundLayer?.frame = self.bounds
        self.selectedLayer?.frame = self.bounds
        self.layer.insertSublayer(self.gradientBackgroundLayer!, at: 1)
        self.layer.insertSublayer(self.selectedLayer!, at: 0)
        self.layer.cornerRadius = self.frame.size.height / 2.0
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        performLayout()
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesBegan(touches, with: event)
        UIView.animate(withDuration: TimeInterval(kAnimationLength), delay: 0, options: UIViewAnimationOptions.curveEaseIn, animations: {
            self.isHighlighted = true
            self.selectedLayer?.isHidden = false
            self.gradientBackgroundLayer?.isHidden = true
        }, completion: nil)
    }

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesEnded(touches, with: event)
        UIView.animate(withDuration: TimeInterval(kAnimationLength), delay: 0, options: UIViewAnimationOptions.curveEaseOut, animations: {
            self.isHighlighted = false
            self.gradientBackgroundLayer?.isHidden = false
            self.selectedLayer?.isHidden = true
        }, completion: nil)
    }
}
Try this; hope it will work. On Touch Up Inside:
if self.mButton.isSelected == true {
    self.mButton.isSelected = false
    self.mButton.setBackgroundColor(UIColor.red, forState: .normal)
} else {
    self.mButton.isSelected = true
    self.mButton.setBackgroundColor(UIColor.blue, forState: .normal)
}
and you can use this code in viewDidLoad()
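Note that UIButton has no built-in setBackgroundColor(_:forState:); the snippet above presumably relies on a small helper like the common extension sketched below (an assumption, not part of UIKit), which renders a 1x1 image of the colour and passes it to setBackgroundImage(_:for:):

extension UIButton {
    // Assumed helper, not a UIKit API: backs a solid colour with a 1x1 image.
    func setBackgroundColor(_ color: UIColor, forState state: UIControlState) {
        // Render a 1x1 image filled with the given colour...
        UIGraphicsBeginImageContext(CGSize(width: 1, height: 1))
        color.setFill()
        UIRectFill(CGRect(x: 0, y: 0, width: 1, height: 1))
        let image = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        // ...and use it as the stretchable background image for that state.
        setBackgroundImage(image, for: state)
    }
}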

Swift callback function for draw()?

I have a customView where I override drawRect:. I also have some cases where I want to display a UILabel on top of my customView:
if self.ticks.count < 50 {
    self.label.frame = self.bounds
    self.label.text = "This chart requires 50 or more ticks.\nKeep Ticking."
    self.addSubview(label)
}
However if I call this code before the drawRect is called, then my label doesn't show up. Is there a callback method like viewDidDraw or something?
EDIT
Someone asked for it, so here is all the code that is now doing what I need it to do. I think the answer to the question is to override layoutSubviews.
@IBDesignable open class CustomView: UIView {

    @objc open var ticks: [Tick] = []
    let label = UILabel()

    required public init?(coder: NSCoder) {
        super.init(coder: coder)
        setup()
    }

    required public override init(frame: CGRect) {
        super.init(frame: frame)
        setup()
    }

    func setup() {
        self.backgroundColor = UIColor.clear
        self.label.frame = self.bounds
        self.label.text = "This chart requires 50 or more ticks.\nKeep Ticking."
        self.label.numberOfLines = 0
        self.label.textAlignment = .center
        self.addSubview(label)
    }

    override open func layoutSubviews() {
        self.label.frame = self.bounds
    }

    @objc open func setSessions(sessions: [Session]) {
        self.ticks = []
        for session in sessions.reversed() {
            if self.ticks.count < 250 {
                self.ticks = self.ticks + (session.getShots() as! [Tick])
            } else {
                break
            }
        }
        if self.ticks.count < 50 {
            self.label.isHidden = false
        } else {
            self.label.isHidden = true
        }
        self.bringSubview(toFront: self.label)
    }

    override open func draw(_ rect: CGRect) {
        let context = UIGraphicsGetCurrentContext()!
        let width = rect.width - 10
        let height = rect.height - 10
        let center = CGPoint(x: width/2 + 5, y: height/2 + 5)
        let big_r = min(width, height)/2
        context.setLineWidth(1.5)
        UIColor.white.setStroke()
        // draw the rings
        for i in stride(from: 1, to: 7, by: 1) {
            let r = big_r * CGFloat(i) / 6.0
            context.addArc(center: center, radius: r, startAngle: 0, endAngle: CGFloat(2 * Double.pi), clockwise: false)
            context.strokePath()
        }
        // draw the ticks
        if self.ticks.count > 49 {
            UIColor.red.setFill()
            for tick in self.ticks.prefix(250) {
                let radius = min((100.0 - CGFloat(tick.score))/30.0 * big_r, big_r)
                let x = Utilities.calculateX(tick, radius)
                let y = Utilities.calculateY(tick, radius)
                let point = CGPoint(x: x + center.x, y: y + center.y)
                context.addArc(center: point, radius: CGFloat(DOT_WIDTH/2), startAngle: 0, endAngle: CGFloat(2 * Double.pi), clockwise: false)
                context.fillPath()
            }
        }
    }
}
You can make your custom view redraw by calling setNeedsDisplay() when needed. After that, you can add the label.
You can set up a delegate for the view class and call the delegate method at the end of the draw function. However, if you call it directly, draw will not return until the delegate method has been processed, which means your code would execute before the new drawing is refreshed on screen. In that case, call it asynchronously:
// at the end of the draw function
DispatchQueue.main.async {
    self.delegate?.drawEnd()
}
