How to drag a certain image in iOS?

I want to know how I can drag an image across the screen and what code is needed. I tried looking this up, but only older versions of Swift have answers and they no longer work. I want to actually drag the image, not tap a spot on the screen and have it jump there. Just drag.
My attempt below gives me the error:
"Use of undeclared type 'uitouch'"
import UIKit

class DraggableImage: UIImageView {
    override func touchesMoved(touches: Set<uitouch>, withEvent event: UIEvent?) {
        if let touch = touches.first {
            let position = touch.locationInView(superview)
            center = CGPointMake(position.x, position.y)
        }
    }
}
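For reference, the error comes from the pre-Swift-3 spellings: the type is UITouch (capitalized) and the touch APIs were renamed. A minimal modernized sketch of the same snippet (this moves the view's center to the finger, and still needs isUserInteractionEnabled = true, which the answers below set in the initializer):

import UIKit

class DraggableImage: UIImageView {
    // Swift 3+ signature: underscore label and "with event".
    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let touch = touches.first else { return }
        // Moving the center to the touch location makes the view track the finger.
        center = touch.location(in: superview)
    }
}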

You need to subclass UIImageView, set isUserInteractionEnabled = true in the initializer, and then override touchesMoved(_:with:). My code is this:
class DraggableImage: UIImageView {
    var localTouchPosition: CGPoint?

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        self.layer.borderWidth = 1
        self.layer.borderColor = UIColor.red.cgColor
        self.isUserInteractionEnabled = true
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        let touch = touches.first
        self.localTouchPosition = touch?.preciseLocation(in: self)
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesMoved(touches, with: event)
        let touch = touches.first
        guard let location = touch?.location(in: self.superview), let localTouchPosition = self.localTouchPosition else {
            return
        }
        self.frame.origin = CGPoint(x: location.x - localTouchPosition.x, y: location.y - localTouchPosition.y)
    }

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        self.localTouchPosition = nil
    }

    /*
    // Only override draw() if you perform custom drawing.
    // An empty implementation adversely affects performance during animation.
    override func draw(_ rect: CGRect) {
        // Drawing code
    }
    */
}
This is how it looks
Hope this helps
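If you want to create the image view in code instead of from the storyboard, a hypothetical frame initializer plus usage could look like this (the class above only implements init?(coder:); the names and asset here are illustrative, not from the answer):

// Hypothetical addition to DraggableImage for programmatic creation.
override init(frame: CGRect) {
    super.init(frame: frame)
    self.isUserInteractionEnabled = true
}

Then, from a view controller:

let draggable = DraggableImage(frame: CGRect(x: 40, y: 80, width: 120, height: 120))
draggable.image = UIImage(named: "photo")   // "photo" is a placeholder asset name
view.addSubview(draggable)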

Create a separate class for the moving view (here a UIImageView subclass) and add the following code:
import UIKit

class objectClass: UIImageView, UIGestureRecognizerDelegate {

    /*
    // Only override draw() if you perform custom drawing.
    // An empty implementation adversely affects performance during animation.
    override func draw(_ rect: CGRect) {
        // Drawing code
    }
    */

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        let touch: UITouch = touches.first!
        self.center = touch.location(in: self.superview)
    }
}
In the main view controller, create an instance of that class:
var newView: objectClass = objectClass()
Then, in the button action that adds a new view:
@IBAction func objectAdded(theButton: UIButton!) {
    let frame = CGRect(x: 100, y: 100, width: 44, height: 44)
    newView = objectClass(frame: frame)
    if theButton.titleLabel?.text == "image1" {
        newView.image = UIImage(named: "1")
    } else if theButton.titleLabel?.text == "image2" {
        newView.image = UIImage(named: "2")
    } else {
        newView.image = UIImage(named: "3")
    }
    newView.contentMode = .scaleAspectFill
    newView.isUserInteractionEnabled = true
    self.view.addSubview(newView)
    newView.alpha = 0
    UIView.animate(withDuration: 0.4) {
        self.newView.alpha = 1
    }
    UIView.animate(withDuration: 0.6, delay: 0, options: .curveEaseOut, animations: { () -> Void in
        self.sliderViewBottomLayoutConstraint.constant = self.sliderViewBottomLayoutConstraint.constant - self.sliderViewBottomLayoutConstraint.constant
        self.view.layoutIfNeeded()
    }, completion: nil)
    image1Button.isEnabled = false
    image2Button.isEnabled = false
    image3Button.isEnabled = false
    let pinchGesture: UIPinchGestureRecognizer = UIPinchGestureRecognizer(target: self, action: #selector(ViewController.recognizePinchGesture(sender:)))
    pinchGesture.delegate = self
    let rotateGesture: UIRotationGestureRecognizer = UIRotationGestureRecognizer(target: self, action: #selector(ViewController.recognizeRotateGesture(sender:)))
    let tapGesture: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(ViewController.RemoveSelectedImageOnTap(sender:)))
    tapGesture.numberOfTapsRequired = 2
    self.newView.addGestureRecognizer(tapGesture)
    self.newView.addGestureRecognizer(pinchGesture)
    self.newView.addGestureRecognizer(rotateGesture)
}

@objc func recognizePinchGesture(sender: UIPinchGestureRecognizer) {
    sender.view!.transform = sender.view!.transform.scaledBy(x: sender.scale, y: sender.scale)
    sender.scale = 1
}

@objc func recognizeRotateGesture(sender: UIRotationGestureRecognizer) {
    sender.view!.transform = sender.view!.transform.rotated(by: sender.rotation)
    sender.rotation = 0
}
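The double-tap selector above references RemoveSelectedImageOnTap(sender:), which the answer never shows; a hypothetical implementation could simply remove the tapped view and re-enable the buttons:

@objc func RemoveSelectedImageOnTap(sender: UITapGestureRecognizer) {
    // Hypothetical handler, not part of the original answer.
    sender.view?.removeFromSuperview()
    image1Button.isEnabled = true
    image2Button.isEnabled = true
    image3Button.isEnabled = true
}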

Related

SKScene nodes not detecting touch

I am trying to make an ARKit app for iOS and the nodes in the scene are not responding to touch. The scene is displayed properly, but I haven't been able to detect any touch.
fileNamed: "TestScene" refers to a TestScene.sks file in my project, which is empty; I add the node in code as shown below.
let detailPlane = SCNPlane(width: xOffset, height: xOffset * 1.4)
let testScene = SKScene(fileNamed: "TestScene")
testScene?.isUserInteractionEnabled = true
let winner = TouchableNode(fontNamed: "Chalkduster")
winner.text = "You Win!"
winner.fontSize = 65
winner.fontColor = SKColor.green
winner.position = CGPoint(x: 0, y: 0)
testScene?.addChild(winner)
let material = SCNMaterial()
material.diffuse.contents = testScene
material.diffuse.contentsTransform = SCNMatrix4Translate(SCNMatrix4MakeScale(1, -1, 1), 0, 1, 0)
detailPlane.materials = [material]
let node = SCNNode(geometry: detailPlane)
rootNode.addChildNode(node)
For TouchableNode I have the following class
class TouchableNode : SKLabelNode {
    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        print("Touch detected")
    }
}
I've achieved this effect using a gesture recognizer:
private func registerGestureRecognizers() -> Void {
    let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleTap))
    sceneView.addGestureRecognizer(tapGestureRecognizer)
}
Then have a function to handle the tap gesture:
@objc private func handleTap(sender: UITapGestureRecognizer) -> Void {
    let sceneViewTappedOn = sender.view as! SCNView
    let touchCoordinates = sender.location(in: sceneViewTappedOn)
    let hitTest = sceneViewTappedOn.hitTest(touchCoordinates)
    if !hitTest.isEmpty {
        let hitResults = hitTest.first!
        var hitNode = hitResults.node
        // do something with the node that has been tapped
    }
}
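The registration function still has to be called once, for example from viewDidLoad (this placement is an assumption; the answer does not show it):

override func viewDidLoad() {
    super.viewDidLoad()
    // Assumed placement: register the tap recognizer once the scene view exists.
    registerGestureRecognizers()
}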
You need to do isUserInteractionEnabled = true first.
So, something like:
class TouchableNode : SKLabelNode {
    override init() {
        super.init()
        isUserInteractionEnabled = true
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        isUserInteractionEnabled = true
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        print("Touch detected")
    }
}
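If you would rather not touch the initializers, the same flag can also be set at the point where the node is created, based on the question's own setup code:

let winner = TouchableNode(fontNamed: "Chalkduster")
winner.isUserInteractionEnabled = true   // without this, touchesBegan is never delivered to the node
winner.text = "You Win!"
testScene?.addChild(winner)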

Touch drawing not working when UIPanGestureRecognizer implemented for view

I have a screen for testing the device's touch screen by popping bubbles. Each bubble has an image view (a cross) added as a subview, and the user swipes over the bubbles to check the touch screen.
I also want drawing on the same view: when the user swipes a finger over the bubbles, a line should be drawn. I have a separate class for drawing and assign it to the main parent view of the controller.
If I remove the UIPanGestureRecognizer code, then drawing works and there is no lag.
If I add the gesture to the view for popping the bubbles, like this:
view.addGestureRecognizer(gestureRecognizer)
then there is a lag and drawing doesn't work.
I want both things: popping bubbles and drawing on the view.
The main problem with this gesture is that when I add it to the view, drawing works without any lag but popping the bubbles doesn't work.
let gestureRecognizer : UIPanGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(panGestureRecognized(_:)))
gestureRecognizer.maximumNumberOfTouches = 1
gestureRecognizer.minimumNumberOfTouches = 1
view.addGestureRecognizer(gestureRecognizer)
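One likely factor, not stated in the question, is that a gesture recognizer cancels the view's own touch handling by default, so the drawing view stops receiving touchesMoved once the pan is recognized. A minimal sketch of letting both coexist, assuming the setup above:

let gestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(panGestureRecognized(_:)))
gestureRecognizer.maximumNumberOfTouches = 1
gestureRecognizer.minimumNumberOfTouches = 1
// Keep delivering touchesBegan/Moved/Ended to the drawing view even after
// the pan gesture is recognized; the default behavior is to cancel them.
gestureRecognizer.cancelsTouchesInView = false
view.addGestureRecognizer(gestureRecognizer)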
Drawing view class
import UIKit

class DrawingView: UIView {
    var drawColor = UIColor.black
    var lineWidth: CGFloat = 5

    private var lastPoint: CGPoint!
    private var bezierPath: UIBezierPath!
    private var pointCounter: Int = 0
    private let pointLimit: Int = 128
    private var preRenderImage: UIImage!

    // MARK: - Initialization

    override init(frame: CGRect) {
        super.init(frame: frame)
        initBezierPath()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        initBezierPath()
    }

    func initBezierPath() {
        bezierPath = UIBezierPath()
        bezierPath.lineCapStyle = CGLineCap.round
        bezierPath.lineJoinStyle = CGLineJoin.round
    }

    // MARK: - Touch handling

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        let touch: AnyObject? = touches.first
        lastPoint = touch!.location(in: self)
        pointCounter = 0
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        let touch: AnyObject? = touches.first
        let newPoint = touch!.location(in: self)

        bezierPath.move(to: lastPoint)
        bezierPath.addLine(to: newPoint)
        lastPoint = newPoint

        pointCounter += 1

        if pointCounter == pointLimit {
            pointCounter = 0
            renderToImage()
            setNeedsDisplay()
            bezierPath.removeAllPoints()
        } else {
            setNeedsDisplay()
        }
    }

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        pointCounter = 0
        renderToImage()
        setNeedsDisplay()
        bezierPath.removeAllPoints()
    }

    override func touchesCancelled(_ touches: Set<UITouch>, with event: UIEvent?) {
        touchesEnded(touches, with: event)
    }

    // MARK: - Pre render

    func renderToImage() {
        UIGraphicsBeginImageContextWithOptions(self.bounds.size, false, 0.0)
        if preRenderImage != nil {
            preRenderImage.draw(in: self.bounds)
        }

        bezierPath.lineWidth = lineWidth
        drawColor.setFill()
        drawColor.setStroke()
        bezierPath.stroke()

        preRenderImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
    }

    // MARK: - Render

    override func draw(_ rect: CGRect) {
        super.draw(rect)

        if preRenderImage != nil {
            preRenderImage.draw(in: self.bounds)
        }

        bezierPath.lineWidth = lineWidth
        drawColor.setFill()
        drawColor.setStroke()
        bezierPath.stroke()
    }

    // MARK: - Clearing

    func clear() {
        preRenderImage = nil
        bezierPath.removeAllPoints()
        setNeedsDisplay()
    }

    // MARK: - Other

    func hasLines() -> Bool {
        return preRenderImage != nil || !bezierPath.isEmpty
    }
}

UIButton Frame changed After Move from one Position To Another

I have a UIButton in the storyboard, as in the screen below, and I move the UIButton from one position to another by following this answer.
Edit
Another thing: I want to rotate the UIButton. I tried the code below and it works fine, but after rotating the UIButton, when I try to move it from one place to another, the UIButton's frame changes.
Entire UIButton code:
class DraggableButton: UIButton {
    var localTouchPosition: CGPoint?
    var lastRotation: CGFloat = 0

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)

        // ROTATION CODE
        let rotate = UIRotationGestureRecognizer(target: self, action: #selector(rotatedView(_:)))
        self.addGestureRecognizer(rotate)
    }

    // SCROLLING CONTENT FROM ONE POSITION TO ANOTHER
    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesBegan(touches, with: event)
        let touch = touches.first
        self.localTouchPosition = touch?.preciseLocation(in: self)
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesMoved(touches, with: event)
        let touch = touches.first
        guard let location = touch?.location(in: self.superview), let localTouchPosition = self.localTouchPosition else {
            return
        }
        self.frame.origin = CGPoint(x: location.x - localTouchPosition.x, y: location.y - localTouchPosition.y)
        print(self.frame)
    }

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesEnded(touches, with: event)
        self.localTouchPosition = nil
    }

    override func touchesCancelled(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesCancelled(touches, with: event)
        self.localTouchPosition = nil
    }

    // ROTATION CODE
    @objc func rotatedView(_ sender: UIRotationGestureRecognizer) {
        var originalRotation = CGFloat()
        if sender.state == .began {
            sender.rotation = lastRotation
            originalRotation = sender.rotation
        } else if sender.state == .changed {
            let newRotation = sender.rotation + originalRotation
            sender.view?.transform = CGAffineTransform(rotationAngle: newRotation)
        } else if sender.state == .ended {
            lastRotation = sender.rotation
        }
    }
}
Edit 1
I uploaded a video of the issue.
Edit 2
If I use the UIButton rotation and movement code separately, each works, but when I combine both, this issue appears.
I believe the issue is that the rotation is about the center of the view, so when you apply the rotation the frame size increases which throws off the distance relative to the origin of the frame.
I was able to fix this by moving the view relative to its center:
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    super.touchesBegan(touches, with: event)
    let touch = touches.first
    guard let location = touch?.location(in: self.superview) else { return }

    // Store localTouchPosition relative to center
    self.localTouchPosition = CGPoint(x: location.x - self.center.x, y: location.y - self.center.y)
}

override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
    super.touchesMoved(touches, with: event)
    let touch = touches.first
    guard let location = touch?.location(in: self.superview), let localTouchPosition = self.localTouchPosition else {
        return
    }
    self.center = CGPoint(x: location.x - localTouchPosition.x, y: location.y - localTouchPosition.y)
    print(self.frame)
}

Drawing on a large UIView takes a lot of memory

When I start to draw on a large UIView (width: 3700, height: 40000), it takes a lot of memory.
When the app starts, memory usage is around 150 MB; once I start drawing on the view (which calls setNeedsDisplay), it climbs to around 1 GB and the app crashes.
class DrawingVc: UIViewController {
    let contentView = DrawableView()

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .white
        contentView.translatesAutoresizingMaskIntoConstraints = false
        contentView.backgroundColor = .clear
        self.view.addSubview(contentView)
        contentView.frame = CGRect(x: 0, y: 0, width: view.frame.width, height: view.frame.height * 50)
    }
}
Here is the code of the custom view; as you can see, setNeedsDisplay is called on every touchesMoved:
class DrawableView: UIView {
    var mLastPath: UIBezierPath?
    weak var scdelegate: DrawableViewDelegate?
    var isDrawEnable = true
    private var drawingLines: [UIBezierPath] = []
    var drawingList = [UIBezierPath]()
    var prevPoint: CGPoint?
    var isFirst = true

    override init(frame: CGRect) {
        super.init(frame: frame)
    }

    required public init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
    }

    override func draw(_ rect: CGRect) {
        debugPrint("request draw")
        drawLine()
    }

    private func drawLine() {
        UIColor.blue.setStroke()
        for line in drawingList {
            line.lineWidth = 4
            line.stroke()
            line.lineCapStyle = .round
        }
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        if touches.count == 2 {
            return
        }
        let location = (touches.first?.location(in: self))!
        mLastPath = UIBezierPath()
        mLastPath?.move(to: location)
        prevPoint = location
        drawingList.append(mLastPath!)
    }

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        debugPrint("touchesMoved: ", (touches.first?.location(in: self).x)!, (touches.first?.location(in: self).y)!)
        if let coalescedtouches = event?.coalescedTouches(for: touches.first!) {
            for coalescedTouch in coalescedtouches {
                let locationInView = coalescedTouch.location(in: self)
                if let prevPoint = prevPoint {
                    let midPoint = CGPoint(x: (locationInView.x + prevPoint.x) / 2, y: (locationInView.y + prevPoint.y) / 2)
                    if isFirst {
                        mLastPath?.addLine(to: midPoint)
                    } else {
                        mLastPath?.addQuadCurve(to: midPoint, controlPoint: prevPoint)
                    }
                    isFirst = false
                } else {
                    mLastPath?.move(to: locationInView)
                }
                prevPoint = locationInView
            }
        }
        setNeedsDisplay()
    }
}
What causes this problem, and how can I fix it?
Your view is larger than the largest possible screen on an iOS device, so I suppose your view is embedded in a scroll view. You should only draw the visible parts of your view. Unfortunately, this is not supported by UIView directly. You may take a look at CATiledLayer, which supports drawing only the visible parts of a layer, and it also supports different levels of detail for zoomed layers.
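A minimal sketch of backing the drawing view with a CATiledLayer (the subclass name, tile size, and level-of-detail values are arbitrary choices, not from the question):

class TiledDrawableView: DrawableView {
    // Back the view with a CATiledLayer so draw(_:) is invoked per visible
    // tile instead of for the entire 3700 x 40000 surface at once.
    override class var layerClass: AnyClass {
        return CATiledLayer.self
    }

    override init(frame: CGRect) {
        super.init(frame: frame)
        configureTiledLayer()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        configureTiledLayer()
    }

    private func configureTiledLayer() {
        guard let tiledLayer = layer as? CATiledLayer else { return }
        tiledLayer.tileSize = CGSize(width: 512, height: 512)  // arbitrary tile size
        tiledLayer.levelsOfDetail = 4                          // detail levels when zoomed out
        tiledLayer.levelsOfDetailBias = 2                      // detail levels when zoomed in
    }
}

Note that CATiledLayer draws its tiles asynchronously, so the stroking done in draw(_:) must not depend on state that changes mid-draw; this is a sketch of the approach, not a drop-in replacement.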

Removing lagging latency in drawing UIBezierPath smooth lines in Swift

The code below draws smooth curved lines by overriding touches, but there is noticeable lagging or latency. The code uses addCurveToPoint and calls setNeedsDisplay after every 4 touch points which causes a jumpy appearance as the drawing doesn't keep up with finger movements. To remove the lagging or perceived latency, touch points 1, 2, 3 (leading up to touch point 4) could be temporarily filled with addQuadCurveToPoint and addLineToPoint.
How can this actually be achieved in code to remove perceived lagging by using a temporary Line and QuadCurved line before displaying a final Curved line?
If the below class is attached to one UIView (e.g. viewOne or self), how do I make a copy of the drawing to another UIView outside the class (e.g. viewTwo) after touchesEnded?
// ViewController.swift

import UIKit

class drawSmoothCurvedLinesWithLagging: UIView {
    let path = UIBezierPath()
    var incrementalImage: UIImage?
    var points = [CGPoint?](count: 5, repeatedValue: nil)
    var counter: Int?
    var strokeColor: UIColor?

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
    }

    override func drawRect(rect: CGRect) {
        autoreleasepool {
            incrementalImage?.drawInRect(rect)
            strokeColor = UIColor.blueColor()
            strokeColor?.setStroke()
            path.lineWidth = 20
            path.lineCapStyle = CGLineCap.Round
            path.stroke()
        }
    }

    override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        counter = 0
        let touch: AnyObject? = touches.first
        points[0] = touch!.locationInView(self)
    }

    override func touchesMoved(touches: Set<UITouch>, withEvent event: UIEvent?) {
        let touch: AnyObject? = touches.first
        let point = touch!.locationInView(self)
        counter = counter! + 1
        points[counter!] = point

        if counter == 2 {
            //use path.addLineToPoint ?
            //use self.setNeedsDisplay() ?
        }
        if counter == 3 {
            //use path.addQuadCurveToPoint ?
            //use self.setNeedsDisplay() ?
        }
        if counter == 4 {
            points[3]! = CGPointMake((points[2]!.x + points[4]!.x)/2.0, (points[2]!.y + points[4]!.y)/2.0)
            path.moveToPoint(points[0]!)
            path.addCurveToPoint(points[3]!, controlPoint1: points[1]!, controlPoint2: points[2]!)
            self.setNeedsDisplay()
            points[0]! = points[3]!
            points[1]! = points[4]!
            counter = 1
        }
    }

    override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
        self.drawBitmap()
        self.setNeedsDisplay()
        path.removeAllPoints()
        counter = 0
    }

    override func touchesCancelled(touches: Set<UITouch>?, withEvent event: UIEvent?) {
        self.touchesEnded(touches!, withEvent: event)
    }

    func drawBitmap() {
        UIGraphicsBeginImageContextWithOptions(self.bounds.size, true, 0.0)
        strokeColor?.setStroke()
        if incrementalImage == nil {
            let rectPath: UIBezierPath = UIBezierPath(rect: self.bounds)
            UIColor.whiteColor().setFill()
            rectPath.fill()
        }
        incrementalImage?.drawAtPoint(CGPointZero)
        path.stroke()
        incrementalImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
    }
}

class ViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Yes, adding a curve every few points will give it a stuttering lag. So, yes, you can reduce this effect by adding a line to points[1], adding a quad curve to points[2], and adding a cubic curve to points[3].
As you said, make sure to add this to a separate path, though. So, in Swift 3/4:
class SmoothCurvedLinesView: UIView {
    var strokeColor = UIColor.blue
    var lineWidth: CGFloat = 20
    var snapshotImage: UIImage?

    private var path: UIBezierPath?
    private var temporaryPath: UIBezierPath?
    private var points = [CGPoint]()

    override func draw(_ rect: CGRect) {
        snapshotImage?.draw(in: rect)

        strokeColor.setStroke()

        path?.stroke()
        temporaryPath?.stroke()
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        if let touch = touches.first {
            points = [touch.location(in: self)]
        }
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let touch = touches.first else { return }

        let point = touch.location(in: self)
        points.append(point)

        updatePaths()
        setNeedsDisplay()
    }

    private func updatePaths() {
        // update main path
        while points.count > 4 {
            points[3] = CGPoint(x: (points[2].x + points[4].x)/2.0, y: (points[2].y + points[4].y)/2.0)
            if path == nil {
                path = createPathStarting(at: points[0])
            }
            path?.addCurve(to: points[3], controlPoint1: points[1], controlPoint2: points[2])
            points.removeFirst(3)
            temporaryPath = nil
        }

        // build temporary path up to last touch point
        if points.count == 2 {
            temporaryPath = createPathStarting(at: points[0])
            temporaryPath?.addLine(to: points[1])
        } else if points.count == 3 {
            temporaryPath = createPathStarting(at: points[0])
            temporaryPath?.addQuadCurve(to: points[2], controlPoint: points[1])
        } else if points.count == 4 {
            temporaryPath = createPathStarting(at: points[0])
            temporaryPath?.addCurve(to: points[3], controlPoint1: points[1], controlPoint2: points[2])
        }
    }

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        finishPath()
    }

    override func touchesCancelled(_ touches: Set<UITouch>, with event: UIEvent?) {
        finishPath()
    }

    private func finishPath() {
        constructIncrementalImage()
        path = nil
        setNeedsDisplay()
    }

    private func createPathStarting(at point: CGPoint) -> UIBezierPath {
        let localPath = UIBezierPath()

        localPath.move(to: point)

        localPath.lineWidth = lineWidth
        localPath.lineCapStyle = .round
        localPath.lineJoinStyle = .round

        return localPath
    }

    private func constructIncrementalImage() {
        UIGraphicsBeginImageContextWithOptions(bounds.size, false, 0.0)
        strokeColor.setStroke()
        snapshotImage?.draw(at: .zero)
        path?.stroke()
        temporaryPath?.stroke()
        snapshotImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
    }
}
You could even marry this with iOS 9 predictive touches (as I described in my other answer), which could reduce lag even further.
To take this resulting image and use it elsewhere, you can just grab the incrementalImage (which I renamed to snapshotImage, above), and drop it into an image view of the other view.
For Swift 2 rendition, see previous revision of this answer.
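For the second part of the question, a minimal sketch of handing the finished drawing to a second view after touchesEnded (the subclass, targetImageView, and the outlet wiring are hypothetical, not part of the answer above):

class SmoothCurvedLinesViewWithCopy: SmoothCurvedLinesView {
    // Hypothetical destination for a copy of the finished drawing,
    // e.g. an image view hosted inside viewTwo.
    weak var targetImageView: UIImageView?

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesEnded(touches, with: event)   // updates snapshotImage
        targetImageView?.image = snapshotImage
    }
}

In the view controller you would then point targetImageView at the image view in viewTwo.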
