Drawing on UIImageView within UIScrollView - iOS

About my app: The user can view a PDF file within a UIWebView. I have an option for the user to choose whether they want to scroll through the PDF or take notes on it. When they take notes, scrolling is disabled, and vice versa. However, when the user is drawing, the lines move up and become hazy, as shown:
(The red boxes are text within the PDF)
Here is my code:
Switching between the pen and scroll:
var usingPen = false

@IBAction func usePen(sender: AnyObject) {
    usingPen = true
    webView.userInteractionEnabled = false
    UIView.animateWithDuration(0.3) { () -> Void in
        self.popUpView.alpha = 0
    }
}

@IBAction func useScroll(sender: AnyObject) {
    usingPen = false
    webView.userInteractionEnabled = true
    UIView.animateWithDuration(0.3) { () -> Void in
        self.popUpView.alpha = 0
    }
}
The imageView the user draws on (objectView):
var objectView = UIImageView()

override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    objectView.frame.size = webView.scrollView.contentSize
    webView.scrollView.addSubview(objectView)
}
Drawing on the image view:
var start = CGPoint()
let size: CGFloat = 3
var color = UIColor.blackColor()

func draw(start: CGPoint, end: CGPoint) {
    if usingPen == true {
        UIGraphicsBeginImageContext(self.objectView.frame.size)
        let context = UIGraphicsGetCurrentContext()
        objectView.image?.drawInRect(CGRect(x: 0, y: 0, width: objectView.frame.width, height: objectView.frame.height))
        CGContextSetFillColorWithColor(context, color.CGColor)
        CGContextSetStrokeColorWithColor(context, color.CGColor)
        CGContextSetLineWidth(context, size)
        CGContextBeginPath(context)
        CGContextMoveToPoint(context, start.x, start.y)
        CGContextAddLineToPoint(context, end.x, end.y)
        CGContextStrokePath(context)
        let newImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        objectView.image = newImage
    }
}

override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
    start = (touches.first?.locationInView(self.objectView))!
}

override func touchesMoved(touches: Set<UITouch>, withEvent event: UIEvent?) {
    draw(start, end: (touches.first?.locationInView(self.objectView))!)
    start = (touches.first?.locationInView(self.objectView))!
}
How can I prevent the haziness and movement of the drawings? Thanks for your help.

This probably occurs due to an image scaling issue. Since you're drawing into an image with a scale factor of 1 (which is the default) and your screen has a scale factor of 2 or 3, lines will continue to blur slightly every time they're copied and drawn. The solution is to specify the screen's scale when you create your image context:
UIGraphicsBeginImageContextWithOptions(self.objectView.frame.size, false, UIScreen.mainScreen().scale)
Note that the way you're drawing the line is fairly inefficient. Instead, you probably want to create a CGBitmapContext and continually draw to that; it'd be much faster and would also eliminate the "generation loss" problem you have.
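For illustration, here is a minimal sketch of that persistent-context idea, written in Swift 3 syntax rather than the question's Swift 2 (the class name CanvasView and the fixed stroke color and width are assumptions of the sketch, not part of the original code):

import UIKit

class CanvasView: UIImageView {

    private var persistentContext: CGContext?
    private var lastPoint = CGPoint.zero

    private func makeContextIfNeeded() {
        guard persistentContext == nil else { return }
        let scale = UIScreen.main.scale
        let pixelWidth = Int(bounds.width * scale)
        let pixelHeight = Int(bounds.height * scale)
        // One bitmap context for the whole session, at the screen's scale.
        let context = CGContext(data: nil,
                                width: pixelWidth,
                                height: pixelHeight,
                                bitsPerComponent: 8,
                                bytesPerRow: 0,
                                space: CGColorSpaceCreateDeviceRGB(),
                                bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
        // Flip to UIKit's top-left origin and work in points, not pixels.
        context?.translateBy(x: 0, y: CGFloat(pixelHeight))
        context?.scaleBy(x: scale, y: -scale)
        persistentContext = context
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        makeContextIfNeeded()
        lastPoint = touches.first?.location(in: self) ?? .zero
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let context = persistentContext,
            let point = touches.first?.location(in: self) else { return }
        // Stroke only the newest segment; earlier strokes stay in the bitmap.
        context.setStrokeColor(UIColor.black.cgColor)
        context.setLineWidth(3)
        context.setLineCap(.round)
        context.move(to: lastPoint)
        context.addLine(to: point)
        context.strokePath()
        lastPoint = point
        if let cgImage = context.makeImage() {
            image = UIImage(cgImage: cgImage, scale: UIScreen.main.scale, orientation: .up)
        }
    }
}

Because each segment is stroked into the same backing bitmap, earlier strokes are never redrawn or resampled, which is what eliminates the generation loss.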

Related

Erasing a stroke path with Quartz2D is deleting more than just the path

I'm trying to set up an eraser tool for a drawing app. I've been following this tutorial for the basics, but the drawings there happen on a white background, so the deletion part is not covered (they just draw in white to "delete").
I've been implementing a method to delete my drawings, and it works pretty well. I draw a circle, set the color and the blend mode to clear, and set the path where I want to draw that circle. Then I get an image back from UIGraphicsGetImageFromCurrentImageContext() that updates my current image, with the new drawing over it.
The drawn circle does its job and deletes where it has been drawn. But the problem is that the image also begins to get deleted from right to left, across its whole height, and this is certainly not set anywhere in my code.
I don't know if it's a bug.
I've tried everything, and I can't get a new image from the context without this extra deletion line on the right. The more I draw, the more that line grows.
As you can see in the GIF, it looks like I'm drawing in gray over a white background, but the white is actually an image that has been filled with white, and its background color is gray, so I'm erasing.
The code:
In a UIView (blue background) I initialize and add two white columns as subviews:
func addNewColumn() {
    let column: Column = Column(frame: CGRect(x: self.frame.size.width * 0.7, y: 0, width: self.frame.size.width * 0.125, height: self.frame.size.height))
    let column2: Column = Column(frame: CGRect(x: self.frame.size.width * 0.2, y: 0, width: self.frame.size.width * 0.125, height: self.frame.size.height))
    self.addSubview(column)
    self.addSubview(column2)
}
The initialization of the column in its class:
import UIKit

class Column: UIImageView {

    // Touch positions
    var firstPoint: CGPoint?
    var lastPoint: CGPoint?

    override init(frame: CGRect) {
        super.init(frame: frame)
        self.isUserInteractionEnabled = true
        self.draw(frame)
        self.backgroundColor = UIColor.gray
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
    }

    // Drawing of the white color in the image, over the blue background
    override func draw(_ rect: CGRect) {
        UIGraphicsBeginImageContextWithOptions(self.bounds.size, false, 0)
        let context = UIGraphicsGetCurrentContext()
        context?.setFillColor(UIColor.white.cgColor)
        context?.setBlendMode(.normal)
        context?.fill(self.bounds)
        self.image = UIGraphicsGetImageFromCurrentImageContext()
        self.alpha = 1
        UIGraphicsEndImageContext()
    }

    // The method called from touchesMoved() to delete the white color
    func eraseColumn() {
        UIGraphicsBeginImageContextWithOptions(self.bounds.size, false, 0)
        let context = UIGraphicsGetCurrentContext()
        self.image?.draw(in: self.bounds)
        context?.beginPath()
        context?.addEllipse(in: CGRect(x: (lastPoint?.x)!, y: (lastPoint?.y)!, width: 10, height: 10))
        context?.setLineCap(.round)
        context?.setLineWidth(10)
        context?.setStrokeColor(UIColor.clear.cgColor)
        context?.setBlendMode(.clear)
        context?.strokePath()
        context?.closePath()
        self.image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()
    }

    // Touch handling
    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        let touch = touches.first
        firstPoint = touch?.location(in: self)
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        let touch = touches.first
        lastPoint = touch?.location(in: self)
        eraseColumn()
    }
}
As you can see in the draw(_ rect: CGRect) method, I fill the image with white and set its background color to gray.
In the eraseColumn() method, I delete the white where the user touched. No matter what I try, the extra part of the image keeps getting deleted, and I simply don't know why this is happening.
Any help would be very appreciated.
The project is in Swift 3, Xcode 9.2.
Finally found the solution: changing
self.image?.draw(in: self.bounds)
to
self.image?.draw(at: self.bounds.origin)
fixes the problem. (Presumably draw(in:) rescales the image to fit the given rect while draw(at:) draws it at its natural size, so the repeated rescale on every touch is what smeared the cleared pixels toward the edge.) Hope it helps somebody.
Cheers

Custom UISlider visualisation error (thumb position, track edge...)

This is my first time asking a question here; if any part of my question is unclear, please let me know. Thank you.
I am new to iOS. Recently, for a project, I created a custom slider which looks like an iOS-style switch. Below is my code:
@IBDesignable
class CustomSlider: UISlider {

    @IBInspectable var trackWidth: CGFloat = 70
    @IBInspectable var trackHeight: CGFloat = 30
    @IBInspectable var thumbHeight: CGFloat = 30

    private var thumbTouchSize = CGSize(width: 70, height: 30)

    override func trackRect(forBounds bounds: CGRect) -> CGRect {
        return CGRect(origin: bounds.origin, size: CGSize(width: bounds.width, height: trackHeight))
    }

    override func thumbRect(forBounds bounds: CGRect, trackRect rect: CGRect, value: Float) -> CGRect {
        var thumb = super.thumbRect(forBounds: bounds, trackRect: rect, value: value)
        thumb.size.height = thumbHeight
        return thumb.offsetBy(dx: 0, dy: 0)
    }

    override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
        let increasedBounds = bounds.insetBy(dx: -thumbTouchSize.width, dy: -thumbTouchSize.height)
        let containsPoint = increasedBounds.contains(point)
        return containsPoint
    }

    override func beginTracking(_ touch: UITouch, with event: UIEvent?) -> Bool {
        let percentage = CGFloat((value - minimumValue) / (maximumValue - minimumValue))
        let thumbSizeHeight = thumbRect(forBounds: bounds, trackRect: trackRect(forBounds: bounds), value: 0).size.height
        let thumbPosition = thumbSizeHeight + (percentage * (bounds.size.width - (2 * thumbSizeHeight)))
        let touchLocation = touch.location(in: self)
        return touchLocation.x <= (thumbPosition + thumbTouchSize.width) && touchLocation.x >= (thumbPosition - thumbTouchSize.width)
    }
}
This is how it looks:
custom UISlider initial state
when thumb moves towards right end
custom UISlider max. value
As you can see:
(1) I set the same height for the track and the thumb; however, the thumb is not vertically centered in the track, which does not look good.
(2) When I pull the slider to the right end, the edge becomes flat, which should not happen (the edges should stay rounded, as in the initial state).
I tried all the suggested solutions, but none solved my problem. Please help; many thanks in advance.
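No answer is recorded for this one, so as a hedged pointer only: trackRect(forBounds:) above pins the track to bounds.origin instead of centering it, and thumbRect(forBounds:trackRect:value:) changes the thumb's height without re-centering it; either could produce the off-center look in (1). A minimal sketch of centered variants of those two overrides, assuming that is the cause:

override func trackRect(forBounds bounds: CGRect) -> CGRect {
    // Center the track vertically instead of pinning it to the top of bounds.
    return CGRect(x: bounds.origin.x,
                  y: bounds.midY - trackHeight / 2,
                  width: bounds.width,
                  height: trackHeight)
}

override func thumbRect(forBounds bounds: CGRect, trackRect rect: CGRect, value: Float) -> CGRect {
    var thumb = super.thumbRect(forBounds: bounds, trackRect: rect, value: value)
    thumb.size.height = thumbHeight
    // Re-center the thumb on the track after changing its height.
    thumb.origin.y = rect.midY - thumbHeight / 2
    return thumb
}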

Swift Drawing App is Laggy

I am currently working on a drawing app. I'm building it for iOS, and am using Swift 3 to do so.
It's just a basic drawing app, but I'm trying to add an extra feature. I started with a UIScrollView, then added an image view to that scroll view. All of the drawing is done in the image view. When you first launch the app, the scroll view is completely zoomed in. When you switch to "zoom mode", you can pinch to zoom. The problem is, when you draw while zoomed in right after launching, the drawing is really fuzzy. In order to fix this, I can use a line of code like this:
UIGraphicsBeginImageContextWithOptions((self.view.frame.size), false, 7.0)
This makes the drawing look great while zoomed in, but causes the app to run very laggy. What confuses me, though, is that if I change the above code to this:
UIGraphicsBeginImageContextWithOptions((self.view.frame.size), false, 0.0)
and zoom out all the way, the drawing looks exactly the same (granted, I'm zoomed all the way out), but it's no longer laggy. I know this probably isn't coming across clearly, so here's a video showing what happens in the first scenario: https://youtu.be/E_9FKf1pUTY and in the second: https://youtu.be/OofFTS4Q0OA
So basically, I'm wondering if there's a way to treat the zoomed-in area as if it were its own view. It seems as if the app is updating the entire image view, rather than just the part that is visible at any given time. Is there a way to update only the portion of the image view that is drawn on? Sorry if this is a bit of a confusing post; feel free to ask questions if there's anything you don't understand. Just for clarity's sake, I'll include all of the drawing code below:
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    print("Touches began")
    swiped = true
    if let touch = touches.first {
        lastPoint = touch.location(in: scrollView)
        lastPoint.x /= scrollView.zoomScale
        lastPoint.y /= scrollView.zoomScale
    }
}

func drawLines(fromPoint: CGPoint, toPoint: CGPoint) {
    print("\(fromPoint.x), \(fromPoint.y)")
    //UIGraphicsBeginImageContext(self.view.frame.size)
    UIGraphicsBeginImageContextWithOptions(scrollView.frame.size, false, 0.0)
    imageView.image?.draw(in: CGRect(x: 0, y: 0, width: self.view.frame.width, height: self.view.frame.height))
    let context = UIGraphicsGetCurrentContext()
    context?.move(to: CGPoint(x: fromPoint.x, y: fromPoint.y))
    context?.addLine(to: CGPoint(x: toPoint.x, y: toPoint.y))
    context?.setBlendMode(.normal)
    context?.setLineCap(.round)
    context?.setLineWidth(erase ? 30 : CGFloat(sizeVar))
    switch color {
    case "black":   context?.setStrokeColor(UIColor.black.cgColor)
    case "white":   context?.setStrokeColor(UIColor.white.cgColor)
    case "blue":    context?.setStrokeColor(UIColor.blue.cgColor)
    case "cyan":    context?.setStrokeColor(UIColor.cyan.cgColor)
    case "green":   context?.setStrokeColor(UIColor.green.cgColor)
    case "magenta": context?.setStrokeColor(UIColor.magenta.cgColor)
    case "red":     context?.setStrokeColor(UIColor.red.cgColor)
    case "yellow":  context?.setStrokeColor(UIColor.yellow.cgColor)
    default:        break
    }
    context?.strokePath()
    imageView.image = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
}

override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
    swiped = true
    if let touch = touches.first {
        var currentPoint = touch.location(in: scrollView)
        currentPoint.x /= scrollView.zoomScale
        currentPoint.y /= scrollView.zoomScale
        drawLines(fromPoint: lastPoint, toPoint: currentPoint)
        lastPoint = currentPoint
    }
}

override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
    if !swiped {
        drawLines(fromPoint: lastPoint, toPoint: lastPoint)
    }
}
The scale parameter in the function UIGraphicsBeginImageContextWithOptions(_:_:_:) is not a quality setting, and you should not put arbitrary values there. It's a scale factor, akin to the @1x, @2x, and @3x settings for images. It tells the system the scaling factor to use when mapping image pixels to screen pixels. In most (almost all) cases you should use 0, which means "use the native scale of the screen" (@2x for normal Retina, or @3x for the iPhone 6+ and 7+). You should never set it to an arbitrary value like 7. That creates an image with 7x the pixels in each dimension and forces the system to scale it for screen drawing every time, which takes more time and is slower.
Next, creating a new image for each new line is a dreadfully inefficient way to do drawing. It creates and releases large blocks of memory constantly, and then has to completely redraw the screen each time. Instead, I would set up a view that has a CAShapeLayer as its backing layer and update the path that's installed in the layer.
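To make that concrete, here is a rough Swift 3 sketch of a shape-layer-backed drawing view (the class name and the stroke settings are illustrative assumptions, not from the question):

import UIKit

class ShapeDrawingView: UIView {

    private let path = UIBezierPath()

    // Back the view with a CAShapeLayer instead of a plain CALayer.
    override class var layerClass: AnyClass { return CAShapeLayer.self }

    private var shapeLayer: CAShapeLayer { return layer as! CAShapeLayer }

    override init(frame: CGRect) {
        super.init(frame: frame)
        configureLayer()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        configureLayer()
    }

    private func configureLayer() {
        shapeLayer.strokeColor = UIColor.black.cgColor
        shapeLayer.fillColor = nil
        shapeLayer.lineWidth = 3
        shapeLayer.lineCap = kCALineCapRound
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let point = touches.first?.location(in: self) else { return }
        path.move(to: point)
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let point = touches.first?.location(in: self) else { return }
        path.addLine(to: point)
        // Updating the layer's path is cheap; no bitmap is rebuilt per touch.
        shapeLayer.path = path.cgPath
    }
}

For very long sessions you would eventually flatten the accumulated path into an image, but for live strokes this avoids the per-move bitmap churn entirely.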

iOS: How to save an image from a UIView without losing image quality

I have images that are drawn in a UIView, and then lines are drawn on them. How can I save the image with the painted lines to a UIImage object at the same size it began with, without losing any image quality?
i.e. I have an image of size (3264, 2448).
It is drawn in a UIView (size 375, 281) that aspect-fits the image,
then a line is painted on the image. Finally, how can I save the image from the UIView to a UIImage of size (3264, 2448) without losing image quality?
If this is not the best approach, please recommend a better way to accomplish this.
class DrawingView: UIImageView {

    private var pts = [CGPoint](count: 5, repeatedValue: CGPoint())
    private var ctr: uint!
    var lineWidth: CGFloat = 4.0
    var lineColor: UIColor = UIColor.darkGrayColor()

    required init(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    init(frame: CGRect, image: UIImage) {
        super.init(frame: frame)
        self.image = image
        beginDrawingView()
    }

    private func beginDrawingView() {
        userInteractionEnabled = true
    }

    override func touchesBegan(touches: Set<NSObject>, withEvent event: UIEvent) {
        ctr = 0
        if let touch = touches.first as? UITouch {
            pts[0] = touch.locationInView(self) as CGPoint
        }
    }

    override func touchesMoved(touches: Set<NSObject>, withEvent event: UIEvent) {
        if let touch = touches.first as? UITouch {
            let p: CGPoint = touch.locationInView(self)
            ctr = ctr + 1
            pts[Int(ctr)] = p
            if ctr == 4 {
                pts[3] = CGPointMake((pts[2].x + pts[4].x)/2.0, (pts[2].y + pts[4].y)/2.0)
                UIGraphicsBeginImageContextWithOptions(bounds.size, false, 0.0)
                let context = UIGraphicsGetCurrentContext()
                image!.drawInRect(CGRect(x: 0, y: 0, width: bounds.width, height: bounds.height))
                CGContextSaveGState(context)
                CGContextSetShouldAntialias(context, true)
                CGContextSetLineCap(context, kCGLineCapRound)
                CGContextSetLineWidth(context, lineWidth)
                CGContextSetStrokeColorWithColor(context, lineColor.CGColor)
                let path = CGPathCreateMutable()
                CGPathMoveToPoint(path, nil, pts[0].x, pts[0].y)
                CGPathAddCurveToPoint(path, nil, pts[1].x, pts[1].y, pts[2].x, pts[2].y, pts[3].x, pts[3].y)
                CGContextSetBlendMode(context, kCGBlendModeNormal)
                CGContextAddPath(context, path)
                CGContextStrokePath(context)
                image = UIGraphicsGetImageFromCurrentImageContext()
                CGContextRestoreGState(context)
                UIGraphicsEndImageContext()
                pts[0] = pts[3]
                pts[1] = pts[4]
                ctr = 1
            }
        }
    }
}
I'm not sure if this is exactly what you are looking for, but if you already have a UIImage object, you can just save it as a Base64String. Here is some code that I use for this (you'll notice that I also perform compression on any JPG images because I'm saving the string to a database, but of course you wouldn't need that component since you don't want to lose image quality):
//Convert the image to a Base64 string with optional JPG compression
//
- (NSString *)convertImageToBase64String:(UIImage *)image withImageType:(NSString *)imageType
{
    NSData *data;
    if ([imageType isEqualToString:@"PNG"]) {
        data = UIImagePNGRepresentation(image);
    } else if ([imageType isEqualToString:@"JPG"] || [imageType isEqualToString:@"JPEG"]) {
        data = UIImageJPEGRepresentation(image, 0.9);
    }
    return [data base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength];
}
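If the rest of the project is in Swift, a rough Swift 3 equivalent of the same helper might look like this (the free-function form and its name are mine, not from the answer above):

func convertImageToBase64String(_ image: UIImage, imageType: String) -> String? {
    let data: Data?
    switch imageType {
    case "PNG":
        data = UIImagePNGRepresentation(image)
    case "JPG", "JPEG":
        data = UIImageJPEGRepresentation(image, 0.9)  // optional JPG compression
    default:
        data = nil
    }
    return data?.base64EncodedString(options: .lineLength64Characters)
}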
If you don't have the UIImage object yet, though, you could get it by taking a screenshot like this:
+ (UIImage *)imageWithView:(UIView *)view
{
    UIGraphicsBeginImageContextWithOptions(view.bounds.size, view.opaque, 0.0);
    [view.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *img = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return img;
}
I haven't tested the last block of code, but I suspect it will give you a screenshot of the ENTIRE screen, so you would need to adjust the view.bounds.size component to get the exact area you are looking for. Hope that helps!
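Coming back to the original question's size requirement, one hedged variation is to render the annotated view into a context sized to the source image rather than to the view, letting Core Graphics scale the layer up during rendering. A sketch in Swift 3 syntax; renderView(_:atFullSize:) is an illustrative name, and it assumes the view and the image share the same aspect ratio:

import UIKit

func renderView(_ view: UIView, atFullSize fullSize: CGSize) -> UIImage? {
    // Scale 1: fullSize is already expressed in the source image's pixels.
    UIGraphicsBeginImageContextWithOptions(fullSize, view.isOpaque, 1)
    guard let context = UIGraphicsGetCurrentContext() else { return nil }
    // Map the view's bounds onto the full-size canvas.
    context.scaleBy(x: fullSize.width / view.bounds.width,
                    y: fullSize.height / view.bounds.height)
    view.layer.render(in: context)
    let result = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return result
}

Any bitmap content already rasterized at view size is resampled on the way up, so this sketches the approach rather than guaranteeing a lossless pipeline.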

Allowing users to draw rect on a UIImage, with the intention of cropping the image

I'm sure this has been asked a number of times from various different perspectives, but I'm unable to find an answer on here as yet.
What I want to achieve
What I would like to do is to display a UIImage, and allow the user to draw a rectangle on the image, and eventually crop their selection.
Research so far
I've found previous questions here on SO that handle the cropping; however, they often deal with static cropping areas that don't change, which leads to the following constraints for such a mechanism:

- The area of the image you're interested in may be positioned anywhere; for example, if you're trying to crop a road sign, it may be centered in one image but aligned left in another, so you can't predict which area to crop.
- The size and scale of the area of interest may change; for example, one image may be a close-up of that road sign, so the cropping area would be larger, while another image may have been taken from a distance, meaning the cropping area would be smaller.

With the combination of the above two variables, it's almost impossible to accurately predict where the area of interest in the image will be, so I'm relying on the user to define it by being able to "draw" a box around the area we're interested in, in this case a road sign.
This is all peachy on Android, since you can delegate all the hard work out with a nice intent, such as :
Intent intent = new Intent("com.android.camera.action.CROP");
However, I can't find an equivalent for iOS.
I've found this bit of code from this source :
- (UIImage *)imageByDrawingCircleOnImage:(UIImage *)image
{
    // begin a graphics context of sufficient size
    UIGraphicsBeginImageContext(image.size);

    // draw original image into the context
    [image drawAtPoint:CGPointZero];

    // get the context for CoreGraphics
    CGContextRef ctx = UIGraphicsGetCurrentContext();

    // set stroking color and draw circle
    [[UIColor redColor] setStroke];

    // make circle rect 5 px from border
    CGRect circleRect = CGRectMake(0, 0,
                                   image.size.width,
                                   image.size.height);
    circleRect = CGRectInset(circleRect, 5, 5);

    // draw circle
    CGContextStrokeEllipseInRect(ctx, circleRect);

    // make image out of bitmap context
    UIImage *retImage = UIGraphicsGetImageFromCurrentImageContext();

    // free the context
    UIGraphicsEndImageContext();

    return retImage;
}
I believe this is a good starting point for cropping (though it draws circles), but it relies on predefining the area you want to crop when calling CGRectMake.
This previous question also details how to do the actual cropping.
I'm assuming that to allow the user to draw the rect, I'd need to integrate with Gestures?
The Question :
How can I allow the user, to draw a rect over an image view, with the intent of cropping that area?
You could give BJImageCropper a try:
A simple UIView subclass that allows a user to crop an image. If you use it, I'd love to know! Twitter: @barrettjacobsen
This post is already 5 years old, but for future reference, this is how I managed to get it done. The following code is a combination of Rob's answer and some image cropping.
Xcode 9 and Swift 4 are used here.
Add two view controllers.
Add an image view and two buttons to the first view controller, and another image view to the second.
Link all views to the source file.
View controller
import UIKit

extension UIView {
    func snapshot(afterScreenUpdates: Bool = false) -> UIImage {
        UIGraphicsBeginImageContextWithOptions(bounds.size, isOpaque, 0)
        drawHierarchy(in: bounds, afterScreenUpdates: afterScreenUpdates)
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()
        return image
    }
}

extension UIImage {
    func crop(rect: CGRect) -> UIImage {
        var rect = rect
        rect.origin.x *= self.scale
        rect.origin.y *= self.scale
        rect.size.width *= self.scale
        rect.size.height *= self.scale
        let imageRef = self.cgImage!.cropping(to: rect)
        let image = UIImage(cgImage: imageRef!, scale: self.scale, orientation: self.imageOrientation)
        return image
    }
}

class ViewController: UIViewController {

    var rec: CGRect!
    var cropImage: UIImage!

    @IBOutlet weak var imageView: UIImageView!

    private let shapeLayer: CAShapeLayer = {
        let _shapeLayer = CAShapeLayer()
        _shapeLayer.fillColor = UIColor.clear.cgColor
        _shapeLayer.strokeColor = UIColor.green.cgColor
        _shapeLayer.lineWidth = 2
        return _shapeLayer
    }()

    private var startPoint: CGPoint!

    override func viewDidLoad() {
        super.viewDidLoad()
        imageView.layer.addSublayer(shapeLayer)
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        clear()
        startPoint = touches.first?.location(in: imageView)
    }

    func clear() {
        imageView.layer.sublayers = nil
        imageView.image = UIImage(named: "aa")
        imageView.layer.addSublayer(shapeLayer)
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let startPoint = startPoint, let touch = touches.first else { return }
        let point: CGPoint
        if let predictedTouch = event?.predictedTouches(for: touch)?.last {
            point = predictedTouch.location(in: imageView)
        } else {
            point = touch.location(in: imageView)
        }
        updatePath(from: startPoint, to: point)
    }

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let startPoint = startPoint, let touch = touches.first else { return }
        let point = touch.location(in: imageView)
        updatePath(from: startPoint, to: point)
        imageView.image = imageView.snapshot(afterScreenUpdates: true)
        shapeLayer.path = nil
    }

    override func touchesCancelled(_ touches: Set<UITouch>, with event: UIEvent?) {
        shapeLayer.path = nil
    }

    private func updatePath(from startPoint: CGPoint, to point: CGPoint) {
        let size = CGSize(width: point.x - startPoint.x, height: point.y - startPoint.y)
        rec = CGRect(origin: startPoint, size: size)
        shapeLayer.path = UIBezierPath(rect: rec).cgPath
    }

    @IBAction func btnTapped(_ sender: Any) {
        clear()
    }

    @IBAction func btnCropTapped(_ sender: Any) {
        let newRec = CGRect(origin: CGPoint(x: rec.origin.x, y: rec.origin.y), size: rec.size)
        cropImage = imageView.image?.crop(rect: newRec)
        print(rec)
        print(newRec)
        self.performSegue(withIdentifier: "toImage", sender: nil)
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        if segue.identifier == "toImage" {
            if let destination = segue.destination as? ImageViewController {
                destination.croppedImage = cropImage
            }
        }
    }
}
