How to take a screenshot of a UIImageView with an aspect-fit image? - ios

I want to take a screenshot of an image view whose image is displayed with aspect fit. I tried the code below, but it does not give the result I expect. I want the screenshot at the actual image size.
func captureScreen() -> UIImage {
    let layer = self.layer
    let scale = UIScreen.main.scale
    UIGraphicsBeginImageContextWithOptions(layer.frame.size, false, scale)
    guard let context = UIGraphicsGetCurrentContext() else { return UIImage.init(named: "")! }
    layer.render(in: context)
    let screenshotImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return screenshotImage!
}
This is what I get. What I want is a screenshot at the full, actual image size.
Please help me.

// MARK: - Calculate the image view size from the image
func convertImageViewToImagebounds(image: UIImage, imageView: UIImageView) {
    if image.size.height > image.size.width {
        let ratio = image.size.height / image.size.width
        let newWidth = imageView.frame.size.height / ratio
        imageView.frame.size = CGSize(width: newWidth, height: imageView.frame.size.height)
        imageView.center = CGPoint(x: viewImgViewContainer.frame.width / 2.0, y: viewImgViewContainer.frame.height / 2.0)
    } else if image.size.height < image.size.width {
        let ratio = image.size.width / image.size.height
        let newHeight = imageView.frame.size.width / ratio
        imageView.frame.size = CGSize(width: imageView.frame.size.width, height: newHeight)
        imageView.center = CGPoint(x: viewImgViewContainer.frame.width / 2.0, y: viewImgViewContainer.frame.height / 2.0)
    } else {
        let ratio = image.size.width / image.size.height
        let newHeight = image.size.width / ratio
        imageView.frame.size = CGSize(width: image.size.width, height: newHeight)
        imageView.center = CGPoint(x: viewImgViewContainer.frame.width / 2.0, y: viewImgViewContainer.frame.height / 2.0)
    }
}
Take a look at the function above: it resizes the image view to match the image's aspect ratio, and viewImgViewContainer is the parent view of the image view, used to keep the image view centered.
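Not part of the original answer, but here is a minimal sketch of how the resized image view could then be captured at just its own bounds (assuming the imageView/viewImgViewContainer setup from the code above), using UIGraphicsImageRenderer instead of the begin/end context calls from the question:

// Sketch only: resize the image view to the image's aspect ratio, then snapshot it.
func screenshotOfImageOnly(image: UIImage, imageView: UIImageView) -> UIImage {
    convertImageViewToImagebounds(image: image, imageView: imageView)
    imageView.layoutIfNeeded()

    // Rendering only the image view's bounds means no letterbox bars end up in the result.
    let renderer = UIGraphicsImageRenderer(bounds: imageView.bounds)
    return renderer.image { context in
        imageView.layer.render(in: context.cgContext)
    }
}

The output is at screen scale; if you need the screenshot at the original image's pixel dimensions, you would configure a UIGraphicsImageRendererFormat with the appropriate scale.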

Related

How to get the rect of an image to crop it

I am building an app where you can crop multiple images. I am using this code directly from Apple:
func cropImage(_ inputImage: UIImage, toRect cropRect: CGRect, viewWidth: CGFloat, viewHeight: CGFloat) -> UIImage? {
    let imageViewScale = max(inputImage.size.width / viewWidth,
                             inputImage.size.height / viewHeight)

    // Scale cropRect to handle images larger than shown-on-screen size
    let cropZone = CGRect(x: cropRect.origin.x * imageViewScale,
                          y: cropRect.origin.y * imageViewScale,
                          width: cropRect.size.width * imageViewScale,
                          height: cropRect.size.height * imageViewScale)

    // Perform cropping in Core Graphics
    guard let cutImageRef: CGImage = inputImage.cgImage?.cropping(to: cropZone) else {
        return nil
    }

    // Return image to UIImage
    let croppedImage: UIImage = UIImage(cgImage: cutImageRef)
    return croppedImage
}
To crop the image I need a cropRect. I also found a solution on the Internet that I implemented in my code:
func realImageRect() -> CGRect {
    let imageViewSize = self.frame.size
    let imgSize = self.image?.size

    guard let imageSize = imgSize else {
        return CGRect.zero
    }

    let scaleWidth = imageViewSize.width / imageSize.width
    let scaleHeight = imageViewSize.height / imageSize.height
    let aspect = fmin(scaleWidth, scaleHeight)

    var imageRect = CGRect(x: 0, y: 0, width: imageSize.width * aspect, height: imageSize.height * aspect)

    // Center image
    imageRect.origin.x = (imageViewSize.width - imageRect.size.width) / 2
    imageRect.origin.y = (imageViewSize.height - imageRect.size.height) / 2

    // Add imageView offset
    imageRect.origin.x += self.frame.origin.x
    imageRect.origin.y += self.frame.origin.y

    return imageRect
}
As I already said, the app can crop multiple images. The images are stored in an array. I also have a crop view, which you can drag around the image with a pan gesture:
for i in 0..<imageContentView.count {
    let cropRect = CGRect(x: croppedViewArray[i].frame.origin.x - imageContentView[i].realImageRect().origin.x,
                          y: croppedViewArray[i].frame.origin.y - imageContentView[i].realImageRect().origin.y,
                          width: croppedViewArray[i].frame.width,
                          height: croppedViewArray[i].frame.height)
    print("cropRect", cropRect)

    let croppedImage = ImageCrophandler.sharedInstance.cropImage(imageContentView[i].image!,
                                                                 toRect: cropRect,
                                                                 viewWidth: imageContentView[i].frame.width,
                                                                 viewHeight: imageContentView[i].frame.height)
    print("cheight", croppedImage!.size.height, "cwidth", croppedImage!.size.width)

    arrayOfCropedImages.append(croppedImage!)
}
The problem I have is that every cropped image ends up with a different height and width, although the images should all be the same size.
I figured out that the resulting size depends on where the crop view is located.
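Not from the original question, but a minimal sketch of one likely cause and fix: CGImage.cropping(to:) intersects the crop zone with the image's bounds, so whenever the crop view hangs over the letterboxed area the result comes out smaller. Clamping the crop rect to the visible image rect before converting keeps every crop the same size in points (this assumes realImageRect() is a UIImageView extension and that the crop views and image views share the same superview, as the code above implies):

// Sketch only: keep the crop rect inside the displayed image so cropping(to:) never clips it.
func clampedCropRect(for cropView: UIView, in imageView: UIImageView) -> CGRect {
    // Visible image area, in the superview's coordinate space (from realImageRect above).
    let imageRect = imageView.realImageRect()
    var rect = cropView.frame

    // Keep the crop rect fully inside the displayed image.
    rect.origin.x = min(max(rect.origin.x, imageRect.minX), imageRect.maxX - rect.width)
    rect.origin.y = min(max(rect.origin.y, imageRect.minY), imageRect.maxY - rect.height)

    // Express the rect relative to the displayed image's origin, as the loop above expects.
    return rect.offsetBy(dx: -imageRect.origin.x, dy: -imageRect.origin.y)
}

The result would replace the cropRect computed in the loop above. Note that if the source images have different resolutions, the cropped results will still differ in pixel size; resizing each crop to a common target size afterwards would normalize that.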

How to get x and y position of UIImage in UIImageView?

I want to get the original x and y position of a UIImage when it is set in a UIImageView with scaleAspectFill.
As we know, with scaleAspectFill some portion of the image is clipped, so I want to get the x and y values (they may be negative, I don't know).
Here is the original image from the gallery.
Now I am setting this image in my app's view.
In this situation, I want to get the hidden x and y position of the image, i.e. the part that is clipped.
Can anyone tell me how to get it?
Use the following extension:
extension UIImageView {
    var imageRect: CGRect {
        guard let imageSize = self.image?.size else { return self.frame }

        let scale = UIScreen.main.scale
        let imageWidth = (imageSize.width / scale).rounded()
        let frameWidth = self.frame.width.rounded()
        let imageHeight = (imageSize.height / scale).rounded()
        let frameHeight = self.frame.height.rounded()

        let ratio = max(frameWidth / imageWidth, frameHeight / imageHeight)
        let newSize = CGSize(width: imageWidth * ratio, height: imageHeight * ratio)
        let newOrigin = CGPoint(x: self.center.x - (newSize.width / 2), y: self.center.y - (newSize.height / 2))
        return CGRect(origin: newOrigin, size: newSize)
    }
}
Usage
let rect = imageView.imageRect
print(rect)
UI Test
let testView = UIView(frame: rect)
testView.backgroundColor = UIColor.red.withAlphaComponent(0.5)
imageView.superview?.addSubview(testView)
Use the extension below to find the accurate frame of the image inside the image view.
extension UIImageView {
    var contentRect: CGRect {
        guard let image = image else { return bounds }
        guard contentMode == .scaleAspectFit else { return bounds }
        guard image.size.width > 0 && image.size.height > 0 else { return bounds }

        let scale: CGFloat
        if image.size.width > image.size.height {
            scale = bounds.width / image.size.width
        } else {
            scale = bounds.height / image.size.height
        }

        let size = CGSize(width: image.size.width * scale, height: image.size.height * scale)
        let x = (bounds.width - size.width) / 2.0
        let y = (bounds.height - size.height) / 2.0

        return CGRect(x: x, y: y, width: size.width, height: size.height)
    }
}
How to test
let rect = imgTest.contentRect
print("Image rect:", rect)
Reference: https://www.hackingwithswift.com/example-code/uikit/how-to-find-an-aspect-fit-images-size-inside-an-image-view
If you want to show the image the way the gallery shows it, you can use the constraints "H:|[v0]|" and "V:|[v0]|" and set the image view's content mode to .scaleAspectFit.
And if you want the image size, you can use imageView.image!.size and calculate how much of the image is cut off. With .scaleAspectFill the width is matched to the screen width and the height grows accordingly, so from that you can work out how much of the image is clipped.
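As a rough sketch of that calculation (my own addition, not from the answer above, and assuming the image view uses .scaleAspectFill):

func aspectFillRect(imageSize: CGSize, in bounds: CGRect) -> CGRect {
    // .scaleAspectFill scales the image until both dimensions cover the view.
    let scale = max(bounds.width / imageSize.width, bounds.height / imageSize.height)
    let scaledSize = CGSize(width: imageSize.width * scale, height: imageSize.height * scale)

    // The scaled image is centered, so the overflow is split evenly on both sides.
    let x = (bounds.width - scaledSize.width) / 2   // <= 0 when the width overflows
    let y = (bounds.height - scaledSize.height) / 2 // <= 0 when the height overflows
    return CGRect(x: x, y: y, width: scaledSize.width, height: scaledSize.height)
}

For example, aspectFillRect(imageSize: imageView.image!.size, in: imageView.bounds).origin gives the (possibly negative) x and y values the question asks about.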
Try this library: ImageCoordinateSpace.
I am not sure whether it works for you or not, but it can convert a CGPoint from image coordinates to any view's coordinates and vice versa.

Resizing the image to Aspect Fit

I am trying to resize images using the following popular code, but it resizes the image as Scale to Fill; I would like to resize them as Aspect Fit. How do I do that?
func resizeImage(image: UIImage, newSize: CGSize) -> (UIImage) {
    let newRect = CGRect(x: 0, y: 0, width: newSize.width, height: newSize.height).integral
    UIGraphicsBeginImageContextWithOptions(newSize, true, 0)
    let context = UIGraphicsGetCurrentContext()

    // Set the quality level to use when rescaling
    context!.interpolationQuality = CGInterpolationQuality.default
    let flipVertical = CGAffineTransform(a: 1, b: 0, c: 0, d: -1, tx: 0, ty: newSize.height)
    context!.concatenate(flipVertical)

    // Draw into the context; this scales the image
    context?.draw(image.cgImage!, in: CGRect(x: 0.0, y: 0.0, width: newRect.width, height: newRect.height))

    // Get the resized image from the context and a UIImage
    let newImageRef = context!.makeImage()! as CGImage
    let newImage = UIImage(cgImage: newImageRef)

    UIGraphicsEndImageContext()
    return newImage
}
I have already set the content mode of the image view to Aspect Fit, but it is still not working.
This is how I call the above code in my collection view controller:
cell.imageView.image = UIImage(named: dogImages[indexPath.row])?.resizeImage(image: UIImage(named: dogImages[indexPath.row])
I manually selected my image view in the storyboard and set its content mode to Aspect Fit.
Did you try setting newSize using the aspect ratio of the original image size? If you want a fixed width, calculate the height from the width; if you want a fixed height, calculate the width from the height.
Calculate the height when the width is fixed:
let fixedWidth: CGFloat = 200
let newHeight = fixedWidth * image.size.height / image.size.width
let convertedImage = resizeImage(image: image, newSize: CGSize(width: fixedWidth, height: newHeight))
Calculate the width when the height is fixed:
let fixedheight: CGFloat = 200
let newWidth = fixedheight * image.size.width / image.size.height
let convertedImage = resizeImage(image: image, newSize: CGSize(width: newWidth, height: fixedheight))
You can then use this resized image, which keeps the aspect-fit ratio.
Also check this answer: https://stackoverflow.com/a/8858464/2677551, which may help.
// This method belongs in a UIImage extension (see the usage below).
func scaleImageAspectFit(newSize: CGSize) -> UIImage? {
    var scaledImageRect: CGRect = CGRect.zero

    let aspectWidth: CGFloat = newSize.width / size.width
    let aspectHeight: CGFloat = newSize.height / size.height
    let aspectRatio: CGFloat = min(aspectWidth, aspectHeight)

    scaledImageRect.size.width = size.width * aspectRatio
    scaledImageRect.size.height = size.height * aspectRatio
    scaledImageRect.origin.x = (newSize.width - scaledImageRect.size.width) / 2.0
    scaledImageRect.origin.y = (newSize.height - scaledImageRect.size.height) / 2.0

    UIGraphicsBeginImageContextWithOptions(newSize, false, 0)
    if UIGraphicsGetCurrentContext() != nil {
        draw(in: scaledImageRect)
        let scaledImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return scaledImage
    }

    return nil
}
Usage:
let resizedImage = oldImage.scaleImageAspectFit(newSize: CGSize(width: newSize.width, height: newSize.height))

Resize an image with drawInRect while maintaining the aspect ratio like Scale Aspect Fill?

I would like to resize an image with the drawInRect method, but I would also like to maintain the right aspect ratio while completely filling the given frame (as .ScaleAspectFill does for UIViewContentMode).
Does anyone have a ready answer for this?
Here is my code (pretty straightforward...):
func scaled100Image() -> UIImage {
    let newSize = CGSize(width: 100, height: 100)
    UIGraphicsBeginImageContext(newSize)
    self.pictures[0].drawInRect(CGRect(x: 0, y: 0, width: 100, height: 100))
    let newImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return newImage
}
OK, so no ready-made answer... I wrote a Swift extension for UIImage; feel free to use it if you need it.
Here it is:
extension UIImage {
    func drawInRectAspectFill(rect: CGRect) {
        let targetSize = rect.size
        if targetSize == .zero {
            self.draw(in: rect)
            return
        }

        let widthRatio = targetSize.width / self.size.width
        let heightRatio = targetSize.height / self.size.height
        let scalingFactor = max(widthRatio, heightRatio)
        let newSize = CGSize(width: self.size.width * scalingFactor,
                             height: self.size.height * scalingFactor)

        UIGraphicsBeginImageContext(targetSize)
        let origin = CGPoint(x: (targetSize.width - newSize.width) / 2,
                             y: (targetSize.height - newSize.height) / 2)
        self.draw(in: CGRect(origin: origin, size: newSize))
        let scaledImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        scaledImage?.draw(in: rect)
    }
}
So in the example above, you use it like this:
self.pictures[0].drawInRectAspectFill(rect: CGRect(x: 0, y: 0, width: 100, height: 100))
The Objective-C version, if someone needs it (paste this code inside a UIImage category):
- (void)drawInRectAspectFill:(CGRect)recto {
    CGSize targetSize = recto.size;
    if (targetSize.width <= CGSizeZero.width && targetSize.height <= CGSizeZero.height) {
        [self drawInRect:recto];
        return;
    }

    float widthRatio = targetSize.width / self.size.width;
    float heightRatio = targetSize.height / self.size.height;
    float scalingFactor = fmax(widthRatio, heightRatio);
    CGSize newSize = CGSizeMake(self.size.width * scalingFactor, self.size.height * scalingFactor);

    UIGraphicsBeginImageContext(targetSize);
    CGPoint origin = CGPointMake((targetSize.width - newSize.width) / 2, (targetSize.height - newSize.height) / 2);
    [self drawInRect:CGRectMake(origin.x, origin.y, newSize.width, newSize.height)];
    UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    [scaledImage drawInRect:recto];
}

UIImageView: get the position of the displayed image

I have a UIImageView which shows a UIImage.
The UIImage may change to another UIImage of a different size, and the position and size of the UIImage inside the view change accordingly.
My problem is that I'm trying to add a view at the edge of the UIImage (which changes all the time), and all I can get is the frame of the UIImageView (which stays full screen all the time).
How can I get the "frame" of the currently displayed UIImage?
Swift 4.2 & 5.0
func calculateRectOfImageInImageView(imageView: UIImageView) -> CGRect {
    let imageViewSize = imageView.frame.size
    let imgSize = imageView.image?.size

    guard let imageSize = imgSize else {
        return CGRect.zero
    }

    let scaleWidth = imageViewSize.width / imageSize.width
    let scaleHeight = imageViewSize.height / imageSize.height
    let aspect = fmin(scaleWidth, scaleHeight)

    var imageRect = CGRect(x: 0, y: 0, width: imageSize.width * aspect, height: imageSize.height * aspect)

    // Center image
    imageRect.origin.x = (imageViewSize.width - imageRect.size.width) / 2
    imageRect.origin.y = (imageViewSize.height - imageRect.size.height) / 2

    // Add imageView offset
    imageRect.origin.x += imageView.frame.origin.x
    imageRect.origin.y += imageView.frame.origin.y

    return imageRect
}
Swift 3.0
// MARK: - Create Rect
func calculateRectOfImageInImageView(imageView: UIImageView) -> CGRect {
    let imageViewSize = imageView.frame.size
    let imgSize = imageView.image?.size

    guard let imageSize = imgSize, imgSize != nil else {
        return CGRect.zero
    }

    let scaleWidth = imageViewSize.width / imageSize.width
    let scaleHeight = imageViewSize.height / imageSize.height
    let aspect = fmin(scaleWidth, scaleHeight)

    var imageRect = CGRect(x: 0, y: 0, width: imageSize.width * aspect, height: imageSize.height * aspect)

    // Center image
    imageRect.origin.x = (imageViewSize.width - imageRect.size.width) / 2
    imageRect.origin.y = (imageViewSize.height - imageRect.size.height) / 2

    // Add imageView offset
    imageRect.origin.x += imageView.frame.origin.x
    imageRect.origin.y += imageView.frame.origin.y

    return imageRect
}
For Swift < 3.0
Here is the above method in Swift. Again, this assumes that contentMode is set to .ScaleAspectFit. If there is no image in the given imageView, CGRectZero will be returned.
func calculateRectOfImageInImageView(imageView: UIImageView) -> CGRect {
    let imageViewSize = imageView.frame.size
    let imgSize = imageView.image?.size

    guard let imageSize = imgSize where imgSize != nil else {
        return CGRectZero
    }

    let scaleWidth = imageViewSize.width / imageSize.width
    let scaleHeight = imageViewSize.height / imageSize.height
    let aspect = fmin(scaleWidth, scaleHeight)

    var imageRect = CGRect(x: 0, y: 0, width: imageSize.width * aspect, height: imageSize.height * aspect)

    // Center image
    imageRect.origin.x = (imageViewSize.width - imageRect.size.width) / 2
    imageRect.origin.y = (imageViewSize.height - imageRect.size.height) / 2

    // Add imageView offset
    imageRect.origin.x += imageView.frame.origin.x
    imageRect.origin.y += imageView.frame.origin.y

    return imageRect
}
The following will answer your question, assuming your UIImageView uses UIViewContentModeScaleAspectFit:
You have to take into account how the image is sized inside the UIImageView. This depends on how you set the contentMode. From your description, I assume you are using UIViewContentModeScaleAspectFit. The resulting image will also be centered in the UIImageView, so you have to consider this in the calculation.
- (CGRect)calculateClientRectOfImageInUIImageView:(UIImageView *)imgView
{
    CGSize imgViewSize = imgView.frame.size;  // Size of UIImageView
    CGSize imgSize = imgView.image.size;      // Size of the image, currently displayed

    // Calculate the aspect, assuming imgView.contentMode == UIViewContentModeScaleAspectFit
    CGFloat scaleW = imgViewSize.width / imgSize.width;
    CGFloat scaleH = imgViewSize.height / imgSize.height;
    CGFloat aspect = fmin(scaleW, scaleH);

    CGRect imageRect = { {0, 0}, { imgSize.width *= aspect, imgSize.height *= aspect } };
    // Note: the above is the same as:
    // CGRect imageRect = CGRectMake(0, 0, imgSize.width *= aspect, imgSize.height *= aspect); I just like this notation better

    // Center image
    imageRect.origin.x = (imgViewSize.width - imageRect.size.width) / 2;
    imageRect.origin.y = (imgViewSize.height - imageRect.size.height) / 2;

    // Add imageView offset
    imageRect.origin.x += imgView.frame.origin.x;
    imageRect.origin.y += imgView.frame.origin.y;

    return (imageRect);
}
For a better illustration of the differences between the three content modes, see below:
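Since that illustration is not reproduced here, the following sketch (my addition, not part of the original answer) approximates the comparison in code by computing the rect each content mode gives the image inside the view's bounds:

func displayedRect(imageSize: CGSize, bounds: CGRect, contentMode: UIView.ContentMode) -> CGRect {
    let widthRatio = bounds.width / imageSize.width
    let heightRatio = bounds.height / imageSize.height

    // Helper: scale the image and center the result in the bounds.
    func centered(scale: CGFloat) -> CGRect {
        let size = CGSize(width: imageSize.width * scale, height: imageSize.height * scale)
        return CGRect(x: (bounds.width - size.width) / 2,
                      y: (bounds.height - size.height) / 2,
                      width: size.width, height: size.height)
    }

    switch contentMode {
    case .scaleToFill:
        return bounds                                          // stretches, ignoring the aspect ratio
    case .scaleAspectFit:
        return centered(scale: min(widthRatio, heightRatio))   // letterboxed inside the view
    case .scaleAspectFill:
        return centered(scale: max(widthRatio, heightRatio))   // covers the view, clipping the overflow
    default:
        return bounds
    }
}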
I recommend using the built-in AVFoundation function AVMakeRectWithAspectRatioInsideRect:
func AVMakeRectWithAspectRatioInsideRect(_ aspectRatio: CGSize, _ boundingRect: CGRect) -> CGRect
Parameters:
aspectRatio: The width and height ratio (aspect ratio) you want to maintain.
boundingRect: The bounding rectangle you want to fit into.
Return value:
Returns a scaled CGRect that maintains the aspect ratio specified by aspectRatio and fits within boundingRect.
let boundingBox = AVMakeRectWithAspectRatioInsideRect(backgroundImage.size, frame)
Based on the wonderfully simple solution from Janusz, here's what I did:
let visibleRect = AVMakeRect(aspectRatio: CGSize(width: image.size.width, height: image.size.height), insideRect: self.frame)

if visibleRect.contains(point) {
    // Do something great here...
}
Swift 3.0
I know it's quite late, but it might help someone in the future. It's a very simple, built-in solution provided by iOS. You just need to:
import AVFoundation
let imageRect = AVMakeRect(aspectRatio: image.size, insideRect: self.imageView.bounds)
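A small usage sketch (my addition, assuming the image view uses .scaleAspectFit and reusing imageRect from the line above), for example to overlay a view exactly on the visible part of the image:

// imageRect is in imageView's coordinate space, so add the overlay as a subview of it.
let overlay = UIView(frame: imageRect)
overlay.backgroundColor = .clear
overlay.layer.borderColor = UIColor.red.cgColor
overlay.layer.borderWidth = 2
self.imageView.addSubview(overlay)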
