Any way to speed this UILabel blur code? - ios

Here is the code and it is really slow, like seconds slow to render about 25 labels.
extension UILabel{
func deBlur(){
for subview in self.subviews {
if (subview.tag == 99999) {
subview.removeFromSuperview()
}
}
}
func blur(){
let blurRadius:CGFloat = 5.1
UIGraphicsBeginImageContext(bounds.size)
layer.render(in: UIGraphicsGetCurrentContext()!)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
let blurFilter = CIFilter(name: "CIGaussianBlur")
blurFilter?.setDefaults()
let imageToBlur = CIImage(cgImage: (image?.cgImage)!)
blurFilter?.setValue(imageToBlur, forKey: kCIInputImageKey)
blurFilter?.setValue(blurRadius, forKey: "inputRadius")
let outputImage: CIImage? = blurFilter?.outputImage
let context = CIContext(options: nil)
let cgimg = context.createCGImage(outputImage!, from: (outputImage?.extent)!)
layer.contents = cgimg!
}
}
Any image / UIGraphics gurus know why this is so sloooow?
UPDATE: This line of code is the culprit. However, it is also needed to create the blur effect.
let cgimg = UILabel.context.createCGImage(outputImage!, from: (outputImage?.extent)!)

Related

UIGraphicsImageRenderer mirrors image after applying filter

I'm trying to apply filters on images.
Applying the filter works great, but it mirrors the image vertically.
The bottom row of images calls the filter function after init.
The main image at the top, gets the filter applied after pressing on one at the bottom
The ciFilter is CIFilter.sepiaTone().
func applyFilter(image: UIImage) -> UIImage? {
let rect = CGRect(origin: CGPoint.zero, size: image.size)
let renderer = UIGraphicsImageRenderer(bounds: rect)
ciFilter.setValue(CIImage(image: image), forKey: kCIInputImageKey)
let image = renderer.image { context in
let ciContext = CIContext(cgContext: context.cgContext, options: nil)
if let outputImage = ciFilter.outputImage {
ciContext.draw(outputImage, in: rect, from: rect)
}
}
return image
}
And after applying the filter twice, the new image gets zoomed in.
Here are some screenshots.
You don't need to use UIGraphicsImageRenderer.
You can directly get the image from CIContext.
func applyFilter(image: UIImage) -> UIImage? {
ciFilter.setValue(CIImage(image: image), forKey: kCIInputImageKey)
guard let ciImage = ciFilter.outputImage else {
return nil
}
let outputCGImage = CIContext().createCGImage(ciImage, from: ciImage.extent)
guard let _ = outputCGImage else { return nil }
let filteredImage = UIImage(cgImage: outputCGImage!, scale: image.scale, orientation: image.imageOrientation)
return filteredImage
}

GussianBlur image with scaleAspectFill

I want to use Gaussianblur on an image, but also i want to use my imageview scalemode's scaleAspectFill.
I am blurring my image with following code:
func getImageWithBlur(image: UIImage) -> UIImage?{
let context = CIContext(options: nil)
guard let currentFilter = CIFilter(name: "CIGaussianBlur") else {
return nil
}
let beginImage = CIImage(image: image)
currentFilter.setValue(beginImage, forKey: kCIInputImageKey)
currentFilter.setValue(6.5, forKey: "inputRadius")
guard let output = currentFilter.outputImage, let cgimg = context.createCGImage(output, from: output.extent) else {
return nil
}
return UIImage(cgImage: cgimg)
}
But this is not working with scaleAspectFill mode.
They are both same images. But when i blur the second image, as you can see it is adding space from top and bottom. What should i do for fit well when using blur image too?
When you apply a CIGaussianBlur filter, the resulting image is larger than the original. This is because the blur is applied to the edges.
To get back an image at the original size, you need to use the original image extent.
Note, though, the blur is applied both inside and outside the edge, so if you clip only to the original extent, the edge will effectively "fade out". To avoid the edges altogether, you'll need to clip farther in.
Here is an example, using a UIImage extension to blur either with or without blurred edges:
extension UIImage {
func blurredImageWithBlurredEdges(inputRadius: CGFloat) -> UIImage? {
guard let currentFilter = CIFilter(name: "CIGaussianBlur") else {
return nil
}
guard let beginImage = CIImage(image: self) else {
return nil
}
currentFilter.setValue(beginImage, forKey: kCIInputImageKey)
currentFilter.setValue(inputRadius, forKey: "inputRadius")
guard let output = currentFilter.outputImage else {
return nil
}
// UIKit and UIImageView .contentMode doesn't play well with
// CIImage only, so we need to back the return UIImage with a CGImage
let context = CIContext()
// cropping rect because blur changed size of image
guard let final = context.createCGImage(output, from: beginImage.extent) else {
return nil
}
return UIImage(cgImage: final)
}
func blurredImageWithClippedEdges(inputRadius: CGFloat) -> UIImage? {
guard let currentFilter = CIFilter(name: "CIGaussianBlur") else {
return nil
}
guard let beginImage = CIImage(image: self) else {
return nil
}
currentFilter.setValue(beginImage, forKey: kCIInputImageKey)
currentFilter.setValue(inputRadius, forKey: "inputRadius")
guard let output = currentFilter.outputImage else {
return nil
}
// UIKit and UIImageView .contentMode doesn't play well with
// CIImage only, so we need to back the return UIImage with a CGImage
let context = CIContext()
// cropping rect because blur changed size of image
// to clear the blurred edges, use a fromRect that is
// the original image extent insetBy (negative) 1/2 of new extent origins
let newExtent = beginImage.extent.insetBy(dx: -output.extent.origin.x * 0.5, dy: -output.extent.origin.y * 0.5)
guard let final = context.createCGImage(output, from: newExtent) else {
return nil
}
return UIImage(cgImage: final)
}
}
and here is an example View Controller showing how to use it, and the different results:
class BlurTestViewController: UIViewController {
let imgViewA = UIImageView()
let imgViewB = UIImageView()
let imgViewC = UIImageView()
override func viewDidLoad() {
super.viewDidLoad()
let stackView = UIStackView()
stackView.axis = .vertical
stackView.alignment = .fill
stackView.distribution = .fillEqually
stackView.spacing = 8
stackView.translatesAutoresizingMaskIntoConstraints = false
view.addSubview(stackView)
NSLayoutConstraint.activate([
stackView.widthAnchor.constraint(equalToConstant: 200.0),
stackView.centerXAnchor.constraint(equalTo: view.centerXAnchor),
stackView.centerYAnchor.constraint(equalTo: view.centerYAnchor),
])
[imgViewA, imgViewB, imgViewC].forEach { v in
v.backgroundColor = .red
v.contentMode = .scaleAspectFill
v.clipsToBounds = true
// square image views (1:1 ratio)
v.heightAnchor.constraint(equalTo: v.widthAnchor, multiplier: 1.0).isActive = true
stackView.addArrangedSubview(v)
}
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
guard let imgA = UIImage(named: "bkg640x360") else {
fatalError("Could not load image!")
}
guard let imgB = imgA.blurredImageWithBlurredEdges(inputRadius: 6.5) else {
fatalError("Could not create Blurred image with Blurred Edges")
}
guard let imgC = imgA.blurredImageWithClippedEdges(inputRadius: 6.5) else {
fatalError("Could not create Blurred image with Clipped Edges")
}
imgViewA.image = imgA
imgViewB.image = imgB
imgViewC.image = imgC
}
}
Using this original 640x360 image, with 200 x 200 image views:
We get this output:
Also worth mentioning - although I'm sure you've already noticed - these functions run very slowly on the simulator, but very quickly on an actual device.
I believe your issue is that the convolution kernel of the CIFilter is creating additional data as it applies the blur to the edges of the image. The CIContext isn't a strictly bounded space and is able to use area around the image to fully process all output. So rather than using output.extent in createCGImage, use the size of the input image (converted to a CGRect).
To account for the blurred alpha channel along the image edge, you can use the CIImage.unpremultiplyingAlpha().settingAlphaOne() methods to flatten the image before returning.
func getImageWithBlur(image: UIImage) -> UIImage? {
let context = CIContext(options: nil)
guard let currentFilter = CIFilter(name: "CIGaussianBlur") else { return nil }
let beginImage = CIImage(image: image)
currentFilter.setValue(beginImage, forKey: kCIInputImageKey)
currentFilter.setValue(6.5, forKey: "inputRadius")
let rect = CGRect(x: 0.0, y: 0.0, width: image.size.width, height: image.size.height)
guard let output = currentFilter.outputImage?.unpremultiplyingAlpha().settingAlphaOne(in: rect) else { return nil }
guard let cgimg = context.createCGImage(output, from: rect) else { return nil }
print("image.size: \(image.size)")
print("output.extent: \(output.extent)")
return UIImage(cgImage: cgimg)
}

Applying CIGaussianBlur to UIImage not working properly

I want a blur effect to UIImage as slider value changes.
I am using the CIGaussianBlur filter to blur the image.
The code is as follows
func applyBlurFilter(aCIImage: CIImage, val: CGFloat) -> UIImage {
let clampFilter = CIFilter(name: "CIAffineClamp")
clampFilter?.setDefaults()
clampFilter?.setValue(aCIImage, forKey: kCIInputImageKey)
let blurFilter = CIFilter(name: "CIGaussianBlur")
blurFilter?.setValue(clampFilter?.outputImage, forKey: kCIInputImageKey)
blurFilter?.setValue(val, forKey: kCIInputRadiusKey)
let rect = aCIImage.extent
if let output = blurFilter?.outputImage {
if let cgimg = self.context.createCGImage(output, from: rect) {
let processedImage = UIImage(cgImage: cgimg)
return processedImage
}
}
return image ?? self.image
}
Note: I've also tried the below code using CICrop filter
func applyBlurFilter(beginImage: CIImage, value: Float) -> UIImage? {
let currentFilter = CIFilter(name: "CIGaussianBlur")
currentFilter?.setValue(beginImage, forKey: kCIInputImageKey)
currentFilter?.setValue(value, forKey: kCIInputRadiusKey)
let cropFilter = CIFilter(name: "CICrop")
cropFilter?.setValue(currentFilter!.outputImage, forKey: kCIInputImageKey)
cropFilter?.setValue(CIVector(cgRect: beginImage!.extent), forKey: "inputRectangle")
let output = cropFilter?.outputImage
let context = CIContext(options: nil)
let cgimg = self.context.createCGImage(output!, from: beginImage!.extent)
let processedImage = UIImage(cgImage: cgimg!)
return processedImage
}
The code works perfectly with some images, but with bigger images, while applying the blur filter to the image, the image's right edges get transparent which I don't want.
Note: I am running this on device
What am I doing wrong here, I have no idea
The image whose right edge gets transparant
Result after applying GaussianBlur to the above image
Thanks!!
Well, you're doing something wrong somewhere. The absolute best advice I can give you in your career is to create a small test project to experiment when you have such an issue - I've done this for 15 years in the Apple world, and its been of enormous help.
I created a project here so you don't have to (this time). I downloaded the image, placed it in an ImageView, and it looked perfect (as expected). I then used your code (except I had to create a context, and guess at radius values, then ran it. Image looks perfect with a blur of 0, 5, 10, and 25.
Obviously the issue is something else you are doing. What I suggest is that you keep adding to the test project until you can find what step is the problem (context? other image processing?)
This is the entirety of my code:
class ViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
let im1 = UIImage(named: "Image.jpg")!
let cim = CIImage(image: im1)!
let im2 = applyBlurFilter(aCIImage: cim, val: 25)
let iv = UIImageView(image: im2)
iv.contentMode = .scaleToFill
self.view.addSubview(iv)
}
func applyBlurFilter(aCIImage: CIImage, val: CGFloat) -> UIImage {
let clampFilter = CIFilter(name: "CIAffineClamp")
clampFilter?.setDefaults()
clampFilter?.setValue(aCIImage, forKey: kCIInputImageKey)
let blurFilter = CIFilter(name: "CIGaussianBlur")
blurFilter?.setValue(clampFilter?.outputImage, forKey: kCIInputImageKey)
blurFilter?.setValue(val, forKey: kCIInputRadiusKey)
let rect = aCIImage.extent
if let output = blurFilter?.outputImage {
let context = CIContext(options: nil)
if let cgimg = context.createCGImage(output, from: rect) {
let processedImage = UIImage(cgImage: cgimg)
return processedImage
}
}
fatalError()
}
}

UIImageView contentMode not working after blur effect application

I'm attempting to set the image property of a UIImageView to an image I'm blurring with CoreImage. The code works perfectly with an unfiltered image, but when I set the background image to the filtered image, contentMode appears to stop working for the UIImageView -- instead of aspect filling, the image becomes vertically stretched. In addition to setting contentMode in code, I also set it on the storyboard but the result was the same.
I'm using Swift 2 / Xcode 7.
func updateBackgroundImage(image: UIImage) {
backgroundImage.contentMode = .ScaleAspectFill
backgroundImage.layer.masksToBounds = true
backgroundImage.image = blurImage(image)
}
func blurImage(image: UIImage) -> UIImage {
let imageToBlur = CIImage(image: image)!
let blurfilter = CIFilter(name: "CIGaussianBlur")!
blurfilter.setValue(10, forKey: kCIInputRadiusKey)
blurfilter.setValue(imageToBlur, forKey: "inputImage")
let resultImage = blurfilter.valueForKey("outputImage") as! CIImage
let croppedImage: CIImage = resultImage.imageByCroppingToRect(CGRectMake(0, 0, imageToBlur.extent.size.width, imageToBlur.extent.size.height))
let blurredImage = UIImage(CIImage: croppedImage)
return blurredImage
}
Why is filtering with CIImage causing my image to ignore contentMode and how do I fix the issue?
Solution is to replace your line:
let blurredImage = UIImage(CIImage: croppedImage)
with these 2 lines:
let context = CIContext(options: nil)
let blurredImage = UIImage (CGImage: context.createCGImage(croppedImage, fromRect: croppedImage.extent))
So your full blurImage function would look like this:
func blurImage(image: UIImage) -> UIImage {
let imageToBlur = CIImage(image: image)!
let blurfilter = CIFilter(name: "CIGaussianBlur")!
blurfilter.setValue(10, forKey: kCIInputRadiusKey)
blurfilter.setValue(imageToBlur, forKey: "inputImage")
let resultImage = blurfilter.valueForKey("outputImage") as! CIImage
let croppedImage: CIImage = resultImage.imageByCroppingToRect(CGRectMake(0, 0, imageToBlur.extent.size.width, imageToBlur.extent.size.height))
let context = CIContext(options: nil)
let blurredImage = UIImage (CGImage: context.createCGImage(croppedImage, fromRect: croppedImage.extent))
return blurredImage
}

Why iOS Core Image QR Code generator is not working?

I am trying to generate QR Code using iOS Core Image API:
func createQRForString(#data : NSData)->CIImage!{
var qrFilter = CIFilter(name: "CIQRCodeGenerator")
qrFilter.setValue(data, forKey: "inputMessage")
qrFilter.setValue("H", forKey:"inputCorrectionLevel")
return qrFilter.outputImage
}
func createNonInterpolatedImageFromCIImage(image : CIImage,withScale scale:CGFloat)->UIImage{
let cgImage = CIContext(options: nil).createCGImage(image, fromRect: image.extent())
UIGraphicsBeginImageContext(CGSizeMake(image.extent().size.width*scale, image.extent().size.height*scale))
let context = UIGraphicsGetCurrentContext()
CGContextSetInterpolationQuality(context, kCGInterpolationNone)
let scaledImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return scaledImage
}
And the following code in viewDidLoad method :
let data = "Hello World".dataUsingEncoding(NSUTF8StringEncoding)
if let image=createQRForString(data: data!){
let uiimage = createNonInterpolatedImageFromCIImage(image, withScale: 1.0)
imageView.image = uiimage
}
else{
println("Error loading image")
}
}
But it neither prints "Error" nor shows qr code in the imageView.
Here is the solution:
override func viewDidLoad() {
super.viewDidLoad()
self.imgView.image = generateCode()
}
func generateCode() -> UIImage {
let filter = CIFilter(name: "CIQRCodeGenerator")
let data = "Hello World".dataUsingEncoding(NSUTF8StringEncoding)
filter.setValue("H", forKey:"inputCorrectionLevel")
filter.setValue(data, forKey:"inputMessage")
let outputImage = filter.outputImage
let context = CIContext(options:nil)
let cgImage = context.createCGImage(outputImage, fromRect:outputImage.extent())
let image = UIImage(CGImage:cgImage, scale:1.0, orientation:UIImageOrientation.Up)
let resized = resizeImage(image!, withQuality:kCGInterpolationNone, rate:5.0)
return resized
}
func resizeImage(image: UIImage, withQuality quality: CGInterpolationQuality, rate: CGFloat) -> UIImage {
let width = image.size.width * rate
let height = image.size.height * rate
UIGraphicsBeginImageContextWithOptions(CGSizeMake(width, height), true, 0)
let context = UIGraphicsGetCurrentContext()
CGContextSetInterpolationQuality(context, quality)
image.drawInRect(CGRectMake(0, 0, width, height))
let resized = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return resized;
}

Resources