Applying CIGaussianBlur to UIImage not working properly - ios

I want a blur effect to UIImage as slider value changes.
I am using the CIGaussianBlur filter to blur the image.
The code is as follows
// Blurs `aCIImage` with CIGaussianBlur at slider-driven radius `val`.
func applyBlurFilter(aCIImage: CIImage, val: CGFloat) -> UIImage {
// CIAffineClamp extends the border pixels to infinity so the Gaussian blur
// has opaque pixels to sample past the edge (the usual fix for fading edges).
let clampFilter = CIFilter(name: "CIAffineClamp")
clampFilter?.setDefaults()
clampFilter?.setValue(aCIImage, forKey: kCIInputImageKey)
// Blur the clamped image.
let blurFilter = CIFilter(name: "CIGaussianBlur")
blurFilter?.setValue(clampFilter?.outputImage, forKey: kCIInputImageKey)
blurFilter?.setValue(val, forKey: kCIInputRadiusKey)
// Render only the original extent — the clamped image is infinite.
let rect = aCIImage.extent
if let output = blurFilter?.outputImage {
if let cgimg = self.context.createCGImage(output, from: rect) {
let processedImage = UIImage(cgImage: cgimg)
return processedImage
}
}
// NOTE(review): `image`, `self.image`, and `self.context` are instance
// members not shown in this snippet — confirm they exist and that
// `self.context` is created once, not per call.
return image ?? self.image
}
Note: I've also tried the below code using CICrop filter
// Alternative attempt: blur, then crop with CICrop instead of pre-clamping.
func applyBlurFilter(beginImage: CIImage, value: Float) -> UIImage? {
let currentFilter = CIFilter(name: "CIGaussianBlur")
currentFilter?.setValue(beginImage, forKey: kCIInputImageKey)
currentFilter?.setValue(value, forKey: kCIInputRadiusKey)
// Crop the (infinite-extent) blur output back to the source rectangle.
let cropFilter = CIFilter(name: "CICrop")
cropFilter?.setValue(currentFilter!.outputImage, forKey: kCIInputImageKey)
// NOTE(review): `beginImage` is declared non-optional, so `beginImage!`
// should not compile as written — presumably the real parameter is
// `CIImage?`; verify against the actual project code.
cropFilter?.setValue(CIVector(cgRect: beginImage!.extent), forKey: "inputRectangle")
let output = cropFilter?.outputImage
// NOTE(review): this local `context` is never used; the render below goes
// through `self.context`, an instance member not shown here.
let context = CIContext(options: nil)
let cgimg = self.context.createCGImage(output!, from: beginImage!.extent)
let processedImage = UIImage(cgImage: cgimg!)
return processedImage
}
The code works perfectly with some images, but with bigger images, while applying the blur filter to the image, the image's right edges get transparent which I don't want.
Note: I am running this on device
What am I doing wrong here, I have no idea
The image whose right edge gets transparent
Result after applying GaussianBlur to the above image
Thanks!!

Well, you're doing something wrong somewhere. The absolute best advice I can give you in your career is to create a small test project to experiment when you have such an issue - I've done this for 15 years in the Apple world, and it's been of enormous help.
I created a project here so you don't have to (this time). I downloaded the image, placed it in an ImageView, and it looked perfect (as expected). I then used your code (except I had to create a context, and guess at radius values), then ran it. Image looks perfect with a blur of 0, 5, 10, and 25.
Obviously the issue is something else you are doing. What I suggest is that you keep adding to the test project until you can find what step is the problem (context? other image processing?)
This is the entirety of my code:
class ViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        // Load the test image, blur it heavily, and show the result.
        let original = UIImage(named: "Image.jpg")!
        let ciImage = CIImage(image: original)!
        let blurred = applyBlurFilter(aCIImage: ciImage, val: 25)
        let imageView = UIImageView(image: blurred)
        imageView.contentMode = .scaleToFill
        self.view.addSubview(imageView)
    }

    /// Blurs `aCIImage` with CIGaussianBlur, pre-clamping the edges via
    /// CIAffineClamp so the border pixels extend to infinity and the blur
    /// cannot fade into transparency at the edges.
    /// - Parameters:
    ///   - aCIImage: the source image.
    ///   - val: the Gaussian blur radius.
    /// - Returns: the blurred image rendered at the source extent.
    func applyBlurFilter(aCIImage: CIImage, val: CGFloat) -> UIImage {
        let clamp = CIFilter(name: "CIAffineClamp")
        clamp?.setDefaults()
        clamp?.setValue(aCIImage, forKey: kCIInputImageKey)

        let blur = CIFilter(name: "CIGaussianBlur")
        blur?.setValue(clamp?.outputImage, forKey: kCIInputImageKey)
        blur?.setValue(val, forKey: kCIInputRadiusKey)

        // Render only the original extent; the clamped image is infinite.
        let originalExtent = aCIImage.extent
        guard let blurredOutput = blur?.outputImage else { fatalError() }
        let renderer = CIContext(options: nil)
        guard let cgImage = renderer.createCGImage(blurredOutput, from: originalExtent) else { fatalError() }
        return UIImage(cgImage: cgImage)
    }
}

Related

How to show the image in a way that the pixels are shown cascading from top to bottom?

I need to display a UIImage on splash screen/ launch screen where the image will be shown such that the pixels look like they are cascading from top to bottom to display the full image. I do not have any snapshot of such animation but I have created the pixelated image as below:
// Load the backing CGImage of the "back" asset; bail out if missing.
guard let currentCGImage = UIImage(named: "back")?.cgImage else {
return
}
// Pixellate with a 12-point block size.
let currentCIImage = CIImage(cgImage: currentCGImage)
let filter = CIFilter(name: "CIPixellate")
filter?.setValue(currentCIImage, forKey: kCIInputImageKey)
filter?.setValue(12, forKey: kCIInputScaleKey)
guard let outputImage = filter?.outputImage else { return }
// Render the filtered CIImage to a CGImage and display it.
let context = CIContext()
if let cgimg = context.createCGImage(outputImage, from: outputImage.extent) {
let processedImage = UIImage(cgImage: cgimg)
self.imgView.image = processedImage
}
Does anybody have any idea of such an animation?

Any way to speed this UILabel blur code?

Here is the code and it is really slow, like seconds slow to render about 25 labels.
extension UILabel{
// Removes any previously-added subviews tagged 99999.
// NOTE(review): blur() below never adds a tagged subview — it sets
// layer.contents — so confirm where the 99999 views come from.
func deBlur(){
for subview in self.subviews {
if (subview.tag == 99999) {
subview.removeFromSuperview()
}
}
}
// Snapshots the label, blurs the snapshot with CIGaussianBlur, and installs
// the result as the layer's contents.
func blur(){
let blurRadius:CGFloat = 5.1
// Render the label into a bitmap snapshot.
UIGraphicsBeginImageContext(bounds.size)
layer.render(in: UIGraphicsGetCurrentContext()!)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
let blurFilter = CIFilter(name: "CIGaussianBlur")
blurFilter?.setDefaults()
let imageToBlur = CIImage(cgImage: (image?.cgImage)!)
blurFilter?.setValue(imageToBlur, forKey: kCIInputImageKey)
blurFilter?.setValue(blurRadius, forKey: "inputRadius")
let outputImage: CIImage? = blurFilter?.outputImage
// NOTE(review): a fresh CIContext is created on every call — context
// creation plus the synchronous render in createCGImage is the likely
// source of the slowness; consider sharing one context.
let context = CIContext(options: nil)
let cgimg = context.createCGImage(outputImage!, from: (outputImage?.extent)!)
layer.contents = cgimg!
}
}
Any image / UIGraphics gurus know why this is so sloooow?
UPDATE: This line of code is the culprit. However, it is also needed to create the blur effect.
let cgimg = UILabel.context.createCGImage(outputImage!, from: (outputImage?.extent)!)

Get UIImage from function

I wrote a function to apply the filter to a photo. Why the function always returns nil?
// Applies `filter` (defined outside this snippet) to `image` and returns
// the result, or nil when the filter produces no CIImage output.
func getImage() -> UIImage? {
let openGLContext = EAGLContext(api: .openGLES2)
let context = CIContext(eaglContext: openGLContext!)
// NOTE(review): this creates a UIImage, not a CIImage — kCIInputImageKey
// expects a CIImage, so the filter likely yields no output, which would
// explain the constant nil return.
let coreImage = UIImage(cgImage: image.cgImage!)
filter.setValue(coreImage, forKey: kCIInputImageKey)
filter.setValue(1, forKey: kCIInputContrastKey)
if let outputImage = filter.value(forKey: kCIOutputImageKey) as? CIImage {
let output = context.createCGImage(outputImage, from: outputImage.extent)
return UIImage(cgImage: output!)
}
return nil
}
When you are working with Core Image in iOS, you have to consider Core Image classes such as CIFilter, CIContext, CIVector, and CIColor, and the input image should be a CIImage, which holds the image data. It can be created from a UIImage.
Note: You haven't mentioned your filter definition or the custom filter that you are using. From your code, I can see you are trying to change the contrast of an input image. So, I am testing the code using the CIColorControls filter to adjust the contrast.
/// Applies a CIColorControls contrast adjustment (contrast = 5) to the
/// given image.
/// - Parameter inputImage: the image to filter.
/// - Returns: the adjusted image, or nil if filtering or rendering fails.
func getImage(inputImage: UIImage) -> UIImage? {
let openGLContext = EAGLContext(api: .openGLES2)
let context = CIContext(eaglContext: openGLContext!)
let filter = CIFilter(name: "CIColorControls")
// Fix: use the `inputImage` parameter — the original read
// `filterImage.image!` from the view and ignored its argument entirely.
let coreImage = CIImage(image: inputImage)
filter?.setValue(coreImage, forKey: kCIInputImageKey)
filter?.setValue(5, forKey: kCIInputContrastKey)
// `outputImage` is the idiomatic accessor; value(forKey:) + cast is not
// needed. Avoid force-unwrapping the render result.
if let outputImage = filter?.outputImage,
let output = context.createCGImage(outputImage, from: outputImage.extent) {
return UIImage(cgImage: output)
}
return nil
}
You can call the above func like below.
filterImage.image = getImage(inputImage: filterImage.image!)
Output:
You never enter the if in that case. Maybe filter.value returns nil OR it isn't a CIImage it returns. Extract it like that and set a breakpoint at if or add a print between:
// Pull the output value into a local first so it can be inspected with a
// breakpoint or print before the conditional cast.
let value = filter.value(forKey: kCIOutputImageKey)
if let outputImage = value as? CIImage {
let output = context.createCGImage(outputImage, from: outputImage.extent)
return UIImage(cgImage: output!)
}
If you need more information you have to share the filter definition and usage code.

Swift 3.0 CIEdgeWork filter not working

I am trying to apply the CIEdgeWork filter to my inputImage and place the filtered image into myImage (which is a UIImageView). I'm not getting any result (just a blank screen). This same style of code works with other filters like CIEdges. Anyone know what I'm doing wrong? Testing on iOS 10 devices.
let context = CIContext(options: nil)
// Apply CIEdgeWork (radius 3) to `inputImage` and show it in `myImage`;
// both are defined outside this snippet.
if let edgeWorkFilter = CIFilter(name: "CIEdgeWork") {
let beginImage = CIImage(image: inputImage)
edgeWorkFilter.setValue(beginImage, forKey: kCIInputImageKey)
edgeWorkFilter.setValue(3.0, forKey: kCIInputRadiusKey)
if let output = edgeWorkFilter.outputImage {
if let cgimg = context.createCGImage(output, from: output.extent) {
// NOTE(review): the result looked blank on a white background; the
// accepted fix was to use a non-white background color.
let processedImage = UIImage(cgImage: cgimg)
myImage.image = processedImage
}
}
}
Found the solution, set background color of the image to something other than White.

UIImageView contentMode not working after blur effect application

I'm attempting to set the image property of a UIImageView to an image I'm blurring with CoreImage. The code works perfectly with an unfiltered image, but when I set the background image to the filtered image, contentMode appears to stop working for the UIImageView -- instead of aspect filling, the image becomes vertically stretched. In addition to setting contentMode in code, I also set it on the storyboard but the result was the same.
I'm using Swift 2 / Xcode 7.
// Blurs `image` and installs it as the aspect-filled background (Swift 2).
func updateBackgroundImage(image: UIImage) {
backgroundImage.contentMode = .ScaleAspectFill
backgroundImage.layer.masksToBounds = true
backgroundImage.image = blurImage(image)
}
// Gaussian-blurs `image` (radius 10) and crops back to the original extent
// (Swift 2).
// NOTE(review): UIImage(CIImage:) wraps the CIImage without a backing
// CGImage — the question reports contentMode misbehaving with exactly this
// image, and the fix below renders through a CIContext instead.
func blurImage(image: UIImage) -> UIImage {
let imageToBlur = CIImage(image: image)!
let blurfilter = CIFilter(name: "CIGaussianBlur")!
blurfilter.setValue(10, forKey: kCIInputRadiusKey)
blurfilter.setValue(imageToBlur, forKey: "inputImage")
let resultImage = blurfilter.valueForKey("outputImage") as! CIImage
// Crop the infinite-extent blur output back to the source size.
let croppedImage: CIImage = resultImage.imageByCroppingToRect(CGRectMake(0, 0, imageToBlur.extent.size.width, imageToBlur.extent.size.height))
let blurredImage = UIImage(CIImage: croppedImage)
return blurredImage
}
Why is filtering with CIImage causing my image to ignore contentMode and how do I fix the issue?
Solution is to replace your line:
let blurredImage = UIImage(CIImage: croppedImage)
with these 2 lines:
let context = CIContext(options: nil)
let blurredImage = UIImage (CGImage: context.createCGImage(croppedImage, fromRect: croppedImage.extent))
So your full blurImage function would look like this:
// Swift 2: blurs `image` (radius 10), crops to the source extent, and —
// crucially — renders through a CIContext to a CGImage so the resulting
// UIImage has a backing CGImage and respects contentMode.
func blurImage(image: UIImage) -> UIImage {
let imageToBlur = CIImage(image: image)!
let blurfilter = CIFilter(name: "CIGaussianBlur")!
blurfilter.setValue(10, forKey: kCIInputRadiusKey)
blurfilter.setValue(imageToBlur, forKey: "inputImage")
let resultImage = blurfilter.valueForKey("outputImage") as! CIImage
// Crop the infinite-extent blur output back to the source size.
let croppedImage: CIImage = resultImage.imageByCroppingToRect(CGRectMake(0, 0, imageToBlur.extent.size.width, imageToBlur.extent.size.height))
// Rendering to CGImage (instead of UIImage(CIImage:)) is the actual fix.
let context = CIContext(options: nil)
let blurredImage = UIImage (CGImage: context.createCGImage(croppedImage, fromRect: croppedImage.extent))
return blurredImage
}

Resources