I am building an iOS app based on image operations.
I want to increase and decrease the brightness of an image with a slider.
I have used this code:
@IBOutlet var imageView: UIImageView!
@IBOutlet var uiSlider: UISlider!

override func viewDidLoad() {
    super.viewDidLoad()
    var image = UIImage(named: "54715869.jpg")
    imageView.image = image
    uiSlider.minimumValue = -0.2
    uiSlider.maximumValue = 0.2
    uiSlider.value = 0.0
    uiSlider.maximumTrackTintColor = UIColor(red: 0.1, green: 0.7, blue: 0, alpha: 1)
    uiSlider.minimumTrackTintColor = UIColor.blackColor()
    uiSlider.addTarget(self, action: "brightnesssliderMove:", forControlEvents: UIControlEvents.TouchUpInside)
    uiSlider.addTarget(self, action: "brightnesssliderMove:", forControlEvents: UIControlEvents.TouchUpOutside)
}
func brightnesssliderMove(sender: UISlider) {
    var filter = CIFilter(name: "CIColorControls")
    filter.setValue(NSNumber(float: sender.value), forKey: "inputBrightness")
    var image = self.imageView.image
    var rawimgData = CIImage(image: image)
    filter.setValue(rawimgData, forKey: "inputImage")
    var outpuImage = filter.valueForKey("outputImage")
    imageView.image = UIImage(CIImage: outpuImage as CIImage)
}
Now my question: when I move the slider, the brightness of the image does change, but only the first time I change the slider position.
When I change the position of the slider again, I get this error:
fatal error: unexpectedly found nil while unwrapping an Optional value.
The error occurs at this line:
imageView.image = UIImage(CIImage: outpuImage as CIImage)
At that point rawimgData is nil.
I found the answer to my question. Here is how I did it:
import CoreImage

class ViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    var aCIImage = CIImage()
    var contrastFilter: CIFilter!
    var brightnessFilter: CIFilter!
    var context = CIContext()
    var outputImage = CIImage()
    var newUIImage = UIImage()

    override func viewDidLoad() {
        super.viewDidLoad()
        let aUIImage = imageView.image
        let aCGImage = aUIImage?.CGImage
        aCIImage = CIImage(CGImage: aCGImage)
        context = CIContext(options: nil)
        contrastFilter = CIFilter(name: "CIColorControls")
        contrastFilter.setValue(aCIImage, forKey: "inputImage")
        brightnessFilter = CIFilter(name: "CIColorControls")
        brightnessFilter.setValue(aCIImage, forKey: "inputImage")
    }
    func sliderContrastValueChanged(sender: UISlider) {
        contrastFilter.setValue(NSNumber(float: sender.value), forKey: "inputContrast")
        outputImage = contrastFilter.outputImage
        let cgimg = context.createCGImage(outputImage, fromRect: outputImage.extent())
        newUIImage = UIImage(CGImage: cgimg)!
        imageView.image = newUIImage
    }

    func sliderValueChanged(sender: UISlider) {
        brightnessFilter.setValue(NSNumber(float: sender.value), forKey: "inputBrightness")
        outputImage = brightnessFilter.outputImage
        let imageRef = context.createCGImage(outputImage, fromRect: outputImage.extent())
        newUIImage = UIImage(CGImage: imageRef)!
        imageView.image = newUIImage
    }
}
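The two handlers above still have to be wired to the sliders. A minimal sketch, assuming two slider outlets named contrastSlider and brightnessSlider (not shown in the code above) and the same pre-Swift-3 string-selector style used in the question:

contrastSlider.addTarget(self, action: "sliderContrastValueChanged:", forControlEvents: UIControlEvents.ValueChanged)
brightnessSlider.addTarget(self, action: "sliderValueChanged:", forControlEvents: UIControlEvents.ValueChanged)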
You can use AlamofireImage; here's an example of how you can do it:

@IBAction func brightnessChanged(sender: UISlider) {
    let filterParameters = ["inputBrightness": sender.value]
    imageView.image = originalImage.af_imageWithAppliedCoreImageFilter("CIColorControls", filterParameters: filterParameters)
}
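Note that originalImage is not defined in this snippet; presumably it is the unfiltered source image kept as a property, so that each slider change filters the original rather than an already-filtered result. A minimal sketch of that assumption:

var originalImage: UIImage!

override func viewDidLoad() {
    super.viewDidLoad()
    // Keep the unfiltered source so every slider change starts from the original image.
    originalImage = imageView.image
}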
The Photos app's editing adjustments provide a real-time view of the adjustments as they are applied. I wasn't able to find any samples of how to do this. All the examples seem to show that you apply the filters through a pipeline of sorts and then take the resulting image and update the screen with the result. See the code below.
Photos seems to show the adjustment applied directly to the onscreen image. How do they achieve this?
func editImage(inputImage: CGImage) {
    DispatchQueue.global().async {
        let beginImage = CIImage(cgImage: inputImage)
        guard let exposureOutput = self.exposureFilter(beginImage, ev: self.brightness) else {
            return
        }
        guard let vibranceOutput = self.vibranceFilter(exposureOutput, amount: self.vibranceAmount) else {
            return
        }
        guard let unsharpMaskOutput = self.unsharpMaskFilter(vibranceOutput, intensity: self.unsharpMaskIntensity, radius: self.unsharpMaskRadius) else {
            return
        }
        guard let sharpnessOutput = self.sharpenFilter(unsharpMaskOutput, sharpness: self.unsharpMaskIntensity) else {
            return
        }
        if let cgimg = self.context.createCGImage(sharpnessOutput, from: vibranceOutput.extent) {
            DispatchQueue.main.async {
                self.cgImage = cgimg
            }
        }
    }
}
OK, I just found the answer: use MTKView. It is working fine, except for getting the image to fill the view correctly!
For the benefit of others, here are the basics... I have yet to figure out how to position the image correctly in the view, but I can see the filter applied in real time!
class ViewController: NSViewController, MTKViewDelegate {
    ....
    @objc dynamic var cgImage: CGImage? {
        didSet {
            if let cgimg = cgImage {
                ciImage = CIImage(cgImage: cgimg)
            }
        }
    }
    var ciImage: CIImage?

    // Metal resources
    var device: MTLDevice!
    var commandQueue: MTLCommandQueue!
    var sourceTexture: MTLTexture!
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    var context: CIContext!
    var textureLoader: MTKTextureLoader!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do view setup here.
        let metalView = MTKView()
        metalView.translatesAutoresizingMaskIntoConstraints = false
        self.imageView.addSubview(metalView)
        NSLayoutConstraint.activate([
            metalView.bottomAnchor.constraint(equalTo: view.bottomAnchor),
            metalView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
            metalView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
            metalView.topAnchor.constraint(equalTo: view.topAnchor)
        ])
        device = MTLCreateSystemDefaultDevice()
        commandQueue = device.makeCommandQueue()
        metalView.delegate = self
        metalView.device = device
        metalView.framebufferOnly = false
        context = CIContext()
        textureLoader = MTKTextureLoader(device: device)
    }

    public func draw(in view: MTKView) {
        if let ciImage = self.ciImage {
            if let currentDrawable = view.currentDrawable {
                let commandBuffer = commandQueue.makeCommandBuffer()
                let inputImage = ciImage
                exposureFilter.setValue(inputImage, forKey: kCIInputImageKey)
                exposureFilter.setValue(ev, forKey: kCIInputEVKey)
                context.render(exposureFilter.outputImage!,
                               to: currentDrawable.texture,
                               commandBuffer: commandBuffer,
                               bounds: CGRect(origin: .zero, size: view.drawableSize),
                               colorSpace: colorSpace)
                commandBuffer?.present(currentDrawable)
                commandBuffer?.commit()
            }
        }
    }
}
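For the positioning problem mentioned above, one possible (untested) approach is to scale and center the CIImage before rendering so it aspect-fits the drawable. This is only a sketch against the code above, not the poster's solution:

// Inside draw(in:), in place of the context.render(...) call above:
let output = exposureFilter.outputImage!
let drawableSize = view.drawableSize
// Scale to fit the drawable while preserving the aspect ratio.
let scale = min(drawableSize.width / output.extent.width,
                drawableSize.height / output.extent.height)
let scaled = output.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
// Translate so the scaled image is centered in the drawable.
let dx = (drawableSize.width - scaled.extent.width) / 2 - scaled.extent.origin.x
let dy = (drawableSize.height - scaled.extent.height) / 2 - scaled.extent.origin.y
let centered = scaled.transformed(by: CGAffineTransform(translationX: dx, y: dy))
context.render(centered,
               to: currentDrawable.texture,
               commandBuffer: commandBuffer,
               bounds: CGRect(origin: .zero, size: drawableSize),
               colorSpace: colorSpace)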
I am trying to change the contrast and brightness of an image using a slider, which I have created programmatically. When I try to vary the contrast with the slider, I get an error like:
reason: '-[UISlider floatValue]: unrecognized selector sent to instance 0x103c4ffa0'

func viewforslide() {
    vieew.frame = CGRect.init(x: 10, y: view.frame.size.height - 180, width: self.view.frame.size.width - 20, height: 40)
    vieew.backgroundColor = UIColor.lightGray
    vieew.layer.cornerRadius = vieew.frame.size.height / 2
    view.addSubview(vieew)
    createslider()
}

func createslider() {
    var sliderDemo = UISlider(frame: CGRect(x: 15, y: 5, width: vieew.frame.size.width - 30, height: 30))
    sliderDemo.minimumValue = 0.0
    sliderDemo.maximumValue = 1000.0
    sliderDemo.isContinuous = true
    sliderDemo.tintColor = UIColor.black
    sliderDemo.value = 500.0
    sliderDemo.addTarget(self, action: #selector(_sldComponentChangedValue), for: .valueChanged)
    vieew.addSubview(sliderDemo)
}

@IBAction func _sldComponentChangedValue(sender: UISlider) {
    // Set value to the nearest int
    sender.setValue(Float(roundf(sender.value)), animated: false)
    let newvalforslider = sender
    print("\(newvalforslider)")
    let displayinPercentage: Int = Int((sender.value / 200) * 10000)
    // contrastValueLabel.text = ("\(displayinPercentage)")
    self.imageView.image = results.enhancedImage
    let beginImage = CIImage(image: self.imageView.image!)
    let filter = CIFilter(name: "CIColorControls")
    filter?.setValue(beginImage, forKey: kCIInputImageKey)
    filter?.setValue(sender.value, forKey: kCIInputContrastKey)
    var filteredImage = filter?.outputImage
    var context = CIContext(options: nil)
    imageView.image = UIImage(cgImage: context.createCGImage(filteredImage!, from: (filteredImage?.extent)!)!)
    var sliderValue = sender.value
}
If anyone could help me with this, that would be great. Thanks in advance.
When adding the target method to sliderDemo you passed the selector (_sldComponentChangedValue), which is not right: your actual method receives an argument. That mismatch in the method name is why you get the "unrecognized selector sent to instance" crash when you move the slider.
Instead, do the following:
sliderDemo.addTarget(self, action: #selector(_sldComponentChangedValue(sender:)), for: .valueChanged);
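In other words, the selector and the action method must describe the same signature. A minimal sketch of the matching pair, assuming the method is exposed to Objective-C:

@objc func _sldComponentChangedValue(sender: UISlider) {
    // handle the slider value change here
}

sliderDemo.addTarget(self,
                     action: #selector(_sldComponentChangedValue(sender:)),
                     for: .valueChanged)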
@anisha If the previous answer seems complicated, try this code instead.
func increaseContrast(_ image: UIImage) -> UIImage {
    let inputImage = CIImage(image: image)!
    let parameters = [
        "inputContrast": NSNumber(value: 2) // set how much contrast you want
    ]
    let outputImage = inputImage.applyingFilter("CIColorControls", parameters: parameters)
    let context = CIContext(options: nil)
    let img = context.createCGImage(outputImage, from: outputImage.extent)!
    return UIImage(cgImage: img)
}
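A possible call site (illustrative only; imageView and its current image are assumed to exist in your view controller):

if let original = imageView.image {
    imageView.image = increaseContrast(original)
}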
Please don't judge me, I'm just learning Swift.
Recently I installed the MetalPetal framework and followed the instructions here:
https://github.com/MetalPetal/MetalPetal#example-code
But I get an error because of MTIContext. Maybe I have to declare something more for MetalPetal?
My Code:
import UIKit
import MetalPetal
import CoreGraphics

class ViewController: UIViewController {

    @IBOutlet weak var image1: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()

        weak var image: UIImage?
        image = image1.image
        var ciImage = CIImage(image: image!)
        var cgImage1 = convertCIImageToCGImage(inputImage: ciImage!)
        let imageFromCGImage = MTIImage(cgImage: cgImage1!)
        let inputImage = imageFromCGImage

        let filter = MTISaturationFilter()
        filter.saturation = 1
        filter.inputImage = inputImage
        let outputImage = filter.outputImage

        let context = MTIContext()

        do {
            try context.render(outputImage, to: pixelBuffer)
            var image3: CIImage? = try context.makeCIImage(from: outputImage!)
            //context.makeCIImage(from: image)
            //context.makeCGImage(from: image)
        } catch {
            print(error)
        }
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func convertCIImageToCGImage(inputImage: CIImage) -> CGImage? {
        let context = CIContext(options: nil)
        if let cgImage = context.createCGImage(inputImage, from: inputImage.extent) {
            return cgImage
        }
        return nil
    }
}
@YuAo
Input Image
An UIImage is based on either underlying Quartz image (can be retrieved with cgImage) or an underlying Core Image (can be retrieved from UIImage with ciImage).
MTIImage offers constructors for both types.
MTIContext
A MTIContext must be initialized with a device that can be retrieved by calling MTLCreateSystemDefaultDevice().
Rendering
A rendering to a pixel buffer is not needed. We can get the result by calling makeCGImage.
Test
I've taken your source code above and slightly adapted it to the aforementioned points.
I also added a second UIImageView to see the result of the filtering. I also changed the saturation to 0 to see if the filter works
If GPU or shaders are involved it makes sense to test on a real device and not on the simulator.
The result looks like this:
In the upper area you see the original jpg, in the lower area the filter is applied.
Swift
The simplified Swift code that produces this result looks like this:
override func viewDidLoad() {
    super.viewDidLoad()

    guard let image = UIImage(named: "regensburg.jpg") else { return }
    guard let cgImage = image.cgImage else { return }
    imageView1.image = image

    let filter = MTISaturationFilter()
    filter.saturation = 0
    filter.inputImage = MTIImage(cgImage: cgImage)

    if let device = MTLCreateSystemDefaultDevice(),
       let outputImage = filter.outputImage {
        do {
            let context = try MTIContext(device: device)
            let filteredImage = try context.makeCGImage(from: outputImage)
            imageView2.image = UIImage(cgImage: filteredImage)
        } catch {
            print(error)
        }
    }
}
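One design note: like CIContext, the MTIContext is comparatively expensive to create, so it is usually better to create it once and reuse it for every render instead of building a new one per call. A sketch of keeping it as a lazily created property:

// Sketch: create the context once and reuse it for all renders.
lazy var mtiContext: MTIContext? = {
    guard let device = MTLCreateSystemDefaultDevice() else { return nil }
    return try? MTIContext(device: device)
}()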
I am trying to apply a CIFilter to an image that is already displayed in the image view of my single-view application, but the filter does not appear to have been applied. Can someone please advise? The image looks exactly the same as before.
SWIFT:
let filter = CIFilter.init(name: "CIHueAdjust")
let context = CIContext()
var extent: CGRect!
var scaleFactor: CGFloat!

@IBOutlet weak var img: UIImageView!

override func viewDidLoad() {
    super.viewDidLoad()
    let ciImage = CIImage.init(image: img.image!)
    filter?.setDefaults()
    filter?.setValue(ciImage, forKeyPath: kCIInputImageKey)
    let result = filter?.outputImage
    print("result: \(result)")
    var image = UIImage.init(cgImage: context.createCGImage(result!, from: result!.extent)!)
    img.image = image
    img.image = image
}
CONSOLE:
result: Optional(<CIImage: 0x1c001a7a0 extent [0 0 2016 1512]>
affine [1 0 0 -1 0 1512] extent=[0 0 2016 1512] opaque
colormatch "sRGB IEC61966-2.1"_to_workingspace extent=[0 0 2016 1512] opaque
IOSurface 0x1c401a780(169) seed:1 YCC420f 601 alpha_one extent=[0 0 2016 1512] opaque
It's in the comments, but here's the full (and better formatted) answer on how to set up a call to CIHueAdjust, using Swift 4:
let filter = CIFilter(name: "CIHueAdjust")
let context = CIContext()
var extent: CGRect!
var scaleFactor: CGFloat!

@IBOutlet weak var img: UIImageView!

override func viewDidLoad() {
    super.viewDidLoad()
    let ciImage = CIImage(image: img.image!)
    // Note: you may use kCIInputImageKey for inputImage
    filter?.setValue(ciImage, forKey: "inputImage")
    filter?.setValue(Float(1), forKey: "inputAngle")
    let result = filter?.outputImage
    var image = UIImage(cgImage: context.createCGImage(result!, from: result!.extent)!)
    img.image = image
}
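If you want the hue change to be interactive like the sliders in the other answers, here is a hedged sketch; the slider action and the originalImage property are assumptions, not part of the original code:

@IBAction func hueSliderChanged(_ sender: UISlider) {
    guard let source = originalImage, let ciImage = CIImage(image: source) else { return }
    filter?.setValue(ciImage, forKey: kCIInputImageKey)
    filter?.setValue(sender.value, forKey: "inputAngle") // angle in radians
    if let result = filter?.outputImage,
       let cgImage = context.createCGImage(result, from: result.extent) {
        img.image = UIImage(cgImage: cgImage)
    }
}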
I just followed a Treehouse course and created my first Fun Fact app. In it, they generate random quotes from an array.
Needed:
I have placed an image view using the storyboard. When a button is pressed, a random quote from the array is already generated. But I also need a random image to be shown when that same button is pressed. I am new to Swift!
This is factbook.swift
struct FactBook {
    // stored in an array to show all quotes
    let factsArray = [
        "You have to dream before your dreams can come true.",
        "To succeed in your mission, you must have single-minded devotion to your goal.",
        "You have to dream before your dreams can come true.",
        "Love your job but don’t love your company, because you may not know when your company stops loving you.",
        "Failure will never overtake me if my definition to succeed is strong enough.",
    ]

    // return a random quote
    func randomFact() -> String {
        let unsignedArrayCount = UInt32(factsArray.count)
        let unsignedRandomNumber = arc4random_uniform(unsignedArrayCount)
        let randomNumber = Int(unsignedRandomNumber)
        return factsArray[randomNumber]
    }
}
This is viewcontroller.swift
class ViewController: UIViewController {

    @IBOutlet weak var funFactLabel: UILabel!
    @IBOutlet weak var funFactButton: UIButton!
    @IBOutlet weak var imgV: UIImageView!

    let factBook = FactBook()
    let colorWheel = ColorWheel()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        funFactLabel.text = factBook.randomFact()
        self.view.backgroundColor = UIColor(patternImage: UIImage(named: "apj")!)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    @IBAction func showFunFact() {
        let randomColor = colorWheel.randomColor()
        view.backgroundColor = randomColor
        funFactButton.tintColor = randomColor
        funFactLabel.text = factBook.randomFact()
    }
}
The solution is mainly to use the same approach you used for the random text. To sum up: keep an array of the image names and a function that selects a random image, then call that function from your view controller. A possible implementation of this approach:
Add this array to your FactBook
let factsImagesArray = [
    "image1.png",
    "image2.png",
    "image3.png",
    "image4.png",
    "image5.png",
]
Add this method to your FactBook
func randomFactImage() -> UIImage {
    let unsignedArrayCount = UInt32(factsImagesArray.count)
    let unsignedRandomNumber = arc4random_uniform(unsignedArrayCount)
    let randomNumber = Int(unsignedRandomNumber)
    return UIImage(named: factsImagesArray[randomNumber])!
}
and in your view controller change showFunFact to:
@IBAction func showFunFact() {
    let randomColor = colorWheel.randomColor()
    view.backgroundColor = randomColor
    funFactButton.tintColor = randomColor
    funFactLabel.text = factBook.randomFact()
    imgV.image = factBook.randomFactImage()
}
Of course, you should have image1.png, image2.png ... in your resources.
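On Swift 4.2 and later, the random-index boilerplate can be replaced with the standard library's randomElement(); a sketch of the same method (returning an optional here to avoid the force unwrap):

func randomFactImage() -> UIImage? {
    guard let name = factsImagesArray.randomElement() else { return nil }
    return UIImage(named: name)
}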
@IBAction func randomimage(sender: AnyObject) {
    // list of images in an array
    let image: NSArray = [UIImage(named: "1.jpg")!,
                          UIImage(named: "2.jpg")!,
                          UIImage(named: "3.jpg")!,
                          UIImage(named: "4.jpg")!,
                          UIImage(named: "5.jpg")!,
                          UIImage(named: "6.jpg")!,
                          UIImage(named: "7.jpg")!]
    // pick and show a random image
    let imagerange: UInt32 = UInt32(image.count)
    let randomimage = Int(arc4random_uniform(imagerange))
    let generatedimage: AnyObject = image.objectAtIndex(randomimage)
    self.myimage.image = generatedimage as? UIImage
}
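The same idea in current Swift syntax, as a sketch (the asset names and the myimage outlet are taken from the answer above):

@IBAction func randomImage(_ sender: Any) {
    let names = ["1.jpg", "2.jpg", "3.jpg", "4.jpg", "5.jpg", "6.jpg", "7.jpg"]
    if let name = names.randomElement() {
        myimage.image = UIImage(named: name)
    }
}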