I am using CIDetector to detect a face in a UIImage. I get the face rect correctly, but when I crop the image to the detected face rect, nothing shows up in my image view.
I have already checked that my image is not nil.
Here is my code:
@IBAction func detectFaceOnImageView(_: UIButton) {
    let image = myImageView.getFaceImage()
    myImageView.image = image
}

extension UIView {

    func getFaceImage() -> UIImage? {
        let faceDetectorOptions: [String: AnyObject] = [CIDetectorAccuracy: CIDetectorAccuracyHigh as AnyObject]
        let faceDetector: CIDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: faceDetectorOptions)!
        let viewScreenShotImage = generateScreenShot(scaleTo: 1.0)
        if viewScreenShotImage.cgImage != nil {
            let sourceImage = CIImage(cgImage: viewScreenShotImage.cgImage!)
            let features = faceDetector.features(in: sourceImage)
            if features.count > 0 {
                var faceBounds = CGRect.zero
                var faceImage: UIImage?
                for feature in features as! [CIFaceFeature] {
                    faceBounds = feature.bounds
                    let faceCroped: CIImage = sourceImage.cropping(to: faceBounds)
                    faceImage = UIImage(ciImage: faceCroped)
                }
                return faceImage
            } else {
                return nil
            }
        } else {
            return nil
        }
    }

    func generateScreenShot(scaleTo: CGFloat = 3.0) -> UIImage {
        let rect = self.bounds
        UIGraphicsBeginImageContextWithOptions(rect.size, false, 0.0)
        let context = UIGraphicsGetCurrentContext()
        self.layer.render(in: context!)
        let screenShotImage = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()
        let aspectRatio = screenShotImage.size.width / screenShotImage.size.height
        let resizedScreenShotImage = screenShotImage.scaleImage(toSize: CGSize(width: self.bounds.size.height * aspectRatio * scaleTo, height: self.bounds.size.height * scaleTo))
        return resizedScreenShotImage!
    }
}
For more information, I am attaching screenshots of the values: Screen Shot 1, Screen Shot 2, Screen Shot 3.
Try this (a UIImage created directly from a CIImage has no underlying bitmap, so a UIImageView may not display it; rendering the crop through a CIContext produces a real CGImage first):
let faceCroped: CIImage = sourceImage.cropping(to: faceBounds)
//faceImage = UIImage(ciImage: faceCroped)
let cgImage: CGImage = {
    let context = CIContext(options: nil)
    return context.createCGImage(faceCroped, from: faceCroped.extent)!
}()
faceImage = UIImage(cgImage: cgImage)
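For context, a minimal sketch of how that change fits into the cropping loop from the question (same faceDetector, sourceImage, and features as above; creating the CIContext once outside the loop avoids rebuilding it for every face):

var faceImage: UIImage?
let ciContext = CIContext(options: nil)
for feature in features as! [CIFaceFeature] {
    // Crop the detected face out of the source CIImage.
    let faceCroped: CIImage = sourceImage.cropping(to: feature.bounds)
    // Render into a real bitmap; a CIImage-backed UIImage may not be displayed by UIImageView.
    if let cgImage = ciContext.createCGImage(faceCroped, from: faceCroped.extent) {
        faceImage = UIImage(cgImage: cgImage)
    }
}
return faceImage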
Related
After capturing a photo with the camera, I compress the image (to 400 KB and 1 MB); this took almost 3 seconds on an iPhone 6 and less than a second on an iPhone 6s.
Is there any way to get a thumbnail and the original image without doing manual compression?
Code used for image compression:
Extension for UIImage
extension UIImage {

    // MARK: - UIImage+Resize
    func compressTo(_ expectedSizeInMb: Int) -> Data? {
        let sizeInBytes = expectedSizeInMb * 1024 * 1024
        var needCompress: Bool = true
        var imgData: Data?
        var compressingValue: CGFloat = 1.0
        while (needCompress && compressingValue > 0.0) {
            if let data: Data = jpegData(compressionQuality: compressingValue) {
                if data.count < sizeInBytes {
                    needCompress = false
                    imgData = data
                } else {
                    compressingValue -= 0.1
                }
            }
        }
        if let data = imgData {
            if (data.count < sizeInBytes) {
                return data
            }
        }
        return nil
    }
}
Usage:
if let imageData = image.compressTo(1) {
    print(imageData)
}
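If you also need the 400 KB target mentioned in the question, a hypothetical kilobyte-based variant of the same loop could look like this (compressTo(kilobytes:) is my own name, not part of the original extension):

extension UIImage {
    // Hypothetical variant: pass 400 for ~400 KB, 1024 for ~1 MB.
    func compressTo(kilobytes: Int) -> Data? {
        let sizeInBytes = kilobytes * 1024
        var compressionQuality: CGFloat = 1.0
        while compressionQuality > 0.0 {
            guard let data = jpegData(compressionQuality: compressionQuality) else { return nil }
            if data.count < sizeInBytes {
                return data
            }
            compressionQuality -= 0.1
        }
        return nil
    }
}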
For images saved in the Photos library, try:
let phAsset = info[UIImagePickerController.InfoKey.phAsset] as! PHAsset

let options = PHImageRequestOptions()
options.deliveryMode = .fastFormat
options.isSynchronous = false

// You can change the target size to CGSize(width: Int, height: Int), any numbers you want.
PHImageManager.default().requestImage(for: phAsset, targetSize: PHImageManagerMaximumSize, contentMode: .default, options: options, resultHandler: { image, _ in
    let thumbnail = image
    // use your thumbnail
})
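For example, a request for a roughly thumbnail-sized result could look like this (the 200x200 target size is only an illustration; phAsset and options are the same as above):

let thumbnailSize = CGSize(width: 200, height: 200) // pick whatever fits your UI

PHImageManager.default().requestImage(for: phAsset,
                                      targetSize: thumbnailSize,
                                      contentMode: .aspectFill,
                                      options: options) { image, _ in
    // The returned image is already downscaled, so no manual compression is needed.
    let thumbnail = image
    // use your thumbnail
}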
For images captured from the camera, you can get the pixel dimensions without recalculating the data count:
let image = info[UIImagePickerController.InfoKey.originalImage] as! UIImage

// Pixel dimensions are the same on each device's camera.
let widthPixels = image.size.width * image.scale
let heightPixels = image.size.height * image.scale

let sizeInBytes = 1024 * 1024
var thumbnail: UIImage! = nil
if Int(widthPixels * heightPixels) > sizeInBytes {
    // Assign the custom width and height you need.
    let rect = CGRect(x: 0.0, y: 0.0, width: 100, height: 100)
    UIGraphicsBeginImageContextWithOptions(rect.size, false, 1)
    let context = UIGraphicsGetCurrentContext()
    context?.interpolationQuality = .low
    image.draw(in: rect)
    let resizedImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    thumbnail = resizedImage
} else {
    thumbnail = image
}
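On iOS 10 and later, the same downscale can also be written with UIGraphicsImageRenderer, which manages the bitmap context for you; a rough equivalent of the block above (same image, same illustrative 100x100 size):

let targetRect = CGRect(x: 0, y: 0, width: 100, height: 100)
let renderer = UIGraphicsImageRenderer(size: targetRect.size)
let thumbnail = renderer.image { _ in
    // Drawing the full-resolution image into the small rect performs the downscale.
    image.draw(in: targetRect)
}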
I created a QR code generator with different colors. I want to remove the gray color in the frame and have a truly white or clear color there; after I use the color filter, this gray sometimes appears. Here is my code:
let data = string.data(using: .isoLatin1, allowLossyConversion: false)
if let filter = CIFilter(name: "CIQRCodeGenerator") {
    guard let colorFilter = CIFilter(name: "CIFalseColor") else { return nil }
    filter.setValue(data, forKey: "inputMessage")
    filter.setValue("H", forKey: "inputCorrectionLevel")
    colorFilter.setValue(filter.outputImage, forKey: "inputImage")
    colorFilter.setValue(CIColor(color: UIColor.clear), forKey: "inputColor1")
    colorFilter.setValue(CIColor(color: UIColor.black), forKey: "inputColor0")
    guard let qrCodeImage = colorFilter.outputImage else {
        return nil
    }
    let scaleX = imageView.frame.size.width / qrCodeImage.extent.size.width
    let scaleY = imageView.frame.size.height / qrCodeImage.extent.size.height
    let transform = CGAffineTransform(scaleX: scaleX, y: scaleY)
    if let output = colorFilter.outputImage?.transformed(by: transform) {
        let image = convert(cmage: (output.transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY))))
        return image
    }
}
return nil
}

func convert(cmage: CIImage) -> UIImage {
    let context: CIContext = CIContext(options: nil)
    let cgImage: CGImage = context.createCGImage(cmage, from: cmage.extent)!
    let image: UIImage = UIImage(cgImage: cgImage)
    return image
}
If I don't use the filter to change the color, I don't have this problem. And if I use blue, I get a frame with an aqua-blue tint.
Using the extension from the link I posted as a starting point:
extension String {
    func qrCode(background: UIColor = .white, color: UIColor = .black, output: CGSize = CGSize(width: 250, height: 250)) -> UIImage? {
        guard
            let data = data(using: .isoLatin1),
            let filter = CIFilter(name: "CIQRCodeGenerator")
        else { return nil }
        filter.setValue(data, forKey: "inputMessage")
        filter.setValue("M", forKey: "inputCorrectionLevel")
        guard let image = filter.outputImage
        else { return nil }
        let size = image.extent.integral
        let matrix = CGAffineTransform(scaleX: output.width / size.width, y: output.height / size.height)
        guard
            let colorFilter = CIFilter(name: "CIFalseColor",
                                       parameters: ["inputImage": image.transformed(by: matrix),
                                                    "inputColor1": CIColor(color: background),
                                                    "inputColor0": CIColor(color: color)]),
            let coloredImage = colorFilter.outputImage
        else { return nil }
        UIGraphicsBeginImageContextWithOptions(output, false, 0)
        defer { UIGraphicsEndImageContext() }
        UIImage(ciImage: coloredImage).draw(in: CGRect(origin: .zero, size: output))
        return UIGraphicsGetImageFromCurrentImageContext()
    }
}
let link = "https://stackoverflow.com/questions/51178573/swift-image-data-from-ciimage-qr-code-how-to-render-cifilter-output?noredirect=1"
if let coloredQRCode = link.qrCode(color: .red, output: CGSize(width: 500, height: 500)) {
    coloredQRCode
}
I'm trying to resize a CVPixelBuffer to a size of 128x128. I'm working with one that is 750x750. I'm currently using the CVPixelBuffer to create a new CGImage, which I resize then convert back into a CVPixelBuffer. Here is my code:
func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
    if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let context = CIContext()
        let imageRect = CGRect(x: 0, y: 0, width: 128, height: 128)
        if let image = context.createCGImage(ciImage, from: imageRect) {
            let t = CIImage(cgImage: image)
            let new = t.applying(transformation)
            context.render(new, to: pixelBuffer)
            return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
        }
    }
    return nil
}
I've also tried scaling the CIImage then converting it:
let t = CIImage(cgImage: image)
let transformation = CGAffineTransform(scaleX: 1, y: 2)
let new = t.applying(transformation)
context.render(new, to: pixelBuffer)
But that didn't work either.
Any help is appreciated. Thanks!
There's no need for pixel buffer rendering or the like. Just transform the original CIImage and crop it to size. Cropping is necessary if your source and destination dimensions aren't proportional.
func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
    if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)

        let srcWidth = CGFloat(ciImage.extent.width)
        let srcHeight = CGFloat(ciImage.extent.height)

        let dstWidth: CGFloat = 128
        let dstHeight: CGFloat = 128

        let scaleX = dstWidth / srcWidth
        let scaleY = dstHeight / srcHeight
        let scale = min(scaleX, scaleY)

        let transform = CGAffineTransform(scaleX: scale, y: scale)
        let output = ciImage.transformed(by: transform).cropped(to: CGRect(x: 0, y: 0, width: dstWidth, height: dstHeight))
        return UIImage(ciImage: output)
    }
    return nil
}
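As with the face-cropping answer earlier, the UIImage returned here is backed by a CIImage. If you need a bitmap-backed image (for example, to call jpegData on it), you can render the output through a CIContext instead, along these lines:

// Inside the same function, after computing `output`:
let context = CIContext(options: nil)
if let cgImage = context.createCGImage(output, from: output.extent) {
    return UIImage(cgImage: cgImage)
}
return nil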
Try this:
func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
    if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let resizedCIImage = ciImage.applying(CGAffineTransform(scaleX: 128.0 / 750.0, y: 128.0 / 750.0))
        let context = CIContext()
        if let image = context.createCGImage(resizedCIImage, from: resizedCIImage.extent) {
            return UIImage(cgImage: image)
        }
    }
    return nil
}
Here I assume that the pixel buffer is square with a size of 750x750; you can change it to work with any aspect ratio and size.
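For instance, deriving the scale factors from the buffer's actual extent instead of hard-coding 750 makes the same approach work for any input size; a sketch (the targetSize parameter is my own addition, and non-square buffers will simply be stretched to fit):

import CoreMedia
import UIKit

func getImageFromSampleBuffer(buffer: CMSampleBuffer, targetSize: CGSize = CGSize(width: 128, height: 128)) -> UIImage? {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) else { return nil }
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)

    // Derive the scale from the actual extent so any source size works.
    let scaleX = targetSize.width / ciImage.extent.width
    let scaleY = targetSize.height / ciImage.extent.height
    let resizedCIImage = ciImage.transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY))

    let context = CIContext()
    guard let cgImage = context.createCGImage(resizedCIImage, from: resizedCIImage.extent) else { return nil }
    return UIImage(cgImage: cgImage)
}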
Before asking this question, I searched the related post:
"unrecognized selector" when attempting to access CIFilter's outputImage
I don't know whether it is because I am using Swift or an extension, but I get an error. I have tested two methods to get the CIImage, and both fail with EXC_BAD_INSTRUCTION:
Attention
My URL does not have an http:// prefix; it is weixin://wxpay/bizpayurl?pr=ZwBVaW0, but I don't think this is the reason for the error.
Method one:
extension String {
    func initQRImage() -> UIImage {
        let filter: CIFilter = CIFilter.init(name: "CIQRCodeGenerator")!
        filter.setDefaults()

        let data: Data = self.data(using: String.Encoding.utf8)!
        filter.setValue(data, forKey: "inputMessage")

        let outputImage: CGImage = filter.outputImage as! CGImage // EXC_BAD_INSTRUCTION here
        let qr_image = UIImage.init(cgImage: outputImage)

        return qr_image
    }
}
Method two:
extension String {
    func initQRImage() -> UIImage {
        let url: URL = URL.init(fileURLWithPath: self)
        let inputImage: CIImage = CIImage.init(contentsOf: url)! // EXC_BAD_INSTRUCTION here
        let filter: CIFilter = CIFilter.init(name: "CIAreaAverage")!
        filter.setValue(inputImage, forKey: kCIInputImageKey)

        let inputExtent: CGRect = inputImage.extent
        let extent: CIVector = CIVector.init(x: inputExtent.origin.x, y: inputExtent.origin.y, z: inputExtent.size.width, w: inputExtent.size.height)
        filter.setValue(extent, forKey: kCIInputExtentKey)

        let outputImage: CIImage = filter.value(forKey: "outputImage") as! CIImage
        let qr_image = UIImage.init(cgImage: outputImage as! CGImage)

        return qr_image
    }
}
Both methods report an EXC_BAD_INSTRUCTION error; see the annotation I wrote after the offending line in each snippet.
EDIT - 1
I tried again in my project without using an extension, and the error occurs there too; the data is not nil:
I think the data is nil here:
let data:Data = self.data(using: String.Encoding.utf8)!
Also, a UIImage instantiated from a CIImage has no bitmap; it holds no actual image, just a set of instructions for applying a filter. So your methods of converting to UIImage shouldn't work.
Finally, I found an outdated method to generate a QR code; after my improvements it became this:
// quality can modify the definition (resolution) of the generated code
class func generateQRImage(stringQR: NSString, withSizeRate rate: CGFloat, quality: CGFloat?) -> UIImage {
    let filter: CIFilter = CIFilter(name: "CIQRCodeGenerator")!
    filter.setDefaults()

    let data: NSData = stringQR.data(using: String.Encoding.utf8.rawValue)! as NSData
    filter.setValue(data, forKey: "inputMessage")

    let outputImg: CIImage = filter.outputImage!
    let context: CIContext = CIContext(options: nil)

    var tmp_quality = quality
    if quality == nil {
        tmp_quality = 1.0
    }
    let transform: CGAffineTransform = CGAffineTransform(scaleX: tmp_quality!, y: tmp_quality!)
    let outputImg_after = outputImg.applying(transform)

    let cgimg: CGImage = context.createCGImage(outputImg_after, from: outputImg_after.extent)!
    var img: UIImage = UIImage(cgImage: cgimg, scale: 1.0, orientation: UIImageOrientation.up)

    let width = img.size.width * rate
    let height = img.size.height * rate
    UIGraphicsBeginImageContext(CGSize(width: width, height: height))
    let cgContxt: CGContext = UIGraphicsGetCurrentContext()!
    cgContxt.interpolationQuality = .high // use .none to keep the modules perfectly sharp
    img.draw(in: CGRect(x: 0, y: 0, width: width, height: height))
    img = UIGraphicsGetImageFromCurrentImageContext()!
    UIGraphicsEndImageContext()

    return img
}
I am trying to generate a QR code using the iOS Core Image API:
func createQRForString(#data: NSData) -> CIImage! {
    var qrFilter = CIFilter(name: "CIQRCodeGenerator")
    qrFilter.setValue(data, forKey: "inputMessage")
    qrFilter.setValue("H", forKey: "inputCorrectionLevel")
    return qrFilter.outputImage
}

func createNonInterpolatedImageFromCIImage(image: CIImage, withScale scale: CGFloat) -> UIImage {
    let cgImage = CIContext(options: nil).createCGImage(image, fromRect: image.extent())
    UIGraphicsBeginImageContext(CGSizeMake(image.extent().size.width * scale, image.extent().size.height * scale))
    let context = UIGraphicsGetCurrentContext()
    CGContextSetInterpolationQuality(context, kCGInterpolationNone)
    let scaledImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return scaledImage
}
And the following code in the viewDidLoad method:
let data = "Hello World".dataUsingEncoding(NSUTF8StringEncoding)
if let image=createQRForString(data: data!){
let uiimage = createNonInterpolatedImageFromCIImage(image, withScale: 1.0)
imageView.image = uiimage
}
else{
println("Error loading image")
}
}
But it neither prints the error message nor shows the QR code in the imageView.
Here is the solution:
override func viewDidLoad() {
    super.viewDidLoad()
    self.imgView.image = generateCode()
}

func generateCode() -> UIImage {
    let filter = CIFilter(name: "CIQRCodeGenerator")
    let data = "Hello World".dataUsingEncoding(NSUTF8StringEncoding)
    filter.setValue("H", forKey: "inputCorrectionLevel")
    filter.setValue(data, forKey: "inputMessage")

    let outputImage = filter.outputImage
    let context = CIContext(options: nil)
    let cgImage = context.createCGImage(outputImage, fromRect: outputImage.extent())
    let image = UIImage(CGImage: cgImage, scale: 1.0, orientation: UIImageOrientation.Up)
    let resized = resizeImage(image!, withQuality: kCGInterpolationNone, rate: 5.0)
    return resized
}

func resizeImage(image: UIImage, withQuality quality: CGInterpolationQuality, rate: CGFloat) -> UIImage {
    let width = image.size.width * rate
    let height = image.size.height * rate

    UIGraphicsBeginImageContextWithOptions(CGSizeMake(width, height), true, 0)
    let context = UIGraphicsGetCurrentContext()
    CGContextSetInterpolationQuality(context, quality)
    image.drawInRect(CGRectMake(0, 0, width, height))
    let resized = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()

    return resized
}