Take photo automatically after autofocusing (AVCapture) - iOS

I want to do this: when the camera has focused, the app should take a picture automatically. But I haven't found a way to detect whether the camera is focused.
I'm using AVFoundation to take the picture. I'm new to iOS and don't know how to code this.
Can anyone help me with this issue?

import UIKit
import AVFoundation

class ViewController: UIViewController {
    let captureSession = AVCaptureSession()
    let stillImageOutput = AVCaptureStillImageOutput()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Grab the default video device (the back camera).
        guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) else { return }
        do {
            captureSession.addInput(try AVCaptureDeviceInput(device: captureDevice))
        } catch {
            print(error.localizedDescription)
            return
        }
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto
        captureSession.startRunning()

        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            previewLayer.bounds = view.bounds
            previewLayer.position = CGPoint(x: view.bounds.midX, y: view.bounds.midY)
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
            let cameraPreview = UIView(frame: view.bounds)
            cameraPreview.layer.addSublayer(previewLayer)
            cameraPreview.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(saveToCamera(sender:))))
            // Add the preview regardless of focus support.
            view.addSubview(cameraPreview)
        }

        // Try to enable continuous autofocus.
        if captureDevice.isFocusModeSupported(.continuousAutoFocus) {
            do {
                try captureDevice.lockForConfiguration()
                captureDevice.focusMode = .continuousAutoFocus
                captureDevice.unlockForConfiguration()
            } catch {
                print(error.localizedDescription)
            }
        }
    }

    func saveToCamera(sender: UITapGestureRecognizer) {
        if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
            stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { imageDataSampleBuffer, error in
                guard let buffer = imageDataSampleBuffer,
                      let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer),
                      let image = UIImage(data: imageData) else { return }
                UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }
}
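None of the related answers below address this, so here is the usual approach as a minimal sketch (my addition, not from the thread): AVCaptureDevice's adjustingFocus property is key-value observable, so you can observe it and capture once it flips back to false, i.e. when the focus sweep has settled. It assumes the ViewController above, with the capture device kept in a property:

// Sketch: observe `adjustingFocus` (KVO-observable on AVCaptureDevice)
// and capture automatically once focusing finishes.
var captureDevice: AVCaptureDevice?   // assign this during setup in viewDidLoad

func startObservingFocus() {
    captureDevice?.addObserver(self, forKeyPath: "adjustingFocus",
                               options: [.new], context: nil)
}

override func observeValue(forKeyPath keyPath: String?, of object: Any?,
                           change: [NSKeyValueChangeKey: Any]?,
                           context: UnsafeMutableRawPointer?) {
    // `adjustingFocus` flips back to false when the lens stops hunting.
    guard keyPath == "adjustingFocus",
          let isAdjusting = change?[.newKey] as? Bool,
          !isAdjusting else { return }
    // Focus has just settled; take the picture.
    saveToCamera(sender: UITapGestureRecognizer())
}

deinit {
    captureDevice?.removeObserver(self, forKeyPath: "adjustingFocus")
}

Call startObservingFocus() at the end of viewDidLoad. In continuous-autofocus mode the callback fires on every refocus, so add a once-only flag if you want a single automatic shot.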

Related

Double Tap or Use Button To Switch Camera From Back to Front (Xcode 8, Swift 3)

So, lately I have been trying to implement the function of switching the camera view from the back to the front camera in Swift 3, with no luck so far.
Currently, my default view is from the back camera - I can take pictures with it and then retake. But can anyone show me how to either double-tap the screen to switch cameras or simply use the assigned button to switch them? Thank you!
import UIKit
import AVFoundation
import FirebaseDatabase

class CameraView: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    override var prefersStatusBarHidden: Bool {
        return true
    }

    var captureSession: AVCaptureSession!
    var stillImageOutput: AVCaptureStillImageOutput!
    var previewLayer: AVCaptureVideoPreviewLayer!

    @IBOutlet var cameraView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        previewLayer?.frame = cameraView.bounds
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        captureSession = AVCaptureSession()
        captureSession?.sessionPreset = AVCaptureSessionPreset1920x1080
        let backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            if captureSession?.canAddInput(input) == true {
                captureSession?.addInput(input)
                stillImageOutput = AVCaptureStillImageOutput()
                stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
                if captureSession?.canAddOutput(stillImageOutput) == true {
                    captureSession?.addOutput(stillImageOutput)
                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                    previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                    cameraView.layer.addSublayer(previewLayer)
                    captureSession?.startRunning()
                }
            }
        } catch {
            print(error.localizedDescription)
        }
    }

    @IBOutlet var tempImageView: UIImageView!

    @IBAction func didPressTakePhoto(_ sender: UIButton) {
        if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
            videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait
            stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
                if let sampleBuffer = sampleBuffer,
                   let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
                   let dataProvider = CGDataProvider(data: imageData as CFData),
                   let cgImageRef = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: .defaultIntent) {
                    let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
                    self.tempImageView.image = image
                    self.tempImageView.isHidden = false
                }
            })
        }
    }

    var didTakePhoto = Bool()

    @IBAction func didPressTakeAnother(_ sender: UIButton) {
        if didTakePhoto == true {
            tempImageView.isHidden = true
            didTakePhoto = false
        } else {
            captureSession?.startRunning()
            didTakePhoto = true
        }
    }
}
I don't see any problems here - here is a working solution:
import Foundation
import UIKit
import AVFoundation

class MainViewController: UIViewController {
    var tempImage: UIImageView?
    var captureSession: AVCaptureSession?
    var stillImageOutput: AVCaptureStillImageOutput?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var currentCaptureDevice: AVCaptureDevice?
    var usingFrontCamera = false

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        loadCamera()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        videoPreviewLayer!.frame = self.cameraPreviewSurface.bounds
    }

    @IBAction func switchButtonAction(_ sender: Any) {
        usingFrontCamera = !usingFrontCamera
        loadCamera()
    }

    func getFrontCamera() -> AVCaptureDevice? {
        let videoDevices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
        for device in videoDevices! {
            let device = device as! AVCaptureDevice
            if device.position == AVCaptureDevicePosition.front {
                return device
            }
        }
        return nil
    }

    func getBackCamera() -> AVCaptureDevice {
        return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
    }

    func loadCamera() {
        if captureSession == nil {
            captureSession = AVCaptureSession()
            captureSession!.sessionPreset = AVCaptureSessionPresetPhoto
        }
        var error: NSError?
        var input: AVCaptureDeviceInput!
        currentCaptureDevice = (usingFrontCamera ? getFrontCamera() : getBackCamera())
        do {
            input = try AVCaptureDeviceInput(device: currentCaptureDevice)
        } catch let error1 as NSError {
            error = error1
            input = nil
            print(error!.localizedDescription)
        }
        for i: AVCaptureDeviceInput in (self.captureSession?.inputs as! [AVCaptureDeviceInput]) {
            self.captureSession?.removeInput(i)
        }
        if error == nil && captureSession!.canAddInput(input) {
            captureSession!.addInput(input)
            stillImageOutput = AVCaptureStillImageOutput()
            stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
            if captureSession!.canAddOutput(stillImageOutput) {
                captureSession!.addOutput(stillImageOutput)
                videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                videoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
                videoPreviewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
                //self.cameraPreviewSurface.layer.sublayers?.forEach { $0.removeFromSuperlayer() }
                self.cameraPreviewSurface.layer.addSublayer(videoPreviewLayer!)
                DispatchQueue.main.async {
                    self.captureSession!.startRunning()
                }
            }
        }
    }
}
Some notes:
cameraPreviewSurface - this is the UIView where the camera preview will show;
don't reassign the session, and don't just add the new input - remove the existing inputs first, then add the new one.
P.S. The code was done with Swift 3.0.1 / Xcode 8.1.
Cheers )
Xcode version: 10.1 (10B61)
Swift version: 4.2
Change AVCaptureSession Capture Source
This builds on Stepan Maksymov's solution; we can simplify it by replacing the captureSession.inputs loop.
First, create an IBAction outlet and connect it to the view controller to change the camera view:
@IBAction private func changeCamera(_ cameraButton: UIButton) {
    usingFrontCamera = !usingFrontCamera
    do {
        captureSession.removeInput(captureSession.inputs.first!)
        if usingFrontCamera {
            captureDevice = getFrontCamera()
        } else {
            captureDevice = getBackCamera()
        }
        let captureDeviceInput1 = try AVCaptureDeviceInput(device: captureDevice)
        captureSession.addInput(captureDeviceInput1)
    } catch {
        print(error.localizedDescription)
    }
}
Second, copy the simplified AVCaptureDevice getters (compare Stepan Maksymov's versions):
func getFrontCamera() -> AVCaptureDevice? {
    return AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .front).devices.first
}

func getBackCamera() -> AVCaptureDevice? {
    return AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices.first
}
You can replace the @IBAction with a plain function like so:
func changeCamera() {
    usingFrontCamera = !usingFrontCamera
    do {
        captureSession.removeInput(captureSession.inputs.first!)
        if usingFrontCamera {
            captureDevice = getFrontCamera()
        } else {
            captureDevice = getBackCamera()
        }
        let captureDeviceInput1 = try AVCaptureDeviceInput(device: captureDevice)
        captureSession.addInput(captureDeviceInput1)
    } catch {
        print(error.localizedDescription)
    }
}
In addition to Stepan Maksymov's post, I suggest adding this function:
func stopCaptureSession() {
    self.captureSession.stopRunning()
    if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
        for input in inputs {
            self.captureSession.removeInput(input)
        }
    }
}
And call it instead of these lines from his post:

for i: AVCaptureDeviceInput in (self.captureSession?.inputs as! [AVCaptureDeviceInput]) {
    self.captureSession?.removeInput(i)
}
This way the cameras will change quicker.
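The double-tap half of the original question never got an answer. A minimal sketch (assuming the changeCamera() function and the cameraView outlet from the answers above) is to attach a two-tap gesture recognizer:

// Sketch: switch cameras on a double tap, reusing changeCamera() from above.
override func viewDidLoad() {
    super.viewDidLoad()
    let doubleTap = UITapGestureRecognizer(target: self, action: #selector(handleDoubleTap))
    doubleTap.numberOfTapsRequired = 2
    cameraView.addGestureRecognizer(doubleTap)
}

@objc func handleDoubleTap() {
    changeCamera()
}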

Adding audio to iphone app xcode 7.3

I am very new to Swift and Xcode.
I am trying to design a simple camera app. I have successfully integrated the camera, except that I want to play a sound when the camera takes a picture, and I'm unsure how to go about this.
Below is my code; I keep getting an error on the line audioPlayer.play():
EXC_BAD_ACCESS (code=1, address=0x38)
import UIKit
import AVFoundation

class ViewController: UIViewController {
    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()
    var kranz = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("Din Daa Daa ; George Kranz", ofType: "mp3")!)
    var audioPlayer = AVAudioPlayer()

    override func viewDidLoad() {
        super.viewDidLoad()
        do {
            let audioPlayer = try AVAudioPlayer(contentsOfURL: kranz, fileTypeHint: nil)
            audioPlayer.prepareToPlay()
        } catch {
            print("error")
        }
    }

    @IBOutlet weak var CameraView: UIView!

    override func viewWillAppear(animated: Bool) {
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if device.position == AVCaptureDevicePosition.Back {
                do {
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)
                            captureSession.startRunning()
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
                            CameraView.layer.addSublayer(previewLayer)
                            previewLayer.position = CGPoint(x: self.CameraView.frame.width / 2, y: self.CameraView.frame.height / 2)
                            previewLayer.bounds = CameraView.frame
                        }
                    }
                } catch {
                    print("ERror")
                }
            }
        }
    }

    @IBAction func TakePhoto(sender: UIButton) {
        audioPlayer.play()
        if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo) {
            sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { buffer, error in
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
                UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)
            })
        }
    }
}
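A likely cause of the crash (my reading - the thread never spells it out): inside viewDidLoad, the do block declares a new local constant with let audioPlayer = try AVAudioPlayer(...), which shadows the audioPlayer property. The property keeps the bare AVAudioPlayer() it was initialized with, and calling play() on a player that was never given audio data crashes with EXC_BAD_ACCESS. The minimal fix is to assign to the property instead:

// Minimal fix sketch: drop the `let` so the property is assigned,
// not shadowed by a local constant.
do {
    audioPlayer = try AVAudioPlayer(contentsOfURL: kranz, fileTypeHint: nil)
    audioPlayer.prepareToPlay()
} catch {
    print("error")
}

The answer below restructures the same idea into a reusable prepareMusic helper: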
import UIKit
import AVFoundation

class ViewController: UIViewController {
    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()
    var audioPlayer = AVAudioPlayer()

    override func viewDidLoad() {
        super.viewDidLoad()
        prepareMusic("LazerNoise", type: "mp3")
    }

    func prepareMusic(name: String!, type: String!) {
        let path = NSBundle.mainBundle().pathForResource(name, ofType: type)
        let soundURL = NSURL(fileURLWithPath: path!)
        do {
            // Assign to the property (no `let`), so play() uses a loaded player.
            try audioPlayer = AVAudioPlayer(contentsOfURL: soundURL)
            audioPlayer.prepareToPlay()
        } catch let err as NSError {
            print(err.debugDescription)
        }
    }

    func playNstop() {
        if audioPlayer.playing {
            audioPlayer.stop()
        } else {
            audioPlayer.play()
        }
    }

    @IBOutlet weak var CameraView: UIView!

    override func viewWillAppear(animated: Bool) {
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if device.position == AVCaptureDevicePosition.Back {
                do {
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)
                            captureSession.startRunning()
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
                            CameraView.layer.addSublayer(previewLayer)
                            previewLayer.position = CGPoint(x: self.CameraView.frame.width / 2, y: self.CameraView.frame.height / 2)
                            previewLayer.bounds = CameraView.frame
                        }
                    }
                } catch {
                    print("ERror")
                }
            }
        }
    }

    @IBAction func TakePhoto(sender: UIButton) {
        audioPlayer.play()
        if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo) {
            sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { buffer, error in
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
                UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)
            })
        }
    }
}

Trying to capture an image using AVFoundation and when captured the output image is very dark(Almost looks Like a Black Screen)

I'm trying to capture an image using AVFoundation, and the captured output image is very dark (it almost looks like a black screen). This is my code; I'm a novice at Swift, so any help would be appreciated.
import UIKit
import AVFoundation
import MobileCoreServices

class ViewController: UIViewController {
    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?

    @IBOutlet var captureView: UIView!
    @IBOutlet var capturedImage: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        for device in devices {
            if device.hasMediaType(AVMediaTypeVideo) {
                if device.position == AVCaptureDevicePosition.Back {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        beginSession()
                    }
                }
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func beginSession() {
        var err: NSError? = nil
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.captureView.layer.addSublayer(previewLayer)
        previewLayer?.frame = self.captureView.layer.frame
        captureSession.startRunning()
    }

    @IBAction func capture(sender: AnyObject) {
        var stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
        if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) {
            stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (sampleBuffer, error) in
                var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                var dataProvider = CGDataProviderCreateWithCFData(imageData)
                var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, kCGRenderingIntentDefault)
                var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right)
                var capturedImage = UIImageView(image: image)
                // Show the captured image, then save the captured preview to an image
                UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
            })
        }
    }
}
Move this code block

stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
if captureSession.canAddOutput(stillImageOutput) {
    captureSession.addOutput(stillImageOutput)
}

into func beginSession(). Capturing immediately after adding the output does not give the session's auto-exposure time to settle, which is most likely why the frame comes out dark.
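For reference, a sketch of the adjusted setup (it assumes stillImageOutput is promoted to a property instead of being a local in capture(), which is my change, not spelled out in the answer):

let stillImageOutput = AVCaptureStillImageOutput()

func beginSession() {
    var err: NSError? = nil
    captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
    if err != nil {
        println("error: \(err?.localizedDescription)")
    }
    // Configure and attach the output once, up front, so auto-exposure
    // has settled long before a capture is requested.
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    if captureSession.canAddOutput(stillImageOutput) {
        captureSession.addOutput(stillImageOutput)
    }
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.captureView.layer.addSublayer(previewLayer)
    previewLayer?.frame = self.captureView.layer.frame
    captureSession.startRunning()
}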

AVCaptureVideoPreviewLayer looks different than captured Image

The image presented in my preview layer has decent color/light levels, but when I capture the image and put it in a UIImageView, it looks very dark.
Why are the light levels so different?
Here's the controller code:
// Parent class
class CameraViewController: UIViewController {
    @IBOutlet var photoPreviewView: UIView!
    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?
    var imageData: NSData?

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession.sessionPreset = AVCaptureSessionPresetMedium
        let devices = AVCaptureDevice.devices()
        for device in devices {
            // Make sure this particular device supports video
            if device.hasMediaType(AVMediaTypeVideo) {
                // Finally check the position and confirm we've got the back camera
                if device.position == AVCaptureDevicePosition.Back {
                    captureDevice = device as? AVCaptureDevice
                }
            }
        }
        if captureDevice != nil {
            beginSession()
        }
    }

    func captureImage(callback: (NSData) -> ()) {
        let stillImageOutput = AVCaptureStillImageOutput()
        captureSession.addOutput(stillImageOutput)
        let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo)
        if videoConnection != nil {
            stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection) { (imageDataSampleBuffer, error) -> Void in
                callback(AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer))
            }
        } else {
            println("couldn't find video connection")
        }
    }

    func beginSession() {
        var err: NSError? = nil
        let captureDeviceInput = AVCaptureDeviceInput(device: captureDevice, error: &err)
        captureSession.addInput(captureDeviceInput)
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        photoPreviewView.layer.addSublayer(previewLayer)
        // FIXME: Preview layer is not being positioned as expected.
        // This is an arbitrary hack to make it "look right" on my iPhone 6.
        previewLayer.frame = CGRect(x: -64, y: 0, width: 504, height: 504)
        captureSession.startRunning()
    }
}

class SubmitGuessViewController: CameraViewController {
    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject!) -> Void {
        super.prepareForSegue(segue, sender: sender)
        let createImageFromData = { (imageData: NSData) -> () in
            if imageData.length > 0 {
                let checkGuessViewController = segue.destinationViewController as CheckGuessViewController
                checkGuessViewController.submittedImage = UIImage(data: imageData)
            } else {
                println("Image Data not captured")
            }
        }
        self.captureImage(createImageFromData)
    }
}
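This one got no answer in the thread, but the symptom matches the previous question: captureImage() creates the AVCaptureStillImageOutput and adds it to the session immediately before capturing, so auto-exposure has no time to converge and the frame comes out dark. A sketch of the likely fix, mirroring the previous answer (my suggestion, with the output promoted to a property):

let stillImageOutput = AVCaptureStillImageOutput()   // created once

func beginSession() {
    // ... existing input and preview-layer setup ...
    if captureSession.canAddOutput(stillImageOutput) {
        captureSession.addOutput(stillImageOutput)   // added once, up front
    }
    captureSession.startRunning()
}

func captureImage(callback: (NSData) -> ()) {
    // Reuse the long-lived output; don't add a new one per shot.
    if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) {
        stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection) { (imageDataSampleBuffer, error) -> Void in
            callback(AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer))
        }
    } else {
        println("couldn't find video connection")
    }
}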

Capture current camera image using AVFoundation

I'm trying to capture the image and save it to a variable when I press "myButton". What should I do?
My code is as follows:
import UIKit
import AVFoundation
import MobileCoreServices

class ViewController: UIViewController {
    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?

    @IBOutlet var myTap: UITapGestureRecognizer!
    @IBOutlet weak var myButton: UIButton!

    @IBAction func shotPress(sender: UIButton) {
        // Save image to variable somehow
        var stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        for device in devices {
            if device.hasMediaType(AVMediaTypeVideo) {
                if device.position == AVCaptureDevicePosition.Back {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        beginSession()
                    }
                }
            }
        }
    }

    func updateDeviceSettings(focusValue: Float, isoValue: Float) {
        // Note: focusValue and isoValue are currently unused.
        if let device = captureDevice {
            if device.lockForConfiguration(nil) {
                device.focusMode = AVCaptureFocusMode.ContinuousAutoFocus
                device.unlockForConfiguration()
            }
        }
    }

    func beginSession() {
        var err: NSError? = nil
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer)
        self.view.bringSubviewToFront(myButton)
        previewLayer?.frame = self.view.layer.frame
        captureSession.startRunning()
    }
}
This is what I am using in one of the apps I am working on. It should be helpful for your problem as well.
func capturePicture() {
    println("Capturing image")
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    captureSession.addOutput(stillImageOutput)
    if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) {
        stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (sampleBuffer, error) in
            var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
            var dataProvider = CGDataProviderCreateWithCFData(imageData)
            var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault)
            var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right)
            var imageView = UIImageView(image: image)
            imageView.frame = CGRect(x: 0, y: 0, width: self.screenSize.width, height: self.screenSize.height)
            // Show the captured image on screen
            self.view.addSubview(imageView)
            // Save the captured preview to the photo album
            UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
        })
    }
}
Updated for Swift 3:
var stillImageOutput = AVCaptureStillImageOutput()
stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
self.cameraSession.addOutput(stillImageOutput)
if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
    stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
        let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer!)
        let dataProvider = CGDataProvider(data: imageData! as CFData)
        let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
        let image = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: .right)
        // do something with image
    })
}
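The question asked to save the capture to a variable; for that, assign the decoded image to a property (hypothetical name) inside the completion handler, instead of or in addition to writing it to the photo album:

// Hypothetical property on the view controller:
var lastCapturedImage: UIImage?

// Inside the completion handler above:
self.lastCapturedImage = image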
