In my code, Xcode says AVCaptureStillImageOutput is deprecated, but I don't know how to replace it with AVCapturePhotoOutput. I know that the var output is the problem and the one I have to replace.
Thanks for your help.
class CameraView: UIViewController, UIImagePickerControllerDelegate {
    var session: AVCaptureSession?
    var input: AVCaptureDeviceInput?
    var output: AVCaptureStillImageOutput?
    var previewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Initialize the session and output variables; this is necessary
        session = AVCaptureSession()
        session?.sessionPreset = AVCaptureSession.Preset.photo
        output = AVCaptureStillImageOutput()
        let camera = getDevice(position: .back)
        do {
            input = try AVCaptureDeviceInput(device: camera!)
        } catch let error as NSError {
            print(error)
            input = nil
        }
        if session?.canAddInput(input!) == true {
            session?.addInput(input!)
            output?.outputSettings = [AVVideoCodecKey: AVVideoCodecType.jpeg]
            if session?.canAddOutput(output!) == true {
                session?.addOutput(output!)
                previewLayer = AVCaptureVideoPreviewLayer(session: session!)
                previewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
                previewLayer?.frame = self.view.bounds
                previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
                cameraView.layer.addSublayer(previewLayer!)
                session?.startRunning()
            }
        }
        // Select library photo
    }

    // Get the device (front or back)
    func getDevice(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        let devices: NSArray = AVCaptureDevice.devices() as NSArray
        for de in devices {
            let deviceConverted = de as! AVCaptureDevice
            if deviceConverted.position == position {
                return deviceConverted
            }
        }
        return nil
    }

    @IBOutlet weak var prendreBouton: UIButton!
    @IBOutlet weak var previewImage: UIImageView!

    @IBAction func takeAPicture(_ sender: Any) {
        if let videoConnection = output?.connection(with: AVMediaType.video) {
            output?.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer!)
                let dataProvider = CGDataProvider.init(data: imageData! as CFData)
                let cgImageRef = CGImage.init(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
                let image = UIImage.init(cgImage: cgImageRef!, scale: 1.0, orientation: .right)
                // do something with image
            })
        }
    }
}
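For reference, here is a minimal sketch of the same capture path using AVCapturePhotoOutput instead. This is a starting point, not a tested drop-in replacement: it assumes iOS 10+ for AVCapturePhotoOutput, iOS 11+ for fileDataRepresentation(), and keeps the session setup from the code above.

// Replace `var output: AVCaptureStillImageOutput?` with:
var photoOutput: AVCapturePhotoOutput?

// In viewDidLoad, set up the new output instead of AVCaptureStillImageOutput:
// photoOutput = AVCapturePhotoOutput()
// if session?.canAddOutput(photoOutput!) == true { session?.addOutput(photoOutput!) }

@IBAction func takeAPicture(_ sender: Any) {
    // Per-capture AVCapturePhotoSettings replaces the old outputSettings dictionary.
    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    photoOutput?.capturePhoto(with: settings, delegate: self)
}

// CameraView must also adopt AVCapturePhotoCaptureDelegate:
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    guard error == nil,
          let imageData = photo.fileDataRepresentation(),
          let image = UIImage(data: imageData) else { return }
    // do something with image
}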
Related
I am writing an app to get the depth data and disparity data from pictures taken with the camera. I can get the disparity data, but the depth data always returns nil. I need to get the depth information and save it as a JPG.
I have tried the code below, where the user can switch between the front and back cameras and take pictures; the picture taken is then processed.
import UIKit
import AVFoundation

class ViewController: UIViewController {
    @IBOutlet weak var ImageView: UIView!
    var img: UIImage?
    var rgbImage: UIImage?
    var captureSession: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var backCamera = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
    var frontCamera = AVCaptureDevice.default(.builtInTrueDepthCamera, for: .video, position: .front)
    var capturePhotoOut: AVCapturePhotoOutput?

    override func viewDidLoad() {
        super.viewDidLoad()
        if #available(iOS 10.2, *) {
            let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
            do {
                let input = try AVCaptureDeviceInput(device: captureDevice!)
                captureSession = AVCaptureSession()
                captureSession?.addInput(input)
                videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
                videoPreviewLayer?.frame = view.layer.bounds
                ImageView.layer.addSublayer(videoPreviewLayer!)
                captureSession?.startRunning()
            } catch {
                print("error")
            }
        }
        capturePhotoOut = AVCapturePhotoOutput()
        capturePhotoOut?.isHighResolutionCaptureEnabled = true
        captureSession?.sessionPreset = .photo
        captureSession?.addOutput(capturePhotoOut!)
        capturePhotoOut!.isDepthDataDeliveryEnabled = capturePhotoOut!.isDepthDataDeliverySupported
        capturePhotoOut!.isPortraitEffectsMatteDeliveryEnabled = capturePhotoOut!.isPortraitEffectsMatteDeliverySupported
    }

    @IBAction func imageCapture(_ sender: Any) {
        guard let capturePhotoOutput = self.capturePhotoOut else { return }
        let photoSettings = AVCapturePhotoSettings()
        photoSettings.isAutoStillImageStabilizationEnabled = true
        photoSettings.isHighResolutionPhotoEnabled = true
        photoSettings.isDepthDataDeliveryEnabled = true
        photoSettings.isPortraitEffectsMatteDeliveryEnabled = true
        capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        let vc = segue.destination as! DepthImageView
        vc.img = img
        vc.rgbImg = rgbImage
    }
}

extension ViewController: AVCapturePhotoCaptureDelegate {
    public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil else { return }
        guard let imageData = photo.fileDataRepresentation() else { return }
        let detailImage = UIImage.init(data: imageData, scale: 1.0)
        rgbImage = detailImage
        let nsData = imageData as NSData
        let ptr = nsData.bytes.assumingMemoryBound(to: UInt8.self)
        let cfDataset = CFDataCreate(nil, ptr, imageData.count)
        guard let source = CGImageSourceCreateWithData(cfDataset!, nil) else { return }
        guard let auxDataInfo = CGImageSourceCopyAuxiliaryDataInfoAtIndex(source, 0, kCGImageAuxiliaryDataTypeDepth) as? [String: AnyObject] else {
            return
        }
        var depthData: AVDepthData
        do {
            depthData = try AVDepthData(fromDictionaryRepresentation: auxDataInfo)
            if depthData.depthDataType != kCVPixelFormatType_DepthFloat32 {
                depthData = depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
            }
            let depthDataMap = depthData.depthDataMap
            let ciImage = CIImage(cvPixelBuffer: depthDataMap)
            let depthDataMapImage = UIImage(ciImage: ciImage, scale: 1.0, orientation: .down)
            img = depthDataMapImage
            self.performSegue(withIdentifier: "ImageViewScreen", sender: self)
        } catch {
            print("Error")
        }
    }
}
I always get nil at the auxDataInfo guard.
The AVCapturePhoto already contains the AVDepthData. Try to get the depth data directly from the photo:
let depthData = photo.depthData
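A minimal sketch of that approach inside the existing delegate callback (it assumes isDepthDataDeliveryEnabled was set on both the output and the photo settings, as in the code above):

public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    guard error == nil else { return }
    // The depth map is attached to the AVCapturePhoto itself; no need to re-parse the JPEG container.
    guard var depthData = photo.depthData else { return }
    if depthData.depthDataType != kCVPixelFormatType_DepthFloat32 {
        depthData = depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
    }
    let ciImage = CIImage(cvPixelBuffer: depthData.depthDataMap)
    img = UIImage(ciImage: ciImage, scale: 1.0, orientation: .down)
}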
This is my camera code; it can switch between the front and rear cameras. If I take a picture with the back camera, the orientation of the photo is correct (original), but if I take a photo with the front camera, I get the image with the wrong orientation.
class TakeSelfieViewController: UIViewController, AVCapturePhotoCaptureDelegate {
    var captureSession = AVCaptureSession()
    var photoOutput = AVCapturePhotoOutput()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?
    var sessionOutputSetting = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
    var toggle = false

    @IBOutlet weak var cameraView: UIView!
    @IBOutlet weak var tempImageView: UIImageView!
    @IBOutlet weak var adorButton: UIButton!

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        previewLayer?.frame = cameraView.bounds
        let blurEffect = UIBlurEffect(style: UIBlurEffectStyle.light)
        let blurEffectView = UIVisualEffectView(effect: blurEffect)
        blurEffectView.frame = adorButton.bounds
        blurEffectView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        adorButton.addSubview(blurEffectView)
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        pickCamera(which: toggle)
    }

    func pickCamera(which: Bool) {
        if which == true {
            let deviceDescovery = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDualCamera, AVCaptureDeviceType.builtInTelephotoCamera, AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.back)
            print("back camera")
            startCamera(deviceDesc: deviceDescovery!)
            toggle = true
        } else if which == false {
            let deviceDescovery = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDualCamera, AVCaptureDeviceType.builtInTelephotoCamera, AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.front)
            print("front camera")
            startCamera(deviceDesc: deviceDescovery!)
            toggle = false
        }
    }

    func startCamera(deviceDesc: AVCaptureDeviceDiscoverySession!) {
        for device in (deviceDesc.devices)! {
            if device.position == AVCaptureDevicePosition.back {
                do {
                    let input = try AVCaptureDeviceInput(device: device)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        if captureSession.canAddOutput(photoOutput) {
                            captureSession.addOutput(photoOutput)
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                            cameraView.layer.addSublayer(previewLayer!)
                            captureSession.startRunning()
                            print("ADD Back")
                        } else { print("Cannot add input - back") }
                    }
                } catch {
                    print("Error")
                }
            } else if device.position == AVCaptureDevicePosition.front {
                do {
                    let input = try AVCaptureDeviceInput(device: device)
                    print(input)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        if captureSession.canAddOutput(photoOutput) {
                            captureSession.addOutput(photoOutput)
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                            cameraView.layer.addSublayer(previewLayer!)
                            captureSession.startRunning()
                            print("ADD Front")
                        }
                    } else { print("Cannot add input - front") }
                } catch {
                    print(error)
                }
            }
        }
    }

    func didPressTakePhoto() {
        if let videoConnection = photoOutput.connection(withMediaType: AVMediaTypeVideo) {
            videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait
            let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
            photoOutput.capturePhoto(with: settings, delegate: self)
        }
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer!, previewPhotoSampleBuffer: previewPhotoSampleBuffer)
        let dataProvider = CGDataProvider(data: imageData as! CFData)
        let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: CGColorRenderingIntent.defaultIntent)
        let image = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: UIImageOrientation.right)
        self.tempImageView.image = image
        self.tempImageView.isHidden = false
        self.yellowButton.isHidden = true
        self.toggleAction.isHidden = true
        self.adorButton.isHidden = true
        print("Hola")
    }

    var didTakePhoto = Bool()

    @IBOutlet weak var yellowButton: UIButton!
    @IBOutlet weak var toggleAction: UIButton!

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        if didTakePhoto {
            tempImageView.isHidden = true
            yellowButton.isHidden = false
            toggleAction.isHidden = false
            adorButton.isHidden = false
            didTakePhoto = false
            print("🍊")
        }
    }

    @IBAction func yellowPressed(_ sender: UIButton) {
        captureSession.startRunning()
        didTakePhoto = true
        didPressTakePhoto()
        print("🐶")
    }

    @IBAction func toggleCamera(_ sender: Any) {
        if toggle == false {
            print("Changing to back camera")
            let currentCameraInput: AVCaptureInput = captureSession.inputs[0] as! AVCaptureInput
            captureSession.removeInput(currentCameraInput)
            toggle = true
            pickCamera(which: toggle)
        } else if toggle == true {
            print("Changing to front camera")
            let currentCameraInput: AVCaptureInput = captureSession.inputs[0] as! AVCaptureInput
            captureSession.removeInput(currentCameraInput)
            toggle = false
            pickCamera(which: toggle)
        }
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }
}
How do I solve this? 🤔
Images taken with the front camera are mirrored. When you take a picture, the image orientation is stored in its EXIF dictionary, or passed along inside a metadata dictionary.
Most of the time, when you pass the image around as JPG or PNG, this value is not taken into account unless you deal with it directly.
You should see a similar problem if you take a picture in landscape.
In your capture method, you are forcing the orientation to a fixed value when you should handle it instead.
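As a sketch of what that could look like, here is a hypothetical adjustment to the question's didPressTakePhoto() (this is not the poster's code): derive the connection's orientation from the current device orientation and mirror the front camera explicitly.

func didPressTakePhoto() {
    if let videoConnection = photoOutput.connection(withMediaType: AVMediaTypeVideo) {
        // Mirror the front camera output (in this code, toggle == false means the front camera is active).
        videoConnection.automaticallyAdjustsVideoMirroring = false
        if videoConnection.isVideoMirroringSupported {
            videoConnection.isVideoMirrored = (toggle == false)
        }
        // Map the current device orientation to a capture orientation instead of forcing .portrait.
        switch UIDevice.current.orientation {
        case .landscapeLeft: videoConnection.videoOrientation = .landscapeRight
        case .landscapeRight: videoConnection.videoOrientation = .landscapeLeft
        case .portraitUpsideDown: videoConnection.videoOrientation = .portraitUpsideDown
        default: videoConnection.videoOrientation = .portrait
        }
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
        photoOutput.capturePhoto(with: settings, delegate: self)
    }
}

Alternatively, leave the capture as is and compensate when building the UIImage, for example by using .leftMirrored instead of .right for front-camera shots.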
So, lately I have been trying to implement the function of switching the camera view from the back to the front camera in Swift 3, with no luck so far.
Currently, my default view is from the back camera; I can take pictures with it and then retake them. But can anyone show me how to either double-tap the screen to switch cameras or simply use the assigned button to switch them? Thank you!
import UIKit
import AVFoundation
import FirebaseDatabase

class CameraView: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    override var prefersStatusBarHidden: Bool {
        return true
    }

    var captureSession: AVCaptureSession!
    var stillImageOutput: AVCaptureStillImageOutput!
    var previewLayer: AVCaptureVideoPreviewLayer!

    @IBOutlet var cameraView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        previewLayer?.frame = cameraView.bounds
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        captureSession = AVCaptureSession()
        captureSession?.sessionPreset = AVCaptureSessionPreset1920x1080
        let backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            if captureSession?.canAddInput(input) == true {
                captureSession?.addInput(input)
                stillImageOutput = AVCaptureStillImageOutput()
                stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
                if captureSession?.canAddOutput(stillImageOutput) == true {
                    captureSession?.addOutput(stillImageOutput)
                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                    previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                    cameraView.layer.addSublayer(previewLayer)
                    captureSession?.startRunning()
                }
            }
        } catch {
            print(error)
        }
    }

    @IBOutlet var tempImageView: UIImageView!

    @IBAction func didPressTakePhoto(_ sender: UIButton) {
        if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
            videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait
            stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
                if sampleBuffer != nil {
                    let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer!)
                    let dataProvider = CGDataProvider.init(data: imageData! as CFData)
                    let cgImageRef = CGImage.init(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
                    let image = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: UIImageOrientation.right)
                    self.tempImageView.image = image
                    self.tempImageView.isHidden = false
                }
            })
        }
    }

    var didTakePhoto = Bool()

    @IBAction func didPressTakeAnother(_ sender: UIButton) {
        if didTakePhoto == true {
            tempImageView.isHidden = true
            didTakePhoto = false
        } else {
            captureSession?.startRunning()
            didTakePhoto = true
        }
    }
}
I don't see any problems here. Here is a working solution:
import Foundation
import UIKit
import AVFoundation

class MainViewController: UIViewController {
    var tempImage: UIImageView?
    var captureSession: AVCaptureSession?
    var stillImageOutput: AVCaptureStillImageOutput?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var currentCaptureDevice: AVCaptureDevice?
    var usingFrontCamera = false

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        loadCamera()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        videoPreviewLayer!.frame = self.cameraPreviewSurface.bounds
    }

    @IBAction func switchButtonAction(_ sender: Any) {
        usingFrontCamera = !usingFrontCamera
        loadCamera()
    }

    func getFrontCamera() -> AVCaptureDevice? {
        let videoDevices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
        for device in videoDevices! {
            let device = device as! AVCaptureDevice
            if device.position == AVCaptureDevicePosition.front {
                return device
            }
        }
        return nil
    }

    func getBackCamera() -> AVCaptureDevice {
        return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
    }

    func loadCamera() {
        if captureSession == nil {
            captureSession = AVCaptureSession()
            captureSession!.sessionPreset = AVCaptureSessionPresetPhoto
        }
        var error: NSError?
        var input: AVCaptureDeviceInput!
        currentCaptureDevice = (usingFrontCamera ? getFrontCamera() : getBackCamera())
        do {
            input = try AVCaptureDeviceInput(device: currentCaptureDevice)
        } catch let error1 as NSError {
            error = error1
            input = nil
            print(error!.localizedDescription)
        }
        for i: AVCaptureDeviceInput in (self.captureSession?.inputs as! [AVCaptureDeviceInput]) {
            self.captureSession?.removeInput(i)
        }
        if error == nil && captureSession!.canAddInput(input) {
            captureSession!.addInput(input)
            stillImageOutput = AVCaptureStillImageOutput()
            stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
            if captureSession!.canAddOutput(stillImageOutput) {
                captureSession!.addOutput(stillImageOutput)
                videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                videoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
                videoPreviewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
                //self.cameraPreviewSurface.layer.sublayers?.forEach { $0.removeFromSuperlayer() }
                self.cameraPreviewSurface.layer.addSublayer(videoPreviewLayer!)
                DispatchQueue.main.async {
                    self.captureSession!.startRunning()
                }
            }
        }
    }
}
Some notes:
cameraPreviewSurface is the UIView where the camera preview will show.
Don't reassign the session, and don't just add a new input: remove the existing inputs before adding a new one.
P.S. The code was done with Swift 3.0.1 / Xcode 8.1.
Cheers :)
Xcode version: 10.1 (10B61)
Swift version: 4.2
Change the AVCaptureSession capture source
This builds on Stepan Maksymov's solution; we can simplify it by replacing captureSession.inputs.
First, create an @IBAction and connect it in the view controller to change the camera view:
@IBAction private func changeCamera(_ cameraButton: UIButton) {
    usingFrontCamera = !usingFrontCamera
    do {
        captureSession.removeInput(captureSession.inputs.first!)
        if usingFrontCamera {
            captureDevice = getFrontCamera()
        } else {
            captureDevice = getBackCamera()
        }
        let captureDeviceInput1 = try AVCaptureDeviceInput(device: captureDevice)
        captureSession.addInput(captureDeviceInput1)
    } catch {
        print(error.localizedDescription)
    }
}
Second, copy the simplified AVCaptureDevice setup (again based on Stepan Maksymov's answer):
func getFrontCamera() -> AVCaptureDevice? {
    return AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .front).devices.first
}

func getBackCamera() -> AVCaptureDevice? {
    return AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices.first
}
You can replace the @IBAction with a plain function like so:
func changeCamera() {
    usingFrontCamera = !usingFrontCamera
    do {
        captureSession.removeInput(captureSession.inputs.first!)
        if usingFrontCamera {
            captureDevice = getFrontCamera()
        } else {
            captureDevice = getBackCamera()
        }
        let captureDeviceInput1 = try AVCaptureDeviceInput(device: captureDevice)
        captureSession.addInput(captureDeviceInput1)
    } catch {
        print(error.localizedDescription)
    }
}
In addition to Stepan Maksymov's post, I suggest adding this function:
func stopCaptureSession() {
    self.captureSession.stopRunning()
    if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
        for input in inputs {
            self.captureSession.removeInput(input)
        }
    }
}
And call it instead of these lines from his post:
for i: AVCaptureDeviceInput in (self.captureSession?.inputs as! [AVCaptureDeviceInput]) {
    self.captureSession?.removeInput(i)
}
This way the cameras will switch more quickly.
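For example, the relevant part of loadCamera() would then look like this (a sketch combining the two answers above, with the same property names):

func loadCamera() {
    if captureSession == nil {
        captureSession = AVCaptureSession()
        captureSession!.sessionPreset = AVCaptureSessionPresetPhoto
    }
    currentCaptureDevice = (usingFrontCamera ? getFrontCamera() : getBackCamera())
    stopCaptureSession()  // replaces the input-removal loop from the original answer
    // ... add the new input and output and start the session exactly as before ...
}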
I'm trying to capture an image using AVFoundation, and the captured output image is very dark (it almost looks like a black screen). This is my code; I'm a novice at Swift, so any help would be appreciated.
import UIKit
import AVFoundation
import MobileCoreServices

class ViewController: UIViewController {
    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?

    @IBOutlet var captureView: UIView!
    @IBOutlet var capturedImage: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        for device in devices {
            if (device.hasMediaType(AVMediaTypeVideo)) {
                if (device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        beginSession()
                    }
                }
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func beginSession() {
        var err: NSError? = nil
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.captureView.layer.addSublayer(previewLayer)
        previewLayer?.frame = self.captureView.layer.frame
        captureSession.startRunning()
    }

    @IBAction func capture(sender: AnyObject) {
        var stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
        if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) {
            stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (sampleBuffer, error) in
                var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                var dataProvider = CGDataProviderCreateWithCFData(imageData)
                var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, kCGRenderingIntentDefault)
                var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right)
                var capturedImage = UIImageView(image: image)
                //Show the captured image to
                //Save the captured preview to image
                UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
            })
        }
    }
}
Move this code block
stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
if captureSession.canAddOutput(stillImageOutput) {
    captureSession.addOutput(stillImageOutput)
}
to
func beginSession()
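For reference, a sketch of beginSession() after the move (same names as the question's code, with stillImageOutput promoted to a property so the capture action can reuse it). Configuring the output once, during session setup, avoids capturing before the session and auto-exposure have settled, which is the usual cause of dark frames:

let stillImageOutput = AVCaptureStillImageOutput()

func beginSession() {
    var err: NSError? = nil
    captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
    if err != nil {
        println("error: \(err?.localizedDescription)")
    }
    // Add the still image output once, before the session starts running.
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    if captureSession.canAddOutput(stillImageOutput) {
        captureSession.addOutput(stillImageOutput)
    }
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.captureView.layer.addSublayer(previewLayer)
    previewLayer?.frame = self.captureView.layer.frame
    captureSession.startRunning()
}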
I'm trying to capture the image and save it to a variable when I press "myButton". What should I do?
My code is as follows:
import UIKit
import AVFoundation
import MobileCoreServices

class ViewController: UIViewController {
    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?

    @IBOutlet var myTap: UITapGestureRecognizer!
    @IBOutlet weak var myButton: UIButton!

    @IBAction func shotPress(sender: UIButton) {
        //Save image to variable somehow
        var stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        for device in devices {
            if (device.hasMediaType(AVMediaTypeVideo)) {
                if (device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        beginSession()
                    }
                }
            }
        }
    }

    func updateDeviceSettings(focusValue: Float, isoValue: Float) {
        if let device = captureDevice {
            if (device.lockForConfiguration(nil)) {
                device.focusMode = AVCaptureFocusMode.ContinuousAutoFocus
                device.unlockForConfiguration()
            }
        }
    }

    func beginSession() {
        var err: NSError? = nil
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer)
        self.view.bringSubviewToFront(myButton)
        previewLayer?.frame = self.view.layer.frame
        captureSession.startRunning()
    }
}
This is what I am using in one of the apps I am working on. It should be helpful for your problem as well.
func capturePicture() {
    println("Capturing image")
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    captureSession.addOutput(stillImageOutput)
    if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) {
        stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (sampleBuffer, error) in
            var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
            var dataProvider = CGDataProviderCreateWithCFData(imageData)
            var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault)
            var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right)
            var imageView = UIImageView(image: image)
            imageView.frame = CGRect(x: 0, y: 0, width: self.screenSize.width, height: self.screenSize.height)
            //Show the captured image to
            self.view.addSubview(imageView)
            //Save the captured preview to image
            UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
        })
    }
}
Updated for Swift 3:
let stillImageOutput = AVCaptureStillImageOutput.init()
stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
self.cameraSession.addOutput(stillImageOutput)
if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
    stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
        let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer!)
        let dataProvider = CGDataProvider.init(data: imageData! as CFData)
        let cgImageRef = CGImage.init(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
        let image = UIImage.init(cgImage: cgImageRef!, scale: 1.0, orientation: .right)
        // do something with image
    })
}