How to get Camera Calibration Data on iOS? aka AVCameraCalibrationData

I'm working on an app which needs AVCameraCalibrationData. My app is crashing and throwing an exception:
-[AVCaptureDataOutputSynchronizer initWithDataOutputs:] Unsupported AVCaptureOutput in dataOutputs - <AVCapturePhotoOutput: 0x283d6ab80>
I have tried some other workarounds, but depthDataOutput is never called. Please look at my camera configuration. Any help would be much appreciated.
class ViewController: UIViewController {

    @IBOutlet var image_view: UIImageView!
    @IBOutlet var capture_button: UIButton!

    var captureSession: AVCaptureSession?
    var sessionOutput: AVCapturePhotoOutput?
    var depthOutput: AVCaptureDepthDataOutput?
    var previewLayer: AVCaptureVideoPreviewLayer?
    var outputSynchronizer: AVCaptureDataOutputSynchronizer?
    let dataOutputQueue = DispatchQueue(label: "data queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)

    @IBAction func capture(_ sender: Any) {
        self.sessionOutput?.capturePhoto(with: AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg]), delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        self.previewLayer?.removeFromSuperlayer()
        self.image_view.image = UIImage(data: photo.fileDataRepresentation()!)
        print(photo.cameraCalibrationData) // is nil
        let depth_map = photo.depthData?.depthDataMap
        print("depth_map:", depth_map) // is nil
    }

    func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) {
        print("depth data") // never called
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        self.captureSession = AVCaptureSession()
        self.captureSession?.sessionPreset = .photo

        self.sessionOutput = AVCapturePhotoOutput()
        self.depthOutput = AVCaptureDepthDataOutput()
        self.depthOutput?.setDelegate(self, callbackQueue: dataOutputQueue)

        do {
            let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
            let input = try AVCaptureDeviceInput(device: device!)
            if (self.captureSession?.canAddInput(input))! {
                self.captureSession?.addInput(input)
                if (self.captureSession?.canAddOutput(self.sessionOutput!))! {
                    self.captureSession?.addOutput(self.sessionOutput!)
                    if (self.captureSession?.canAddOutput(self.depthOutput!))! {
                        self.captureSession?.addOutput(self.depthOutput!)
                        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession!)
                        self.previewLayer?.frame = self.image_view.bounds
                        self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
                        self.previewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
                        self.image_view.layer.addSublayer(self.previewLayer!)
                    }
                }
            }
            if sessionOutput!.isDepthDataDeliverySupported {
                sessionOutput?.isDepthDataDeliveryEnabled = true
                depthOutput?.connection(with: .depthData)!.isEnabled = true
                depthOutput?.isFilteringEnabled = true
                outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [sessionOutput!, depthOutput!]) // app crash on that line of code
                outputSynchronizer!.setDelegate(self, queue: self.dataOutputQueue)
            }
        } catch {}

        self.captureSession?.startRunning()
    }
}

@available(iOS 11.0, *)
extension ViewController: AVCaptureDataOutputSynchronizerDelegate, AVCaptureDepthDataOutputDelegate, AVCapturePhotoCaptureDelegate {

    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
    }
}
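For what it's worth, the crash message is accurate: AVCaptureDataOutputSynchronizer only accepts data outputs (AVCaptureVideoDataOutput, AVCaptureDepthDataOutput, AVCaptureMetadataOutput, AVCaptureAudioDataOutput), and AVCapturePhotoOutput is not one of them. Below is a hedged sketch, not a verified fix, of a supported pairing that reuses the session, depthOutput, and dataOutputQueue from the code above; it reads the calibration data that accompanies each AVDepthData. These are fragments to drop into the existing class, not a standalone program.

// Sketch: synchronize a video data output (not the photo output) with depth.
let videoOutput = AVCaptureVideoDataOutput()
if (self.captureSession?.canAddOutput(videoOutput))! {
    self.captureSession?.addOutput(videoOutput)
}
outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoOutput, depthOutput!])
outputSynchronizer!.setDelegate(self, queue: dataOutputQueue)

func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
    if let syncedDepth = synchronizedDataCollection.synchronizedData(for: self.depthOutput!) as? AVCaptureSynchronizedDepthData,
       !syncedDepth.depthDataWasDropped {
        // Calibration data is attached to the depth data itself.
        print(syncedDepth.depthData.cameraCalibrationData as Any)
    }
}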

Related

Overlay image over custom camera

I have made a custom camera and want to overlay another image over it. I am using AVKit to get the custom camera. I was able to overlay the image when I was using the built-in camera. This is the code I have for the custom camera; "newImage" is the image that I would like to overlay over the camera.
import UIKit
import AVKit

class liveView: UIViewController, AVCapturePhotoCaptureDelegate {

    @IBOutlet weak var previewView: UIView!
    @IBOutlet weak var captureImageView: UIImageView!

    var captureSession: AVCaptureSession!
    var stillImageOutput: AVCapturePhotoOutput!
    var videoPreviewLayer: AVCaptureVideoPreviewLayer!
    var newImage: UIImage!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = .medium
        guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
        else {
            print("Unable to access back camera!")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            stillImageOutput = AVCapturePhotoOutput()
            if captureSession.canAddInput(input) && captureSession.canAddOutput(stillImageOutput) {
                captureSession.addInput(input)
                captureSession.addOutput(stillImageOutput)
                // videoPreviewLayer?.frame = self.newImage.accessibilityFrame
                setupLivePreview()
            }
        }
        catch let error {
            print("Error Unable to initialize back camera: \(error.localizedDescription)")
        }
    }

    func setupLivePreview() {
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer.videoGravity = .resizeAspect
        videoPreviewLayer.connection?.videoOrientation = .portrait
        previewView.layer.addSublayer(videoPreviewLayer)
        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession.startRunning()
            DispatchQueue.main.async {
                self.videoPreviewLayer.frame = self.previewView.bounds
            }
        }
    }

    @IBAction func didTakePhoto(_ sender: UIBarButtonItem) {
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        stillImageOutput.capturePhoto(with: settings, delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let imageData = photo.fileDataRepresentation()
        else { return }
        let image = UIImage(data: imageData)
        captureImageView.image = image
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        self.captureSession.stopRunning()
    }
}
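The thread records no accepted answer here. One straightforward approach, sketched below under the assumption that newImage should cover the full photo, is to composite the overlay onto the captured image inside the existing delegate callback:

// Sketch: replace the body of photoOutput(_:didFinishProcessingPhoto:error:)
// to draw 'newImage' over the captured photo.
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    guard let imageData = photo.fileDataRepresentation(),
          let baseImage = UIImage(data: imageData) else { return }
    UIGraphicsBeginImageContextWithOptions(baseImage.size, false, baseImage.scale)
    baseImage.draw(at: .zero)
    newImage.draw(in: CGRect(origin: .zero, size: baseImage.size)) // stretches overlay to fill; adjust the rect as needed
    let composited = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    captureImageView.image = composited
}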

iPhone 7+, iOS 11.2: Depth data delivery is not supported in the current configuration

This bug is driving me mad. I'm trying to produce the absolute minimal code that gets AVDepthData from an iPhone 7+ using its dual camera.
I have this code:
//
//  RecorderViewController.swift
//  ios-recorder-app

import UIKit
import AVFoundation

class RecorderViewController: UIViewController {

    @IBOutlet weak var previewView: UIView!

    @IBAction func onTapTakePhoto(_ sender: Any) {
        guard let capturePhotoOutput = self.capturePhotoOutput else { return }
        let photoSettings = AVCapturePhotoSettings()
        photoSettings.isDepthDataDeliveryEnabled = true // Error
        capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    var session: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var capturePhotoOutput: AVCapturePhotoOutput?

    override func viewDidLoad() {
        super.viewDidLoad()
        AVCaptureDevice.requestAccess(for: .video, completionHandler: { _ in })
        let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .depthData, position: .back)
        do {
            print(captureDevice!)
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            self.capturePhotoOutput = AVCapturePhotoOutput()
            self.capturePhotoOutput?.isDepthDataDeliveryEnabled = true // Error
            self.session = AVCaptureSession()
            self.session?.addInput(input)
            self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session!)
            self.videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.videoPreviewLayer?.frame = view.layer.bounds
            previewView.layer.addSublayer(self.videoPreviewLayer!)
            self.session?.addOutput(self.capturePhotoOutput!)
            self.session?.startRunning()
        } catch {
            print(error)
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}

extension RecorderViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        print(photo.depthData)
    }
}
If I comment out the lines that are marked with "Error" the code works as I would expect, and prints nil for depthData.
However, leaving the lines as they are, I get an exception with the message: -[AVCapturePhotoOutput setDepthDataDeliveryEnabled:] Depth data delivery is not supported in the current configuration.
How do I change the "current configuration" so that depth delivery is supported?
I've watched this video: https://developer.apple.com/videos/play/wwdc2017/507/ which was helpful, and I believe I've followed the exact steps required to make this work.
Any tips would be gratefully received!
There are two things that I needed to fix:
1. Set the sessionPreset to a format that supports depth, such as .photo.
2. Add the capturePhotoOutput to the session before setting isDepthDataDeliveryEnabled = true.
Here is my minimal code for getting depth with photos:
//
//  RecorderViewController.swift
//  ios-recorder-app
//

import UIKit
import AVFoundation

class RecorderViewController: UIViewController {

    @IBOutlet weak var previewView: UIView!

    @IBAction func onTapTakePhoto(_ sender: Any) {
        guard let capturePhotoOutput = self.capturePhotoOutput else { return }
        let photoSettings = AVCapturePhotoSettings()
        photoSettings.isDepthDataDeliveryEnabled = true
        capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    var session: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var capturePhotoOutput: AVCapturePhotoOutput?

    override func viewDidLoad() {
        super.viewDidLoad()
        AVCaptureDevice.requestAccess(for: .video, completionHandler: { _ in })
        let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
        print(captureDevice!.activeDepthDataFormat)
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            self.capturePhotoOutput = AVCapturePhotoOutput()
            self.session = AVCaptureSession()
            self.session?.beginConfiguration()
            self.session?.sessionPreset = .photo
            self.session?.addInput(input)
            self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session!)
            self.videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.videoPreviewLayer?.frame = self.view.layer.bounds
            self.previewView.layer.addSublayer(self.videoPreviewLayer!)
            self.session?.addOutput(self.capturePhotoOutput!)
            self.session?.commitConfiguration()
            self.capturePhotoOutput?.isDepthDataDeliveryEnabled = true
            self.session?.startRunning()
        } catch {
            print(error)
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}

extension RecorderViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        print(photo.depthData)
    }
}
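Tying this back to the calibration question at the top: once depth delivery works, the calibration data travels with the AVDepthData, so (a sketch, assuming the setup above) it can be read directly in the delegate without any photo-level calibration options:

// Sketch: AVDepthData carries the calibration for the camera that produced it.
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    print(photo.depthData?.cameraCalibrationData as Any) // should be non-nil once depth delivery is enabled
}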

Swift 3 - How to capture image and its subview?

I am working on a project that contains a custom camera view, and I am unable to capture the subview over the main view. My main view hosts an AVCaptureSession, and I want to take a photo of the superview and the subview together in a single image.
What I am trying in code:
class ViewController: UIViewController {

    @IBOutlet weak var cameraButton: UIButton!

    var captureSession = AVCaptureSession()
    var backCamera: AVCaptureDevice?
    var frontCamera: AVCaptureDevice?
    var currentDevice: AVCaptureDevice?
    var photoOutput: AVCapturePhotoOutput?
    var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
    var image: UIImage?
    var toggleCameraGestureRecognizer = UISwipeGestureRecognizer()
    var zoomInGestureRecognizer = UISwipeGestureRecognizer()
    var zoomOutGestureRecognizer = UISwipeGestureRecognizer()

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCaptureSession()
        setupDevice()
        setupInputOutput()
        setupPreviewLayer()
        captureSession.startRunning()
        toggleCameraGestureRecognizer.direction = .up
        toggleCameraGestureRecognizer.addTarget(self, action: #selector(self.switchCamera))
        view.addGestureRecognizer(toggleCameraGestureRecognizer)
        // Zoom In recognizer
        zoomInGestureRecognizer.direction = .right
        zoomInGestureRecognizer.addTarget(self, action: #selector(zoomIn))
        view.addGestureRecognizer(zoomInGestureRecognizer)
        // Zoom Out recognizer
        zoomOutGestureRecognizer.direction = .left
        zoomOutGestureRecognizer.addTarget(self, action: #selector(zoomOut))
        view.addGestureRecognizer(zoomOutGestureRecognizer)
        styleCaptureButton()
    }

    func styleCaptureButton() {
        cameraButton.layer.borderColor = UIColor.white.cgColor
        cameraButton.layer.borderWidth = 5
        cameraButton.clipsToBounds = true
        cameraButton.layer.cornerRadius = min(cameraButton.frame.width, cameraButton.frame.height) / 2
    }

    func setupCaptureSession() {
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
    }

    func setupDevice() {
        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
        let devices = deviceDiscoverySession.devices
        for device in devices {
            if device.position == AVCaptureDevice.Position.back {
                backCamera = device
            } else if device.position == AVCaptureDevice.Position.front {
                frontCamera = device
            }
        }
        currentDevice = backCamera
    }

    func setupInputOutput() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: currentDevice!)
            captureSession.addInput(captureDeviceInput)
            photoOutput = AVCapturePhotoOutput()
            photoOutput!.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
            captureSession.addOutput(photoOutput!)
        } catch {
            print(error)
        }
    }

    func setupPreviewLayer() {
        self.cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        self.cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
        self.cameraPreviewLayer?.frame = view.frame
        self.view.layer.insertSublayer(self.cameraPreviewLayer!, at: 0)
    }

    @IBAction func cameraButton_TouchUpInside(_ sender: Any) {
        let settings = AVCapturePhotoSettings()
        self.photoOutput?.capturePhoto(with: settings, delegate: self)
    }
}

extension ViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let imageData = photo.fileDataRepresentation() {
            self.image = UIImage(data: imageData)
            performSegue(withIdentifier: "Preview_Segue", sender: nil)
        }
    }
}
Please help me.
If I understood correctly, you are trying to get the content (as an image) of what the camera is grabbing plus some overlay views.
As far as I remember, it is not possible to grab what is inside the AVCaptureVideoPreviewLayer; maybe they changed something in the latest version, but when I tried (iOS 6) it wasn't possible, and the area covered by the preview layer was always empty.
What you can do instead is take the current camera buffer and draw into it. By setting a class as the AVCaptureVideoDataOutput's sample buffer delegate, you can receive this optional callback:
func captureOutput(_ output: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection)
Here you will receive the image from the camera; this buffer can be converted into a UIImage using the Accelerate framework or Core Image.
It's not easy, but it's not impossible either.
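A minimal sketch of that conversion, assuming an AVCaptureVideoDataOutput has been added to the session with self as its sample buffer delegate, and a hypothetical overlayImage to composite:

// Sketch: convert the camera buffer to a UIImage and draw the overlay on top.
let ciContext = CIContext() // reuse one context; creating one per frame is expensive

func captureOutput(_ output: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection) {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    guard let cgImage = ciContext.createCGImage(ciImage, from: ciImage.extent) else { return }
    let cameraImage = UIImage(cgImage: cgImage)

    // Composite the hypothetical 'overlayImage' on top of the camera frame.
    UIGraphicsBeginImageContextWithOptions(cameraImage.size, false, 0)
    cameraImage.draw(at: .zero)
    overlayImage.draw(in: CGRect(origin: .zero, size: cameraImage.size))
    let composited = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    // Hand 'composited' to whatever needs the combined frame.
}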

AVCapturePhotoOutput has no member 'captureStillImageAsynchronously'

I am making a custom camera app in which I capture photos with a takePhoto button, but there is an error in takePhoto saying AVCapturePhotoOutput has no member 'captureStillImageAsynchronously'.
ViewController
import UIKit
import AVFoundation
import CoreImage

@available(iOS 10.0, *)
class ViewController: UIViewController {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCapturePhotoOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet weak var imageViewReal: UIImageView!
    @IBOutlet weak var cameraView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        let deviceSession = AVCaptureDeviceDiscoverySession.init(deviceTypes: [.builtInDuoCamera, .builtInTelephotoCamera, .builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .unspecified)
        for device in (deviceSession?.devices)! {
            if (device as AnyObject).position == AVCaptureDevicePosition.front {
                do {
                    let input = try AVCaptureDeviceInput(device: device)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = .portrait
                            cameraView.layer.addSublayer(previewLayer)
                            cameraView.addSubview(takePhoto)
                            previewLayer.position = CGPoint(x: self.imageViewReal.frame.width / 2, y: self.imageViewReal.frame.height / 2)
                            previewLayer.bounds = imageViewReal.frame
                            captureSession.startRunning()
                        }
                    }
                }
                catch {
                    print("Error")
                }
            }
        }
    }

    @IBAction func takePhoto(_ sender: Any) {
        if let videoConnection = sessionOutput.connection(withMediaType: AVMediaTypeVideo) {
            sessionOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: {
                buffer, Error in
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
                UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData!)!, nil, nil, nil)
            })
        }
    }
}
If your app must remain compatible with iOS 9, use AVCaptureStillImageOutput's method:
func captureStillImageAsynchronously(from connection: AVCaptureConnection!,
                                     completionHandler handler: ((CMSampleBuffer?, Error?) -> Void)!)
If your app targets iOS 10 and later, you should use AVCapturePhotoOutput's method:
open func capturePhoto(with settings: AVCapturePhotoSettings, delegate: AVCapturePhotoCaptureDelegate)
So, for the code above, var sessionOutput = AVCapturePhotoOutput() should be var sessionOutput = AVCaptureStillImageOutput().
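For completeness, here is a hedged sketch of the iOS 10+ path if you keep AVCapturePhotoOutput instead; note the delegate callback below uses the iOS 11 signature (on iOS 10 the equivalent is didFinishProcessingPhotoSampleBuffer):

// Sketch: keep sessionOutput as AVCapturePhotoOutput and adopt
// AVCapturePhotoCaptureDelegate on the view controller.
@IBAction func takePhoto(_ sender: Any) {
    sessionOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
}

func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    guard let data = photo.fileDataRepresentation(),
          let image = UIImage(data: data) else { return }
    UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
}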
Good luck.

How to capture image only custom camera view area in Swift3

I have a custom camera view. What I want is to capture only the image inside the custom camera view when I press the button, but it takes the whole screen, not just the camera view. I have also set the preview layer's frame to cameraView's frame. Here is my code:
class CustomCameraVC: UIViewController, AVCapturePhotoCaptureDelegate {

    @IBOutlet weak var cameraView: UIView!
    @IBOutlet weak var shotImage: UIImageView!

    var captureSession: AVCaptureSession!
    var imageOutput: AVCapturePhotoOutput!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSessionPresetMedium
        imageOutput = AVCapturePhotoOutput()
        let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        do {
            let input = try AVCaptureDeviceInput(device: device)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                if captureSession.canAddOutput(imageOutput) {
                    captureSession.addOutput(imageOutput)
                    captureSession.startRunning()
                    let captureVideoLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer.init(session: captureSession)
                    captureVideoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                    captureVideoLayer.frame = self.cameraView.layer.frame
                    self.cameraView.layer.addSublayer(captureVideoLayer)
                }
            }
        } catch {
            print("error")
        }
    }

    @IBAction func takePhoto(_ sender: UIButton) {
        let settingsForMonitoring = AVCapturePhotoSettings()
        settingsForMonitoring.flashMode = .auto
        settingsForMonitoring.isAutoStillImageStabilizationEnabled = true
        settingsForMonitoring.isHighResolutionPhotoEnabled = false
        imageOutput?.capturePhoto(with: settingsForMonitoring, delegate: self)
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if let photoSampleBuffer = photoSampleBuffer {
            let photoData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer)
            let image = UIImage(data: photoData!)
            shotImage.image = UIImage(data: photoData!)
            UIImageWriteToSavedPhotosAlbum(image!, nil, nil, nil)
        }
    }
}
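No answer is recorded in this thread. The usual fix is a sketch like the following, assuming captureVideoLayer is kept in a property so it is reachable from the capture callback: convert the preview layer's visible rect to normalized image coordinates and crop the photo to it (metadataOutputRectConverted(fromLayerRect:) is the iOS 11 name; on earlier SDKs it is metadataOutputRectOfInterest(for:)):

// Sketch: crop a captured UIImage to what the preview layer displays.
func cropToPreview(_ image: UIImage, previewLayer: AVCaptureVideoPreviewLayer) -> UIImage? {
    guard let cgImage = image.cgImage else { return nil }
    // Normalized (0-1) rect of the preview's visible region in capture coordinates.
    let outputRect = previewLayer.metadataOutputRectConverted(fromLayerRect: previewLayer.bounds)
    let width = CGFloat(cgImage.width)
    let height = CGFloat(cgImage.height)
    let cropRect = CGRect(x: outputRect.origin.x * width,
                          y: outputRect.origin.y * height,
                          width: outputRect.size.width * width,
                          height: outputRect.size.height * height)
    guard let cropped = cgImage.cropping(to: cropRect) else { return nil }
    // Depending on the photo's orientation you may need to swap the axes above.
    return UIImage(cgImage: cropped, scale: image.scale, orientation: image.imageOrientation)
}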
