Integrating Custom Camera View AVCaptureDevice - ios

I'm trying to integrate a custom camera view and am following some slightly outdated code while doing so. I've had several errors, but I believe I've fixed all but two.
Here is the current code so far:
import Foundation
import AVFoundation
import UIKit

class setupView : UIViewController {

    @IBOutlet var cameraView: UIView!
    @IBOutlet var nameTextField: UITextField!

    var captureSession = AVCaptureSession()
    var stillImageOutput = AVCapturePhotoOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        let session = AVCaptureDeviceDiscoverySession.init(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back)
        if let device = session?.devices[0] {
            if device.position == AVCaptureDevicePosition.back {
                do {
                    let input = try AVCaptureDeviceInput(device: device)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        stillImageOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]
                        if captureSession.canAddOutput(stillImageOutput) {
                            captureSession.addOutput(stillImageOutput)
                            captureSession.startRunning()
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                            cameraView.layer.addSublayer(previewLayer)
                            previewLayer.bounds = cameraView.frame
                            previewLayer.position = CGPoint(x: cameraView.frame.width / 2, y: cameraView.frame.height / 2)
                        }
                    }
                } catch {
                }
            }
        }
    }

    @IBAction func takePhoto(_ sender: Any) {
    }

    @IBAction func submitAction(_ sender: Any) {
    }
}
I'm currently getting 2 errors:
"Value of type AVCapturePhotoOutput" has no member "outputSettings"
"Value of type "AVCaptureVideoPreviewLayer" has no member
"AVLayerVideoGravityResizeAspectFill"

You are almost there. The problem is that some of the AVFoundation classes are deprecated, and there is more than one way to take a photo now. Here are the issues with your code.
"Value of type AVCapturePhotoOutput" has no member "outputSettings"
It is because actually AVCapturePhotoOutput don't have any member defined as outputSettings. Check out full documentation of AVCapturePhotoOutput
Actually outputSettings is member of AVCaptureStillImageOutput and the same is deprecated from iOS 10.0
"Value of type "AVCaptureVideoPreviewLayer" has no member
"AVLayerVideoGravityResizeAspectFill"
Again the same mistake, as per your code there is no member for AVCaptureVideoPreviewLayer. In case if you want to set the video preview layer set it like below.
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
As you mentioned, the code is outdated and uses the deprecated AVCaptureStillImageOutput.
If you really want to use AVCapturePhotoOutput, you should follow the steps below.
These are the steps to capture a photo.
Create an AVCapturePhotoOutput object. Use its properties to determine supported capture settings and to enable certain features (for example, whether to capture Live Photos).
Create and configure an AVCapturePhotoSettings object to choose features and settings for a specific capture (for example, whether to enable image stabilization or flash).
Capture an image by passing your photo settings object to the capturePhoto(with:delegate:) method along with a delegate object implementing the AVCapturePhotoCaptureDelegate protocol. The photo capture output then calls your delegate to notify you of significant events during the capture process.
Put the code below in your capture action (cameraOutput in the snippet stands for your AVCapturePhotoOutput property, stillImageOutput in your code), and don't forget to conform to and implement the AVCapturePhotoCaptureDelegate protocol in your class; a sketch of the callback follows the snippet.
let settings = AVCapturePhotoSettings()
let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
kCVPixelBufferWidthKey as String: 160,
kCVPixelBufferHeightKey as String: 160,
]
settings.previewPhotoFormat = previewFormat
self.cameraOutput.capturePhoto(with: settings, delegate: self)
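For completeness, here is a minimal sketch of that delegate callback, assuming the Swift 3 / iOS 10 naming used above (on iOS 11 and later you would implement photoOutput(_:didFinishProcessingPhoto:error:) instead); what you do with the resulting image is up to you:

    // AVCapturePhotoCaptureDelegate callback (Swift 3 / iOS 10 naming)
    func capture(_ captureOutput: AVCapturePhotoOutput,
                 didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?,
                 previewPhotoSampleBuffer: CMSampleBuffer?,
                 resolvedSettings: AVCaptureResolvedPhotoSettings,
                 bracketSettings: AVCaptureBracketSettings?,
                 error: Error?) {
        guard error == nil,
              let buffer = photoSampleBuffer,
              let data = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: buffer,
                                                                          previewPhotoSampleBuffer: previewPhotoSampleBuffer),
              let image = UIImage(data: data) else { return }
        // Use the captured image here, e.g. show it in an image view or save it.
        print("Captured image of size \(image.size)")
    }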
If you would like to know the different ways of capturing a photo with AVFoundation, check out my previous SO answer.
Apple's documentation also explains very clearly how to use AVCapturePhotoOutput.

import UIKit
import AVFoundation
import Foundation

@IBOutlet weak var mainimage: UIImageView!

let captureSession = AVCaptureSession()
let stillImageOutput = AVCaptureStillImageOutput()
var previewLayer : AVCaptureVideoPreviewLayer?
var captureDevice : AVCaptureDevice?

override func viewDidLoad() {
    super.viewDidLoad()
    captureSession.sessionPreset = AVCaptureSessionPresetHigh
    if let devices = AVCaptureDevice.devices() as? [AVCaptureDevice] {
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the front camera
                if (device.position == AVCaptureDevicePosition.front) {
                    captureDevice = device
                    if captureDevice != nil {
                        print("Capture device found")
                        beginSession()
                    }
                }
            }
        }
    }
}

func beginSession() {
    do {
        try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
    }
    catch {
        print("error: \(error.localizedDescription)")
    }
    guard let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) else {
        print("no preview layer")
        return
    }
    self.view.layer.addSublayer(previewLayer)
    previewLayer.frame = self.view.layer.frame
    captureSession.startRunning()
    self.view.addSubview(mainimage)
}
This code is working in my app
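The snippet above only configures the session; it never captures a still. A minimal sketch of a capture action that could go with it, assuming the Swift 3 naming of the (deprecated) AVCaptureStillImageOutput API and that mainimage is where the result should be shown:

    // Hypothetical button action using the stillImageOutput configured above.
    @IBAction func capturePressed(_ sender: Any) {
        guard let connection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) else { return }
        stillImageOutput.captureStillImageAsynchronously(from: connection) { sampleBuffer, error in
            guard error == nil, let buffer = sampleBuffer,
                  let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) else { return }
            // Update the UI on the main queue.
            DispatchQueue.main.async {
                self.mainimage.image = UIImage(data: data)
            }
        }
    }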

Related

How can I access the webcam in a Mac build using Mac Catalyst?

@IBOutlet weak var viewCam: UIView!

let captureSession = AVCaptureSession()
var captureDevice : AVCaptureDevice?
var previewLayer : AVCaptureVideoPreviewLayer?

captureSession.sessionPreset = AVCaptureSession.Preset.low
// Get all audio and video devices on this machine
let devices = AVCaptureDevice.devices()
// Find the FaceTime HD camera object
for device in devices {
    print(device)
    if ((device as AnyObject).hasMediaType(AVMediaType.video)) {
        print(device)
        captureDevice = device as? AVCaptureDevice
    }
}
if captureDevice != nil {
    do {
        try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer?.frame = (self.viewCam.frame)
        // Add previewLayer into custom view
        self.viewCam.layer.addSublayer(previewLayer!)
        // Start camera
        captureSession.startRunning()
    } catch {
        print(AVCaptureSessionErrorKey.description)
    }
}
I am using this code in a Mac Catalyst (iOS) build, and it works fine on iPhone and iPad, but captureDevice comes back nil on the Mac.
When I used the same code in a native Mac build, it worked fine. I am not sure how to achieve this with Mac Catalyst.
Please answer below or make comments, thanks in advance.

AVFoundation camera previewlayer doesn't match horizontal orientation (Swift 4)

I've been taking bits and pieces of code from the internet and stackoverflow to get a simple camera app working.
However, I've noticed that if I flip my phone to the sideways position, I see two problems:
1) the camera preview layer only takes up half the screen
2) the camera's orientation doesn't seem to be changing; it stays fixed in the vertical position
My constraints seem to be fine, and if I look at various simulators' UIImageView (the camera's preview layer) in the horizontal position, the UIImage is stretched properly. So I'm not sure why the camera preview layer is only stretching to half the screen.
(ImagePreview = Camera preview layer)
As for the orientation problem, this seems to be a coding problem?
I looked up some posts on Stack Overflow, but I didn't see anything for Swift 4.
Not sure if there is an easy way to do this in Swift 4.
iPhone AVFoundation camera orientation
Here is some of the code from my camera app:
import Foundation
import AVFoundation
import UIKit
class CameraSetup {

    var captureSession = AVCaptureSession()
    var frontCam : AVCaptureDevice?
    var backCam : AVCaptureDevice?
    var currentCam: AVCaptureDevice?
    var captureInput: AVCaptureDeviceInput?
    var captureOutput: AVCapturePhotoOutput?
    var cameraPreviewLayer: AVCaptureVideoPreviewLayer?

    func captureDevice() {
        let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .unspecified)
        for device in discoverySession.devices {
            if device.position == .front {
                frontCam = device
            }
            else if device.position == .back {
                backCam = device
            }
            do {
                try backCam?.lockForConfiguration()
                backCam?.focusMode = .autoFocus
                backCam?.exposureMode = .autoExpose
                backCam?.unlockForConfiguration()
            }
            catch let error {
                print(error)
            }
        }
    }

    func configureCaptureInput() {
        currentCam = backCam!
        do {
            captureInput = try AVCaptureDeviceInput(device: currentCam!)
            if captureSession.canAddInput(captureInput!) {
                captureSession.addInput(captureInput!)
            }
        }
        catch let error {
            print(error)
        }
    }

    func configureCaptureOutput() {
        captureOutput = AVCapturePhotoOutput()
        captureOutput!.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey : AVVideoCodecType.jpeg])], completionHandler: nil)
        if captureSession.canAddOutput(captureOutput!) {
            captureSession.addOutput(captureOutput!)
        }
        captureSession.startRunning()
    }
Here is the PreviewLayer function:
func configurePreviewLayer(view: UIView) {
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resize
    cameraPreviewLayer?.zPosition = -1
    view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
    cameraPreviewLayer?.frame = view.bounds
}
EDIT:
As suggested, I moved the view.bounds line up one line:
func configurePreviewLayer(view: UIView) {
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resize
    cameraPreviewLayer?.zPosition = -1
    cameraPreviewLayer?.frame = view.bounds
    view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}
However, the problem still persists in the horizontal view (screenshot omitted).
I think you should use a UIView instead of a UIImageView, and try this:
cameraPreviewlayer?.videoGravity = AVLayerVideoGravityResize;
cameraPreviewlayer?.zPosition = -1;
cameraPreviewlayer?.frame = self.imagePreview.bounds
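The answer above does not handle the rotation itself. A common approach is to resize the layer and update its connection's orientation whenever the view's bounds change; here is a minimal sketch, assuming the cameraPreviewLayer property from the question's CameraSetup class and a hypothetical helper called from the owning view controller's viewDidLayoutSubviews():

    // Hypothetical helper on CameraSetup; call it from viewDidLayoutSubviews()
    // so it runs after every rotation or layout change.
    func updatePreviewLayer(for view: UIView) {
        guard let layer = cameraPreviewLayer else { return }
        // Keep the preview filling the view after the bounds change.
        layer.frame = view.bounds
        // Rotate the video to match the current interface orientation.
        if let connection = layer.connection, connection.isVideoOrientationSupported {
            switch UIApplication.shared.statusBarOrientation {
            case .landscapeLeft:      connection.videoOrientation = .landscapeLeft
            case .landscapeRight:     connection.videoOrientation = .landscapeRight
            case .portraitUpsideDown: connection.videoOrientation = .portraitUpsideDown
            default:                  connection.videoOrientation = .portrait
            }
        }
    }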

Why is AVCaptureSession method canAddOutput returning false?

I'm trying to build a camera app, and I'm trying to set up my capture session within viewDidLoad() in my main view controller. For some reason, whenever I run the app on my phone, AVCaptureSession method canAddOutput is evaluated as false:
var captureSession: AVCaptureSession!
var photoOutput: AVCapturePhotoOutput!
var previewLayer : AVCaptureVideoPreviewLayer!

//MARK: Outlets
@IBOutlet weak var previewView: UIView!

override func viewDidLoad() {
    super.viewDidLoad()
    captureSession = AVCaptureSession()
    captureSession.sessionPreset = AVCaptureSessionPresetPhoto
    //Ask permission to camera
    let device = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .back)
    AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { (granted: Bool) in
        if granted {
            print("granted")
            //Set up session
            if let input = try? AVCaptureDeviceInput(device: device) {
                print("Input = device")
                if (self.captureSession.canAddInput(input)) {
                    self.captureSession.addInput(input)
                    print("Input added to capture session")
                    if (self.captureSession.canAddOutput(self.photoOutput)) {
                        print("Output added to capture session")
                        self.captureSession.addOutput(self.photoOutput)
                        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
                        self.previewLayer.frame = self.previewView.bounds
                        self.previewView.layer.addSublayer(self.previewLayer!)
                        self.captureSession.startRunning()
                        print("Session is running")
                    }
                }
            }
        }
        else {
            print("Goodbye")
        }
    })
}
Unfortunately, I can only get it to print up until "Input added to capture session". Any suggestions would help - thanks!
You have to remove the previous outputs added to the session. You can use a for loop for that:
for outputs in captureSession.outputs {
    captureSession.removeOutput(outputs)
}
Then try to add the new output.
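A minimal sketch of how that might look inside the question's permission callback, under the assumption that the outputs array is typed [AVCaptureOutput] on the SDK in use (it also creates the photo output first, since the code shown never assigns photoOutput before canAddOutput is called):

    // Drop any outputs left over from a previous configuration.
    for output in self.captureSession.outputs {
        self.captureSession.removeOutput(output)
    }

    // The photo output must exist before canAddOutput(_:) is asked about it;
    // in the question's code photoOutput is declared but never initialized.
    self.photoOutput = AVCapturePhotoOutput()
    if self.captureSession.canAddOutput(self.photoOutput) {
        self.captureSession.addOutput(self.photoOutput)
    }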

Unable to use AVCapturePhotoOutput to capture photo swift + xcode

I am working on a custom camera app, and the tutorial uses AVCaptureStillImageOutput, which is deprecated as of iOS 10. I have set up the camera and am now stuck on how to take the photo.
Here is my full view controller where I have the camera:
import UIKit
import AVFoundation

var cameraPos = "back"

class View3: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    @IBOutlet weak var clickButton: UIButton!
    @IBOutlet var cameraView: UIView!

    var session: AVCaptureSession?
    var stillImageOutput: AVCapturePhotoOutput?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        clickButton.center.x = cameraView.bounds.width/2
        loadCamera()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
    }

    @IBAction func clickCapture(_ sender: UIButton) {
        if let videoConnection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo) {
            // This is where I need help
        }
    }

    @IBAction func changeDevice(_ sender: UIButton) {
        if cameraPos == "back" {
            cameraPos = "front"
        }
        else {
            cameraPos = "back"
        }
        loadCamera()
    }

    func loadCamera() {
        session?.stopRunning()
        videoPreviewLayer?.removeFromSuperlayer()
        session = AVCaptureSession()
        session!.sessionPreset = AVCaptureSessionPresetPhoto
        var backCamera = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .front)
        if cameraPos == "back" {
            backCamera = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .back)
        }
        var error: NSError?
        var input: AVCaptureDeviceInput!
        do {
            input = try AVCaptureDeviceInput(device: backCamera)
        } catch let error1 as NSError {
            error = error1
            input = nil
            print(error!.localizedDescription)
        }
        if error == nil && session!.canAddInput(input) {
            session!.addInput(input)
            stillImageOutput = AVCapturePhotoOutput()
            if session!.canAddOutput(stillImageOutput) {
                session!.addOutput(stillImageOutput)
                videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
                videoPreviewLayer?.frame = cameraView.bounds
                videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
                videoPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                cameraView.layer.addSublayer(videoPreviewLayer!)
                session!.startRunning()
            }
        }
    }
}
This is where I need help:
@IBAction func clickCapture(_ sender: UIButton) {
    if let videoConnection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo) {
        // This is where I need help
    }
}
I have gone through the answer here: How to use AVCapturePhotoOutput, but I do not understand how to incorporate that code into this code, as it involves declaring a new class.
You are almost there.
For Output as AVCapturePhotoOutput
Check out AVCapturePhotoOutput documentation for more help.
These are the steps to capture a photo.
Create an AVCapturePhotoOutput object. Use its properties to determine supported capture settings and to enable certain features (for example, whether to capture Live Photos).
Create and configure an AVCapturePhotoSettings object to choose features and settings for a specific capture (for example, whether to enable image stabilization or flash).
Capture an image by passing your photo settings object to the capturePhoto(with:delegate:) method along with a delegate object implementing the AVCapturePhotoCaptureDelegate protocol. The photo capture output then calls your delegate to notify you of significant events during the capture process.
Put the code below in your clickCapture method (cameraOutput in the snippet stands for your AVCapturePhotoOutput property, stillImageOutput in your code), and don't forget to conform to and implement the AVCapturePhotoCaptureDelegate protocol in your class.
let settings = AVCapturePhotoSettings()
let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
kCVPixelBufferWidthKey as String: 160,
kCVPixelBufferHeightKey as String: 160,
]
settings.previewPhotoFormat = previewFormat
self.cameraOutput.capturePhoto(with: settings, delegate: self)
For Output as AVCaptureStillImageOutput
If you intend to snap a photo from the video connection, you can follow the steps below.
Step 1: Get the connection
if let videoConnection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo) {
    // ...
    // Code for photo capture goes here...
}
Step 2: Capture the photo
Call the captureStillImageAsynchronously(from:completionHandler:) function on the stillImageOutput.
The sampleBuffer passed to the completion handler represents the data that is captured.
stillImageOutput?.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) -> Void in
    // ...
    // Process the image data (sampleBuffer) here to get an image file we can put in our captureImageView
})
Step 3: Process the Image Data
We will need to take a few steps to process the image data found in sampleBuffer in order to end up with a UIImage that we can insert into our captureImageView and easily use elsewhere in our app.
if sampleBuffer != nil {
    let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer!)
    let dataProvider = CGDataProvider(data: imageData! as CFData)
    let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
    let image = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: UIImageOrientation.right)
    // ...
    // Add the image to captureImageView here...
}
Step 4: Save the image
Based on your needs, either save the image to the photo gallery or show it in an image view, as sketched below.
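For the save option, a minimal sketch using the UIKit convenience function (this assumes the image constant from Step 3 and the appropriate photo-library usage description in your Info.plist):

    // Writes the captured image to the user's photo library.
    // Requires the photo library usage description key in Info.plist.
    UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)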
For more details check out Create custom camera view guide under Snap a Photo

Show camera feed in UIView using AVFoundation

I am trying to show the camera's feed in a UIView. Later I need to be able to analyze video frames, so I need to do this using AVFoundation, as I understand.
What I have so far:
import UIKit
import AVFoundation

class ViewController: UIViewController {

    @IBOutlet weak var camView: UIView!

    var captureSession:AVCaptureSession?
    var videoPreviewLayer:AVCaptureVideoPreviewLayer?
    var videoCaptureDevice: AVCaptureDevice?
    var input: AnyObject?

    override func viewDidLoad() {
        super.viewDidLoad()
        videoCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        do {
            input = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            print("video device error")
        }
        captureSession = AVCaptureSession()
        captureSession?.addInput(input as! AVCaptureInput)
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        videoPreviewLayer?.frame = camView.layer.bounds
        captureSession?.startRunning()
    }
}
The camView is visible, but it doesn't show anything.
The app asked for permission to use the camera at first run, and have been granted that permission.
Setting a breakpoint and inspecting captureSession, videoPreviewLayer, videoCaptureDevice and input confirms they have all been set.
The video preview layer is not added to the camView. Hence you cannot see the camera session running in the camView.
Add this line:
camView.layer.addSublayer(videoPreviewLayer!)
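In context, the end of the question's viewDidLoad would then look roughly like this:

    captureSession = AVCaptureSession()
    captureSession?.addInput(input as! AVCaptureInput)
    videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
    videoPreviewLayer?.frame = camView.layer.bounds
    // The missing step: attach the preview layer to the view hierarchy.
    camView.layer.addSublayer(videoPreviewLayer!)
    captureSession?.startRunning()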
