AVFoundation camera focus stops working after switching from barcode scanner - ios

I have an application that uses RSBarcodes_Swift to scan for barcodes.
When the scan is successful I need to segue to a camera view controller to take pictures. The problem I have is that after the barcode scanner, AVFoundation touch to focus stops working. What I mean by this is that whenever I touch to focus, the camera just briefly tries to focus and then resets to the default lens position. When I don't use the barcode scanner and go directly to the camera, everything works fine. I have also used a couple of other barcode scanners, but the result is the same. Is there any way that I can somehow reset the camera usage, or dispose of the barcode scanner when I'm done with it?
This is the code i use to present the camera and have touch to focus capability:
import Foundation
import AVFoundation
public class CameraViewController : UIViewController {

    // MARK: - Capture pipeline state (built in viewWillAppear)
    var backCamera: AVCaptureDevice?
    var captureSession: AVCaptureSession?
    var stillImageOutput: AVCaptureStillImageOutput?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var previewView: UIView?

    /// Builds the preview view, the tap-to-focus gesture, and the whole
    /// AVFoundation capture pipeline (session, input, still-image output,
    /// preview layer), then starts the session.
    /// NOTE(review): this runs on *every* appearance — repeated appearances
    /// stack preview views and sessions; consider guarding with a nil check.
    public override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        previewView = UIView()
        previewView!.frame = UIScreen.mainScreen().bounds

        // Single-finger, single-tap gesture drives tap-to-focus.
        let shortTap = UITapGestureRecognizer(target: self, action: #selector(shortTapRecognize))
        shortTap.numberOfTapsRequired = 1
        shortTap.numberOfTouchesRequired = 1
        previewView!.addGestureRecognizer(shortTap)
        self.view.addSubview(previewView!)

        captureSession = AVCaptureSession()
        captureSession!.sessionPreset = AVCaptureSessionPresetPhoto
        backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

        var error: NSError?
        var input: AVCaptureDeviceInput!
        do {
            input = try AVCaptureDeviceInput(device: backCamera!)
        } catch let error1 as NSError {
            error = error1
            input = nil
            print(error!.localizedDescription)
        }

        if error == nil && captureSession!.canAddInput(input) {
            captureSession!.addInput(input)
            stillImageOutput = AVCaptureStillImageOutput()
            stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
            if captureSession!.canAddOutput(stillImageOutput) {
                captureSession!.addOutput(stillImageOutput)
                videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                videoPreviewLayer!.frame = previewView!.bounds
                videoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspect
                videoPreviewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
                previewView!.layer.addSublayer(videoPreviewLayer!)
                captureSession!.startRunning()
            }
        }
    }

    public override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
    }

    public override func viewDidLoad() {
        super.viewDidLoad()
    }

    public override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Tap-to-focus: converts the tap location into the camera's point-of-interest
    /// coordinate space and requests a one-shot autofocus there.
    func shortTapRecognize(tap: UITapGestureRecognizer) {
        if tap.state == UIGestureRecognizerState.Ended {
            let pointInPreview = tap.locationInView(tap.view)
            let pointInCamera = videoPreviewLayer!.captureDevicePointOfInterestForPoint(pointInPreview)
            if backCamera!.focusPointOfInterestSupported {
                do {
                    try backCamera!.lockForConfiguration()
                } catch let error as NSError {
                    // BUGFIX: previously execution fell through here and mutated
                    // the device configuration (and called unlockForConfiguration)
                    // even though the lock was never acquired. Bail out instead.
                    print(error.localizedDescription)
                    return
                }
                backCamera!.focusPointOfInterest = pointInCamera
                backCamera!.focusMode = .AutoFocus
                backCamera!.unlockForConfiguration()
            }
        }
    }
}

Related

Trying to add cameraPreviewLayer to swift app

Could someone help me? I'm not understanding what I'm doing wrong. I'm trying to preview the camera input through an AVCaptureVideoPreviewLayer, but when opening the app it shows me a black background instead of the camera view.
Thanks,
Here is the code:
// Device input created from the default camera (set in viewWillAppear).
var cameraInput : AVCaptureDeviceInput!
// Session wiring the camera input to the preview layer.
var videoSession : AVCaptureSession = AVCaptureSession()
// Layer that renders the live camera feed; added to the view's layer tree.
var previewLayer : AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
}
/// Builds the capture pipeline (default camera -> session -> preview layer)
/// and starts the session each time the view is about to appear.
override func viewWillAppear(_ animated: Bool) {
    // BUGFIX: forward the actual `animated` flag (was hard-coded `true`).
    super.viewWillAppear(animated)
    //Camera of Device
    guard let videoCamera = AVCaptureDevice.default(for: .video)
    else{
        print("Camera Device doesnt work")
        return
    }
    //camera into device input
    guard
        let cameraInput = try? AVCaptureDeviceInput(device: videoCamera),
        videoSession.canAddInput(cameraInput)
    else { return }
    videoSession.addInput(cameraInput)
    self.previewLayer = AVCaptureVideoPreviewLayer(session: videoSession)
    // BUGFIX: the layer was added with a zero frame, which renders as a black
    // screen (the reported symptom). Size it to the view before adding it.
    self.previewLayer.frame = self.view.bounds
    self.previewLayer.videoGravity = .resizeAspectFill
    self.view.layer.addSublayer(previewLayer)
    videoSession.startRunning()
}
}

iOS barcode scanner App crashes with no crash log

I am trying to make a barcode scanner app. As soon as the camera session begins, the app crashes within a few seconds.
I am unable to find the reason behind this, or how to fix it.
I have used https://www.appcoda.com/barcode-reader-swift/ to make the barcode scanner.
import Foundation
import UIKit
import AVFoundation
import CoreData
/// Errors surfaced by the boarding-pass scanner.
/// Raw values double as human-readable log messages.
enum BarcodeScanError : String{
case cameraLoadFailed = "Camera Load Failed"
case NoValidBarcode = "No Valid Barcode"
}
class ScanBoardingPassViewController : UIViewController {

    //MARK: - Properties
    var viewModel : ScanBoardingPassViewModel? = nil
    var captureSession : AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var qrCodeFrameView: UIView?

    // Boarding passes are encoded as Aztec (mobile) or PDF417 (printed).
    private let supportedCodeTypes = [AVMetadataObject.ObjectType.aztec,
                                      AVMetadataObject.ObjectType.pdf417]

    //MARK: - Outlets
    // BUGFIX: was "#IBOutlet" (markdown mangling) — invalid Swift.
    @IBOutlet weak var btnCancel: UIButton!

    //MARK: - View Life Cycle
    override func viewDidLoad() {
        // BUGFIX: call super before performing our own setup.
        super.viewDidLoad()
        viewModel = ScanBoardingPassViewModel()
        self.captureSession = AVCaptureSession()
        self.setUpView()
    }

    override func viewWillDisappear(_ animated: Bool) {
        // BUGFIX: forward the actual `animated` flag (was hard-coded `true`).
        super.viewWillDisappear(animated)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    //MARK: - Set Up View
    /// Assembles the scanner, keeps the cancel button on top of the preview,
    /// and installs the green highlight frame.
    func setUpView() {
        self.setUpBarCodeScanner()
        self.view.bringSubviewToFront(self.btnCancel)
        self.setUpBarcodeRecognizerFrame()
    }

    private func setUpBarCodeScanner() {
        // Get the back-facing camera for capturing videos
        guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
            debugPrint(BarcodeScanError.cameraLoadFailed)
            return
        }
        do {
            // Get an instance of the AVCaptureDeviceInput class using the previous device object.
            let input = try AVCaptureDeviceInput(device: captureDevice)
            // BUGFIX: adding inputs/outputs unconditionally raises an exception
            // when the session cannot accept them — guard with canAdd first.
            guard let session = captureSession, session.canAddInput(input) else {
                debugPrint(BarcodeScanError.cameraLoadFailed)
                return
            }
            session.addInput(input)
            // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
            let captureMetadataOutput = AVCaptureMetadataOutput()
            guard session.canAddOutput(captureMetadataOutput) else {
                debugPrint(BarcodeScanError.cameraLoadFailed)
                return
            }
            session.addOutput(captureMetadataOutput)
            // Deliver detections on the main queue so UI work in the delegate is safe.
            captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
        } catch {
            // If any error occurs, simply print it out and don't continue any more.
            print(error)
            return
        }
        // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
        videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        videoPreviewLayer?.frame = view.layer.bounds
        view.layer.addSublayer(videoPreviewLayer!)
        // Start video capture.
        captureSession?.startRunning()
    }

    private func setUpBarcodeRecognizerFrame() {
        // Green rectangle that tracks the detected code on screen.
        qrCodeFrameView = UIView()
        if let qrCodeFrameView = qrCodeFrameView {
            qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
            qrCodeFrameView.layer.borderWidth = 2
            view.addSubview(qrCodeFrameView)
            view.bringSubviewToFront(qrCodeFrameView)
        }
    }

    //MARK: - Outlets
    // BUGFIX: was "#IBAction" (markdown mangling) — invalid Swift.
    @IBAction func btnCancelPressed(_ sender: UIButton) {
        self.dismissView()
    }

    func dismissView() {
        self.dismiss(animated: true, completion: nil)
    }
}
extension ScanBoardingPassViewController: AVCaptureMetadataOutputObjectsDelegate {
    /// Delegate callback (main queue): highlights the detected code, parses it
    /// as a boarding pass, persists it, and dismisses on success.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // Nothing detected in this frame: clear the highlight.
        if metadataObjects.isEmpty {
            qrCodeFrameView?.frame = CGRect.zero
            debugPrint(BarcodeScanError.NoValidBarcode)
            return
        }
        // BUGFIX: was a force cast (as!) — any non-machine-readable metadata
        // object (e.g. a face) would crash the app, which matches the reported
        // "crashes within a few seconds with no crash log" symptom.
        guard let metadataObj = metadataObjects[0] as? AVMetadataMachineReadableCodeObject else {
            debugPrint(BarcodeScanError.NoValidBarcode)
            return
        }
        if supportedCodeTypes.contains(metadataObj.type) {
            // BUGFIX: transformedMetadataObject(for:) returns an optional;
            // force-unwrapping it could crash when the layer has no transform yet.
            if let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj) {
                qrCodeFrameView?.frame = barCodeObject.bounds
            }
            if metadataObj.stringValue != nil {
                captureSession?.stopRunning()
                debugPrint("Valid Barcode found \(metadataObj.stringValue!)")
                if let boardingPass = viewModel?.parseBoardingPassString(boardingPassString : metadataObj.stringValue!) {
                    // Persist on a fresh background context, then dismiss.
                    let unitOfWork = UnitOfWork(context:( UIApplication.shared.delegate as! AppDelegate).persistentContainer.newBackgroundContext() )
                    unitOfWork.boardingPassRepository.saveBoardingPasses(boardingPass: boardingPass)
                    unitOfWork.saveChanges()
                    print(unitOfWork.boardingPassRepository.getBoardingPasses(predicate: nil))
                    self.dismissView()
                }
            }
        }
    }
}
The camera doesn't get stuck, but the app drops into lldb every time within a few seconds.
// Created by Satya Narayana on 17/11/20.
//
import UIKit
import AVFoundation
import UIKit
class QRViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    //MARK: Outlets
    // BUGFIX: were "#IBOutlet" (markdown mangling) — invalid Swift.
    @IBOutlet weak var qrLbl: UILabel! // BarCode displaying Label
    @IBOutlet weak var sView: UIView! // View

    //MARK: Variables
    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!

    //MARK: View Methods
    /// Builds the scanning pipeline: camera input -> metadata output -> preview.
    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession = AVCaptureSession()
        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        let videoInput: AVCaptureDeviceInput
        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }
        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }
        let metadataOutput = AVCaptureMetadataOutput()
        if (captureSession.canAddOutput(metadataOutput)) {
            captureSession.addOutput(metadataOutput)
            // Deliver detections on the main queue so UI updates are safe.
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.qr, .ean13, .code128]
        } else {
            failed()
            return
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = sView.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        sView.layer.addSublayer(previewLayer)
        captureSession.startRunning()
    }

    /// Tears down scanning support when the device has no usable camera.
    func failed() {
        self.showToast(message: "Scanning not supported.Your device does not support scanning a code from an item. Please use a device with a camera.", seconds: 1.0)
        captureSession = nil
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        qrLbl.isHidden = true
        // Restart scanning if a previous detection stopped the session.
        if (captureSession?.isRunning == false) {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        if (captureSession?.isRunning == true) {
            captureSession.stopRunning()
        }
    }

    /// Delegate callback: stop the session, vibrate, report the first readable
    /// code, then dismiss this controller.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        captureSession.stopRunning()
        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            found(code: stringValue)
        }
        dismiss(animated: true)
    }

    //MARK:- Found BARCODE
    /// Handles a decoded barcode string; updates the label when non-empty.
    func found(code: String) {
        print(code)
        if code != ""{
            print(code) // This is Barcode
            qrLbl.text = code
        }else{
            // if you need run again uncomment below line
            //self.captureSession.startRunning()
        }
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }
}

Swift Playground iPad cameras access

In the last update notes for the Swift Playground app for the iPad I saw that the cameras would be accessible through the playgrounds.
For testing purpose I've therefore created a playground to access my iPads back camera.
As reference I've used those sources:
https://github.com/codepath/ios_guides/wiki/Creating-a-Custom-Camera-View
https://github.com/francip/swift-playground-camera/blob/master/Camera.playground/Contents.swift
Here is my code:
import UIKit
import AVFoundation
import PlaygroundSupport
class MainViewController : UIViewController {

    // Capture pipeline state.
    // NOTE(review): despite its name, `_frontCamera` holds
    // AVCaptureDevice.default(for: .video), which is normally the *back*
    // wide-angle camera — confirm which camera is actually intended.
    private var _session: AVCaptureSession?
    private var _captureInput: AVCaptureInput?
    private var _stillImageOutput: AVCaptureStillImageOutput?
    private var _frontCamera: AVCaptureDevice?
    private var _previewView: UIView?
    private var _previewLayer: AVCaptureVideoPreviewLayer?

    /// Installs a plain UIView as the controller's root view; the preview
    /// layer is attached to it later in viewWillAppear.
    public override func loadView() {
        print("loadView()")
        _previewView = UIView()
        guard let view = _previewView else {
            printError(errMsg: "View could not be created!")
            return
        }
        view.backgroundColor = UIColor.brown
        self.view = view
    }

    /// Builds and starts the capture session: default camera -> device input
    /// -> still-image output -> preview layer attached to the root view.
    public override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // Create a capture session
        _session = AVCaptureSession()
        guard let session = _session else {
            printError(errMsg: "Session is empty!")
            return
        }
        session.beginConfiguration()
        session.sessionPreset = AVCaptureSession.Preset.photo
        // Get access to the default video camera (usually the back camera).
        _frontCamera = AVCaptureDevice.default(for: AVMediaType.video)
        guard let frontCamera = _frontCamera
        else {
            printError(errMsg: "Front camera not accessible!")
            return
        }
        do{
            _captureInput = try AVCaptureDeviceInput(device: frontCamera)
        } catch let err as NSError {
            printError(errMsg: err.localizedDescription)
            return
        }
        // Add input to session
        guard let captureInput = _captureInput else {
            printError(errMsg: "Capture input not available!")
            return
        }
        if session.canAddInput(captureInput){
            session.addInput(captureInput)
        }
        // Configure the image output (typo fixes: "Configurw"/"sillImageOutput").
        _stillImageOutput = AVCaptureStillImageOutput()
        guard let stillImageOutput = _stillImageOutput else {
            printError(errMsg: "Image output not available!")
            return
        }
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if session.canAddOutput(stillImageOutput) {
            session.addOutput(stillImageOutput)
        }
        session.commitConfiguration()
        // Add video preview layer
        _previewLayer = AVCaptureVideoPreviewLayer(session: session)
        guard let previewLayer = _previewLayer else {
            printError(errMsg: "Preview layer not available!")
            return
        }
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect
        previewLayer.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
        guard let view = _previewView else {
            printError(errMsg: "View not available!")
            return
        }
        view.layer.addSublayer(previewLayer)
        view.layer.borderWidth = 5
        // Start the capture session
        session.startRunning()
    }

    public override func viewDidLoad() {
        super.viewDidLoad()
    }

    /// The preview layer can only be sized after layout, so its frame is
    /// assigned here rather than in viewWillAppear.
    public override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        guard let previewLayer = _previewLayer else {
            printError(errMsg: "Preview layer not available!")
            return
        }
        guard let previewView = _previewView else {
            printError(errMsg: "View not available!")
            return
        }
        print(previewView.bounds)
        previewLayer.frame = previewView.bounds
    }

    /// Uniform error logging for the playground console.
    private func printError(errMsg: String) {
        print("[Error]: " + errMsg)
    }
}
// Host the camera controller as the playground's live view.
PlaygroundPage.current.liveView = MainViewController()
Please ignore the private variables for the session, etc. I know that I can/should make them method local.
The only thing I see is the brown background of the view.
When running the app for the first time iOS asked me if the Playgrounds app should have access to the camera which I accepted. In the settings I can also see that the app has access to the camera.
iOS Version: 10.3.3
Playground App: 1.6.1
It looks like it works only on iOS 11. I checked your code on iPad with iOS 10.3 and the video preview was empty. The same code was running just fine inside the normal app. On another tablet running iOS 11 the same code was working like a charm.

How to change fps and hide status bar using AVFoundation and a camera view

I am working with AVFoundation and making a fullscreen camera view. I can't seem to be able to change the fps and hide the status bar. I would like the fps to be set at 140 fps (for the iPhone 7) and I would also like the status bar to be hidden (I have changed that in my storyboard files and in the General tab of the Xcode app settings. How can I achieve this? Thanks in advance! (I am using Swift 3.0 and would prefer an answer in Swift 3 (if possible))
Code of ViewController: `class ViewController: UIViewController {
// BUGFIX: was "#IBOutlet" (markdown mangling) — invalid Swift.
@IBOutlet var cameraView: UIImageView!
// Session and still-image output shared by the whole controller.
let captureSession = AVCaptureSession()
let stillImageOutput = AVCaptureStillImageOutput()
var previewLayer : AVCaptureVideoPreviewLayer?
var captureDevice : AVCaptureDevice?
/// Attaches the chosen capture device and a JPEG still-image output to the
/// session, installs a fullscreen preview layer, and starts capture.
func beginSession() {
    do {
        // `try` covers the AVCaptureDeviceInput initializer, not addInput.
        try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
        stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
    }
    catch {
        print("error: \(error.localizedDescription)")
    }
    guard let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) else {
        print("no preview layer")
        return
    }
    self.view.layer.addSublayer(previewLayer)
    // BUGFIX: a sublayer's frame is expressed in its parent's coordinate
    // space, so copying the parent's *frame* mis-positions the layer whenever
    // the view's origin is non-zero. Use the parent's bounds instead.
    previewLayer.frame = self.view.layer.bounds
    captureSession.startRunning()
}
/// Selects the back-facing video camera (if present) and kicks off the
/// capture session via beginSession().
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    captureSession.sessionPreset = AVCaptureSessionPresetHigh
    guard let allDevices = AVCaptureDevice.devices() as? [AVCaptureDevice] else { return }
    // Walk every capture device, keeping only back-facing video cameras.
    for device in allDevices
        where device.hasMediaType(AVMediaTypeVideo) && device.position == AVCaptureDevicePosition.back {
        captureDevice = device
        print("Capture device found")
        beginSession()
    }
}
// Standard UIKit memory-warning hook; nothing extra to release here.
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
`
1) To hide a status bar you should add an override of prefersStatusBarHidden: method of UIViewController:
// Hides the status bar for the fullscreen camera view (answer part 1).
override var prefersStatusBarHidden : Bool {
return true
}
2) To set a constant FPS you can use activeVideoMinFrameDuration and activeVideoMaxFrameDuration properties of AVCaptureDevice instance:
/// Locks the device configuration and pins both the minimum and maximum
/// frame durations to 1/140 s, i.e. a constant 140 fps (answer part 2).
func setFrameRate(_ device: AVCaptureDevice) {
    do {
        try device.lockForConfiguration()
        // Release the configuration lock on every exit from this scope.
        defer { device.unlockForConfiguration() }
        let frameDuration = CMTimeMake(1, 140)
        device.activeVideoMinFrameDuration = frameDuration
        device.activeVideoMaxFrameDuration = frameDuration
    } catch {
        NSLog("An Error occurred: \(error.localizedDescription))")
    }
}

Why doesn't my preview layer stretch over the entire view?

I'm making an iPhone app with an AVFoundation camera, but the camera is not scaling properly.
I think I have done a lot to make it the same size, I changed the video gravity to ResizeAspectFill and I changed the previewlayer.frame.size to self.layer.frame.size.
Why isn't my preview layer stretching over the entire view? Is it something I have typed wrong or just forgotten that I need to type out? Thanks!
Image: http://imgur.com/O713SoE
code:
import AVFoundation
import UIKit
import QuartzCore
class View1: UIViewController {

    let captureSession = AVCaptureSession()
    var previewLayer: CALayer!
    var captureDevice: AVCaptureDevice!

    // BUGFIX: were "#IBOutlet"/"#IBAction" (markdown mangling) — invalid Swift.
    @IBOutlet weak var photoButton: UIButton!
    @IBOutlet weak var cameraView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Keep the button above the preview layer that is added later.
        photoButton.layer.zPosition = 1
    }

    @IBAction func photoButtonpressed(_ sender: UIButton) {
        let button = sender as UIButton
        if (button.tag == 1){
            print("Photobutton clicked")
        }
    }

    /// Picks the back wide-angle camera and starts the capture session.
    func prepareCamera(){
        captureSession.sessionPreset = AVCaptureSessionPreset1920x1080
        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                  mediaType: AVMediaTypeVideo,
                                                                  position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        }
    }

    /// Wires the camera into the session, installs a fullscreen preview layer,
    /// and attaches a BGRA video-data output.
    func beginSession(){
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(captureDeviceInput)
        } catch {
            print(error.localizedDescription)
        }
        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession){
            self.previewLayer = previewLayer
            self.view.layer.addSublayer(self.previewLayer)
            // BUGFIX: the old frame/bounds pair copied self.view.layer.frame,
            // which is in the *superview's* coordinate space; use bounds.
            self.previewLayer.frame = self.view.bounds
            // BUGFIX: contentsGravity is the generic CALayer property and does
            // not control how video is scaled. AVCaptureVideoPreviewLayer
            // scales its feed via videoGravity — this is why the preview did
            // not stretch over the entire view.
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
            captureSession.startRunning()
            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
            dataOutput.alwaysDiscardsLateVideoFrames = true
            if captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)
                captureSession.commitConfiguration()
            }
        }
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }
}
I copied your code and ran it on iOS 10.1.1, iPhone 6, Xcode 8.2.1 — it works.
How do you load View1 — programmatically, or instantiated from a storyboard? The view of View1 might have a different size than your device screen.

Resources