QR Code Scanning not working - iOS

So I have just taken over an iOS project as their first in-house dev; previously this app had been developed by an agency.
One of the features of the app is that it needs to scan QR codes. By the look of the code, the previous developers followed this AppCoda tutorial to implement the QR scan. Everything looks fine and I can't see anything wrong with the code, yet it isn't working.
I also downloaded the completed tutorial project, and the QR scan worked when I tried it there. I also tried copying and pasting every single line so that it was identical to the working tutorial, yet no luck.
I'm tearing my hair out trying to figure out why it isn't working.
Any help is much appreciated!
enum ScanState : Int {
case newDevice = 1
case resetDevice = 2
case replaceDevice = 3
}
class QRScannerViewController: BaseViewController, AVCaptureMetadataOutputObjectsDelegate {
@IBOutlet var scanZoneView: UIView!
@IBOutlet var scannerVIew: UIView!
@IBOutlet var scanInfoLabel: UILabel!
var capturedQR: String? = nil
var captureSession:AVCaptureSession?
var videoPreviewLayer:AVCaptureVideoPreviewLayer?
var qrCodeFrameView:UIView?
let supportedBarCodes = [AVMetadataObject.ObjectType.qr, AVMetadataObject.ObjectType.code128, AVMetadataObject.ObjectType.code39, AVMetadataObject.ObjectType.code93, AVMetadataObject.ObjectType.upce, AVMetadataObject.ObjectType.pdf417, AVMetadataObject.ObjectType.ean13, AVMetadataObject.ObjectType.aztec]
var type = "leg scan"
var device:Device?
var state:ScanState = .newDevice
override func viewDidLoad() {
super.viewDidLoad()
scanInfoLabel.text = "Scan your existing\nleg QR code"
self.navigationController?.navigationBar.dark()
//self.navigationItem.backBarButtonItem = UIBarButtonItem(title: "", style: .Plain, target: nil, action: nil)
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.navigationController?.setNavigationBarHidden(false, animated: true)
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
#if NOHARDWARE
moveToNextViewController()
#else
initiateCapture()
#endif
}
func initiateCapture() {
let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
var error:NSError?
let input: AnyObject!
do {
input = try AVCaptureDeviceInput(device: captureDevice!) as AVCaptureDeviceInput
} catch let error1 as NSError {
error = error1
input = nil
} catch _ {
input = nil
}
if (error != nil) {
// If any error occurs, simply log the description of it and don't continue any more.
print("\(error?.localizedDescription)")
return
}
// Initialize the captureSession object.
captureSession = AVCaptureSession()
// Set the input device on the capture session.
captureSession?.addInput(input as! AVCaptureInput)
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
let captureMetadataOutput = AVCaptureMetadataOutput()
captureSession?.addOutput(captureMetadataOutput)
// Set delegate and use the default dispatch queue to execute the call back
captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
captureMetadataOutput.metadataObjectTypes = supportedBarCodes
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
videoPreviewLayer?.frame = scannerVIew.layer.bounds
scannerVIew.layer.addSublayer(videoPreviewLayer!)
// Start video capture.
captureSession?.startRunning()
// Initialize QR Code Frame to highlight the QR code
qrCodeFrameView = UIView()
qrCodeFrameView?.layer.borderColor = UIColor.green.cgColor
qrCodeFrameView?.layer.borderWidth = 2
scannerVIew.addSubview(qrCodeFrameView!)
scannerVIew.bringSubview(toFront: qrCodeFrameView!)
//qrCapturedLabel.text = "No QR code is detected"
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func metadataOutput(captureOutput: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
// Check if the metadataObjects array is not nil and it contains at least one object.
if metadataObjects == nil || metadataObjects.count == 0 {
qrCodeFrameView?.frame = CGRect.zero
return
}
// Get the metadata object.
let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
// Here we use filter method to check if the type of metadataObj is supported
// Instead of hardcoding the AVMetadataObjectTypeQRCode, we check if the type
// can be found in the array of supported bar codes.
if supportedBarCodes.filter({ $0 == metadataObj.type }).count > 0 {
// If the found metadata is equal to the QR code metadata then update the status label's text and set the bounds
let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj as AVMetadataMachineReadableCodeObject) as! AVMetadataMachineReadableCodeObject
let intersectionRect = barCodeObject.bounds.intersection(self.scanZoneView.frame)
if !intersectionRect.isNull &&
(intersectionRect.size.width * intersectionRect.size.height) > self.scanZoneView.bounds.width * self.scanZoneView.bounds.height / 7 {
qrCodeFrameView?.frame = barCodeObject.bounds
if process(metadataObj.stringValue!) {
captureSession?.stopRunning()
}
}
}
}
@IBAction func didTapCancel(_ sender: AnyObject) {
self.dismiss(animated: true, completion: nil)
}
}
extension QRScannerViewController {
func process(_ scanText : String) -> Bool {
var legCode : String
let codeComponents = scanText.components(separatedBy: ";")
if codeComponents.count > 0 {
legCode = codeComponents[0]
} else {
// Invalid number of parameters separated by a ';'
return false
}
// TODO Validate the LEG to LEG-XXXXX
if legCode.hasPrefix("LEG-") {
let delta: Int64 = 1 * Int64(NSEC_PER_SEC)
let time = DispatchTime.now() + Double(delta) / Double(NSEC_PER_SEC)
DispatchQueue.main.asyncAfter(deadline: time, execute: {
switch self.state {
case .resetDevice:
let realm = try! Realm()
let deviceList = realm.objects(Device.self)
let lc = legCode
self.device = deviceList.filter("legCode = %@", lc).first
if self.device == nil {
// TODO Error message: Device not associated with LEG
let vc = ErrorViewController.createErrorViewController(.DeviceNotFound)
self.present(vc, animated: true, completion: nil)
return
}
self.moveToNextViewController()
default:
self.presentingViewController?.dismiss(animated: true, completion: nil)
}
});
return true
}
return false
}
func moveToNextViewController() {
let inspectionStoryboard = UIStoryboard(name: "Impact", bundle: nil)
if let resetVC = inspectionStoryboard.instantiateViewController(withIdentifier: ImpactDetectionViewController.storyboardID) as? ImpactDetectionViewController {
resetVC.device = device
// TODO Pass the impact type across too when the G2 API is set
self.navigationController?.pushViewController(resetVC, animated: false)
}
}
@IBAction func cancelToVC(_ segue: UIStoryboardSegue) { }
}
EDIT
By not working I mean the delegate for AVCaptureMetadataOutputObjectsDelegate is never being called so it never seems to be detecting a QR code. In the AppCoda tutorial it overlays a green square where it detects the QR code but this never happens when I put that code into this app.
The camera is actually running but QR codes are never detected.

Turns out the answer was deceptively (and annoyingly) simple! Of course, in Swift 4 the delegate methods have been renamed slightly. To fix it, all I had to do was change
func metadataOutput(captureOutput: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
to
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {

You're not connecting your class to AVCaptureMetadataOutput's delegate, that's why the AVCaptureMetadataOutputObjectsDelegate functions are not being invoked. More info: https://developer.apple.com/documentation/avfoundation/avcapturemetadataoutputobjectsdelegate
Try this:
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
let captureMetadataOutput = AVCaptureMetadataOutput()
captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main) // assign self as the delegate
captureSession?.addOutput(captureMetadataOutput)

Related

iOS barcode scanner App crashes with no crash log

I am trying to make a barcode scanner app. As soon as the camera session begins, the app crashes within a few seconds.
I am unable to find the reason behind this, or how to fix it.
I have used https://www.appcoda.com/barcode-reader-swift/ to make the barcode scanner.
import Foundation
import UIKit
import AVFoundation
import CoreData
enum BarcodeScanError : String{
case cameraLoadFailed = "Camera Load Failed"
case NoValidBarcode = "No Valid Barcode"
}
class ScanBoardingPassViewController : UIViewController {
//MARK: - Properties
var viewModel : ScanBoardingPassViewModel? = nil
var captureSession : AVCaptureSession?
var videoPreviewLayer: AVCaptureVideoPreviewLayer?
var qrCodeFrameView: UIView?
private let supportedCodeTypes = [AVMetadataObject.ObjectType.aztec,
AVMetadataObject.ObjectType.pdf417]
//MARK: - Outlets
@IBOutlet weak var btnCancel: UIButton!
//MARK: - View Life Cycle
override func viewDidLoad() {
viewModel = ScanBoardingPassViewModel()
self.captureSession = AVCaptureSession()
self.setUpView()
super.viewDidLoad()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(true)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
//MARK: - Set Up View
func setUpView() {
self.setUpBarCodeScanner()
self.view.bringSubviewToFront(self.btnCancel)
self.setUpBarcodeRecognizerFrame()
}
private func setUpBarCodeScanner() {
// Get the back-facing camera for capturing videos
guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
debugPrint(BarcodeScanError.cameraLoadFailed)
return
}
do {
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
let input = try AVCaptureDeviceInput(device: captureDevice)
// Set the input device on the capture session.
captureSession?.addInput(input)
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
let captureMetadataOutput = AVCaptureMetadataOutput()
captureSession?.addOutput(captureMetadataOutput)
// Set delegate and use the default dispatch queue to execute the call back
captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
// captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
} catch {
// If any error occurs, simply print it out and don't continue any more.
print(error)
return
}
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
videoPreviewLayer?.frame = view.layer.bounds
view.layer.addSublayer(videoPreviewLayer!)
// Start video capture.
captureSession?.startRunning()
}
private func setUpBarcodeRecognizerFrame() {
// Initialize QR Code Frame to highlight the QR code
qrCodeFrameView = UIView()
if let qrCodeFrameView = qrCodeFrameView {
qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
qrCodeFrameView.layer.borderWidth = 2
view.addSubview(qrCodeFrameView)
view.bringSubviewToFront(qrCodeFrameView)
}
}
//MARK: - Outlets
@IBAction func btnCancelPressed(_ sender: UIButton) {
self.dismissView()
}
func dismissView() {
self.dismiss(animated: true, completion: nil)
}
}
extension ScanBoardingPassViewController: AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
// Check if the metadataObjects array is not nil and it contains at least one object.
if metadataObjects.count == 0 {
qrCodeFrameView?.frame = CGRect.zero
debugPrint(BarcodeScanError.NoValidBarcode)
return
}
// Get the metadata object.
let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
if supportedCodeTypes.contains(metadataObj.type) {
// If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
qrCodeFrameView?.frame = barCodeObject!.bounds
if metadataObj.stringValue != nil {
captureSession?.stopRunning()
debugPrint("Valid Barcode found \(metadataObj.stringValue!)")
if let boardingPass = viewModel?.parseBoardingPassString(boardingPassString : metadataObj.stringValue!) {
let unitOfWork = UnitOfWork(context:( UIApplication.shared.delegate as! AppDelegate).persistentContainer.newBackgroundContext() )
unitOfWork.boardingPassRepository.saveBoardingPasses(boardingPass: boardingPass)
unitOfWork.saveChanges()
print(unitOfWork.boardingPassRepository.getBoardingPasses(predicate: nil))
self.dismissView()
}
}
}
}
}
The camera doesn't get stuck, but the app drops into lldb (crashes) every time within a few seconds.
// Created by Satya Narayana on 17/11/20.
//
import UIKit
import AVFoundation
import UIKit
class QRViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
//MARK: Outlets
@IBOutlet weak var qrLbl: UILabel! // BarCode displaying Label
@IBOutlet weak var sView: UIView! // View
//MARK: Variables
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
//MARK: View Methods
override func viewDidLoad() {
super.viewDidLoad()
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr, .ean13, .code128]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = sView.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
sView.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
self.showToast(message: "Scanning not supported. Your device does not support scanning a code from an item. Please use a device with a camera.", seconds: 1.0)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
qrLbl.isHidden = true
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
//MARK:- Found BARCODE
func found(code: String) {
print(code)
if code != ""{
print(code) // This is Barcode
qrLbl.text = code
}else{
// if you need run again uncomment below line
//self.captureSession.startRunning()
}
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}

QR Code Scanner won't work in Swift 4 [duplicate]

This question already has answers here:
Barcode on swift 4
(4 answers)
Closed 5 years ago.
I've tried following and downloading this QR Code Scanner sample project by Simon Ng: https://www.appcoda.com/barcode-reader-swift/
However, when I run the sample project on my iPhone, the camera doesn't work. The camera privacy alert doesn't appear and the screen is just white. I've already enabled NSCamera privacy in Info.plist, but it doesn't seem to do the job.
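For reference, the Info.plist entry being described here is presumably the camera usage description key; a minimal example looks like this (the description string is only illustrative):
<key>NSCameraUsageDescription</key>
<string>This app uses the camera to scan QR codes.</string>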
QRScannerController.
import UIKit
import AVFoundation
class QRScannerController: UIViewController {
@IBOutlet var messageLabel:UILabel!
@IBOutlet var topbar: UIView!
var captureSession = AVCaptureSession()
var videoPreviewLayer: AVCaptureVideoPreviewLayer?
var qrCodeFrameView: UIView?
private let supportedCodeTypes = [AVMetadataObject.ObjectType.upce,
AVMetadataObject.ObjectType.code39,
AVMetadataObject.ObjectType.code39Mod43,
AVMetadataObject.ObjectType.code93,
AVMetadataObject.ObjectType.code128,
AVMetadataObject.ObjectType.ean8,
AVMetadataObject.ObjectType.ean13,
AVMetadataObject.ObjectType.aztec,
AVMetadataObject.ObjectType.pdf417,
AVMetadataObject.ObjectType.itf14,
AVMetadataObject.ObjectType.dataMatrix,
AVMetadataObject.ObjectType.interleaved2of5,
AVMetadataObject.ObjectType.qr]
override func viewDidLoad() {
super.viewDidLoad()
// Get the back-facing camera for capturing videos
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera], mediaType: AVMediaType.video, position: .back)
guard let captureDevice = deviceDiscoverySession.devices.first else {
print("Failed to get the camera device")
return
}
do {
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
let input = try AVCaptureDeviceInput(device: captureDevice)
// Set the input device on the capture session.
captureSession.addInput(input)
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
let captureMetadataOutput = AVCaptureMetadataOutput()
captureSession.addOutput(captureMetadataOutput)
// Set delegate and use the default dispatch queue to execute the call back
captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
// captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
} catch {
// If any error occurs, simply print it out and don't continue any more.
print(error)
return
}
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
videoPreviewLayer?.frame = view.layer.bounds
view.layer.addSublayer(videoPreviewLayer!)
// Start video capture.
captureSession.startRunning()
// Move the message label and top bar to the front
view.bringSubview(toFront: messageLabel)
view.bringSubview(toFront: topbar)
// Initialize QR Code Frame to highlight the QR code
qrCodeFrameView = UIView()
if let qrCodeFrameView = qrCodeFrameView {
qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
qrCodeFrameView.layer.borderWidth = 2
view.addSubview(qrCodeFrameView)
view.bringSubview(toFront: qrCodeFrameView)
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
// MARK: - Helper methods
func launchApp(decodedURL: String) {
if presentedViewController != nil {
return
}
let alertPrompt = UIAlertController(title: "Open App", message: "You're going to open \(decodedURL)", preferredStyle: .actionSheet)
let confirmAction = UIAlertAction(title: "Confirm", style: UIAlertActionStyle.default, handler: { (action) -> Void in
if let url = URL(string: decodedURL) {
if UIApplication.shared.canOpenURL(url) {
UIApplication.shared.open(url, options: [:], completionHandler: nil)
}
}
})
let cancelAction = UIAlertAction(title: "Cancel", style: UIAlertActionStyle.cancel, handler: nil)
alertPrompt.addAction(confirmAction)
alertPrompt.addAction(cancelAction)
present(alertPrompt, animated: true, completion: nil)
}
}
extension QRScannerController: AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
// Check if the metadataObjects array is not nil and it contains at least one object.
if metadataObjects.count == 0 {
qrCodeFrameView?.frame = CGRect.zero
messageLabel.text = "No QR code is detected"
return
}
// Get the metadata object.
let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
if supportedCodeTypes.contains(metadataObj.type) {
// If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
qrCodeFrameView?.frame = barCodeObject!.bounds
if metadataObj.stringValue != nil {
launchApp(decodedURL: metadataObj.stringValue!)
messageLabel.text = metadataObj.stringValue
}
}
}
}
Before starting capture, check whether camera permission has been granted. If it hasn't, ask for permission and start capture only after authorization is granted. For reference, use the code snippet below:
import UIKit
import AVFoundation
class QRScannerVC: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
@IBOutlet weak var qrCodeView:UIView!
var backBtn:UIBarButtonItem!
var captureSession:AVCaptureSession?
var videoPreviewLayer:AVCaptureVideoPreviewLayer?
var qrCodeFrameView:UIView?
let supportedCodeTypes = [AVMetadataObjectTypeUPCECode,
AVMetadataObjectTypeCode39Code,
AVMetadataObjectTypeCode39Mod43Code,
AVMetadataObjectTypeCode93Code,
AVMetadataObjectTypeCode128Code,
AVMetadataObjectTypeEAN8Code,
AVMetadataObjectTypeEAN13Code,
AVMetadataObjectTypeAztecCode,
AVMetadataObjectTypePDF417Code,
AVMetadataObjectTypeQRCode]
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(true)
self.askPermission()
}
func askPermission() {
let cameraMediaType = AVMediaTypeVideo
let cameraAuthorizationStatus = AVCaptureDevice.authorizationStatus(forMediaType: cameraMediaType)
switch cameraAuthorizationStatus {
case .authorized:
self.startVideoCapture()
break
case .restricted,.denied,.notDetermined:
AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { granted in
if !granted {
let alert = ViewUtils.getDialog(title: "Allow App access to your Camera", message: "App needs access to the camera to scan QR codes")
alert.addAction(UIAlertAction(title: "Allow Access", style: .default) { action in
if let url = NSURL(string:UIApplicationOpenSettingsURLString) {
UIApplication.shared.openURL(url as URL)
}
})
alert.addAction(UIAlertAction(title: "Not Now", style: .default) { action in
})
self.present(alert, animated: true, completion: nil)
}
else{
self.startVideoCapture()
}
}
default: break
}
}
func startVideoCapture(){
// Get an instance of the AVCaptureDevice class to initialize a device object and provide the video as the media type parameter.
let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
do {
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
let input = try AVCaptureDeviceInput(device: captureDevice)
// Initialize the captureSession object.
captureSession = AVCaptureSession()
// Set the input device on the capture session.
captureSession?.addInput(input)
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
let captureMetadataOutput = AVCaptureMetadataOutput()
captureSession?.addOutput(captureMetadataOutput)
// Set delegate and use the default dispatch queue to execute the call back
captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
videoPreviewLayer?.frame = self.qrCodeView.bounds
self.qrCodeView.layer.addSublayer(videoPreviewLayer!)
self.qrCodeView.clipsToBounds = true
// Start video capture.
captureSession?.startRunning()
qrCodeFrameView = UIView()
if let qrCodeFrameView = qrCodeFrameView {
qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
qrCodeFrameView.layer.borderWidth = 2
self.qrCodeView.addSubview(qrCodeFrameView)
self.qrCodeView.bringSubview(toFront: qrCodeFrameView)
}
} catch {
// If any error occurs, simply print it out and don't continue any more.
print(error)
return
}
}
// MARK: - AVCaptureMetadataOutputObjectsDelegate Methods
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
// Check if the metadataObjects array is not nil and it contains at least one object.
if metadataObjects == nil || metadataObjects.count == 0 {
qrCodeFrameView?.frame = CGRect.zero
print("No QR/barcode is detected")
return
}
// Get the metadata object.
let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
if supportedCodeTypes.contains(metadataObj.type) {
// If the found metadata is equal to the QR code metadata then update the status label's text and set the bounds
let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
if barCodeObject != nil{
qrCodeFrameView?.frame = barCodeObject!.bounds
}
if metadataObj.stringValue != nil {
print("\(metadataObj.stringValue)")
if metadataObj.stringValue! != ""{
}
}
}
}
}
Hope this will help you

Swift AVCaptureSession Close Open Button Error : Multiple audio/video AVCaptureInputs are not currently supported

I have working barcode scanner code. When I click the openCamera button the first time, everything is good. When I click the closeCamera button, that's fine too, but if I then click the openCamera button again I get a fatal error. Code and error are below. In fact, is it possible to toggle the camera view with one button?
// Barcode Camera Properties
let captureSession = AVCaptureSession()
var captureDevice:AVCaptureDevice?
var captureLayer:AVCaptureVideoPreviewLayer?
override func viewDidLoad() {
super.viewDidLoad()
self.cameraView.alpha = 0
}
#IBAction func closeCamera(sender: AnyObject) {
self.captureLayer!.hidden = true
self.captureSession.stopRunning()
}
#IBAction func openCamera(sender: AnyObject) {
self.cameraView.alpha = 1
self.cameraView.animate()
setupCaptureSession()
}
//MARK: Session Startup
private func setupCaptureSession(){
self.captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
do {
let deviceInput = try AVCaptureDeviceInput(device: captureDevice) as AVCaptureDeviceInput
//Add the input feed to the session and start it
self.captureSession.addInput(deviceInput)
self.setupPreviewLayer({
self.captureSession.startRunning()
self.addMetaDataCaptureOutToSession()
})
} catch let setupError as NSError {
self.showError(setupError.localizedDescription)
}
}
private func setupPreviewLayer(completion:() -> ()){
self.captureLayer = AVCaptureVideoPreviewLayer(session: captureSession) as AVCaptureVideoPreviewLayer
if let capLayer = self.captureLayer {
capLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
capLayer.frame = self.cameraView.frame
self.view.layer.addSublayer(capLayer)
completion()
} else {
self.showError("An error occured beginning video capture.")
}
}
//MARK: Metadata capture
func addMetaDataCaptureOutToSession() {
let metadata = AVCaptureMetadataOutput()
self.captureSession.addOutput(metadata)
metadata.metadataObjectTypes = metadata.availableMetadataObjectTypes
metadata.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
}
//MARK: Delegate Methods
func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
for metaData in metadataObjects {
let decodedData:AVMetadataMachineReadableCodeObject = metaData as! AVMetadataMachineReadableCodeObject
self.pCodeTextField.text = decodedData.stringValue
//decodedData.type
}
}
//MARK: Utility Functions
func showError(error:String) {
let alertController = UIAlertController(title: "Error", message: error, preferredStyle: .Alert)
let dismiss:UIAlertAction = UIAlertAction(title: "OK", style: UIAlertActionStyle.Destructive, handler:{(alert:UIAlertAction) in
alertController.dismissViewControllerAnimated(true, completion: nil)
})
alertController.addAction(dismiss)
self.presentViewController(alertController, animated: true, completion: nil)
}
Error:
*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** Multiple audio/video AVCaptureInputs are not currently supported.'
*** First throw call stack:
(0x23f3410b 0x236dae17 0x2946bf73 0x2946b8bf 0x6d0d8 0x6ce28 0x6cebc 0x280a86cd 0x280a8659 0x2809064f 0x280a7fb5 0x28062275 0x280a0e21 0x280a05d3 0x280712f9 0x2806f98b 0x23ef768f 0x23ef727d 0x23ef55eb 0x23e48bf9 0x23e489e5 0x25094ac9 0x280d8ba1 0xa794c 0x23af7873)
libc++abi.dylib: terminating with uncaught exception of type NSException
(lldb)
Your setupCaptureSession() method adds a new input each time it's called (with self.captureSession.addInput(deviceInput)).
But the error message explicitly says that "Multiple audio/video AVCaptureInputs are not currently supported".
So you have to use only one input at a time: don't stack them in self.captureSession like you do.
You could remove the previous one (with removeInput) before adding a new one, for example.
To be sure that all inputs are removed before adding a new one, you could do:
if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
for input in inputs {
captureSession.removeInput(input)
}
}
just before self.captureSession.addInput(deviceInput).
If it still doesn't work... try something different: instead of removing inputs, change your code so that you add the device input just once, not each time the method is called:
if captureSession.inputs.isEmpty {
self.captureSession.addInput(deviceInput)
}
instead of just self.captureSession.addInput(deviceInput).
I just tried that in a Playground and it worked. Now that you have understood the idea, it's your responsibility to make it work in your own app by adapting my solutions; I can't do it for you, even if I wanted to... ;)
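Putting those two ideas together, a single toggle button could look roughly like this (a sketch in the question's Swift 2 syntax; toggleCamera is a hypothetical action name, while cameraView, captureLayer, captureSession and setupCaptureSession() come from the question):
@IBAction func toggleCamera(sender: AnyObject) {
    if captureSession.running {
        // Currently open -> close: hide the preview and stop the session
        captureLayer?.hidden = true
        captureSession.stopRunning()
    } else {
        // Currently closed -> open: remove any previous inputs first so the
        // session never ends up with two (the cause of the reported crash).
        // The metadata output added by setupCaptureSession() can stack the same
        // way; the same removal idea applies to captureSession.outputs.
        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                captureSession.removeInput(input)
            }
        }
        cameraView.alpha = 1
        cameraView.animate()
        setupCaptureSession()
    }
}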
2022
Code taken from the answer by Eric Aya, but refactored a lot:
func cameraWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInTripleCamera, .builtInTelephotoCamera, .builtInDualCamera, .builtInTrueDepthCamera], mediaType: AVMediaType.video, position: .unspecified)
for device in discoverySession.devices {
if device.position == position {
return device
}
}
return nil
}
func switchCamera() {
guard let currDevicePos = (session.inputs.first as? AVCaptureDeviceInput)?.device.position
else { return }
//Indicate that some changes will be made to the session
session.beginConfiguration()
//Get new input
guard let newCamera = cameraWithPosition(position: (currDevicePos == .back) ? .front : .back )
else {
print("ERROR: Issue in cameraWithPosition() method")
return
}
do {
let newVideoInput = try AVCaptureDeviceInput(device: newCamera)
while session.inputs.count > 0 {
session.removeInput(session.inputs[0])
}
session.addInput(newVideoInput)
} catch let err1 as NSError {
print("Error creating capture device input: \(err1.localizedDescription)")
}
//Commit all the configuration changes at once
session.commitConfiguration()
}

Disable rotation only for AVCaptureVideoPreviewLayer

I have a UITabBarController which has 4 tab items. One of them is an AVCaptureVideoPreviewLayer (it is a barcode scanner). What I would like to do is disable autorotation only for the AVCaptureVideoPreviewLayer (like the iOS camera app), but not for the tab bar, which would still rotate with the screen. It is a bit of a difficult situation because I think UITabBarController doesn't easily let you disable rotation.
The code for my camera view is:
import UIKit
import AVFoundation
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
// MARK: Properties
/// Manages the data flow from the capture stage through our input devices.
var captureSession: AVCaptureSession!
/// Dispays the data as captured from our input device.
var previewLayer: AVCaptureVideoPreviewLayer!
// MARK: Methods
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.blackColor()
self.captureSession = AVCaptureSession()
// This object represents a physical capture device and the properties associated with that device
let videoCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
// Is useful for capturing the data from the input device
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
// TODO: maybe show an alert
return
}
if (self.captureSession.canAddInput(videoInput)) {
self.captureSession.addInput(videoInput)
} else {
failed();
return;
}
let metadataOutput = AVCaptureMetadataOutput()
if (self.captureSession.canAddOutput(metadataOutput)) {
self.captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
metadataOutput.metadataObjectTypes = [AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypePDF417Code,
AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code,
AVMetadataObjectTypeCode39Mod43Code, AVMetadataObjectTypeCode93Code,
AVMetadataObjectTypeCode128Code]
} else {
failed()
return
}
// Adds the preview layer to display the captured data. Sets the videoGravity to AspectFill so that it covers the full screen
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession);
self.previewLayer.frame = self.view.layer.bounds;
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.view.layer.addSublayer(previewLayer);
self.captureSession.startRunning();
}
override func viewDidLayoutSubviews() {
self.previewLayer?.frame = self.view.layer.bounds;
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
if (self.captureSession.running == false) {
self.captureSession.startRunning();
}
}
override func viewWillDisappear(animated: Bool) {
super.viewWillDisappear(animated)
if (self.captureSession.running == true) {
self.captureSession.stopRunning();
}
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .Alert)
ac.addAction(UIAlertAction(title: "OK", style: .Default, handler: nil))
presentViewController(ac, animated: true, completion: nil)
self.captureSession = nil
}
func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
self.captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
let readableObject = metadataObject as! AVMetadataMachineReadableCodeObject;
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
foundCode(readableObject.stringValue);
}
}
/// Completes some tasks when a barcode is found.
///
/// - parameter code: The the barcode found.
func foundCode(code: String) {
}
}
I'm not 100% sure if I understand it correctly. But what you could do is:
1 - Subclass your UITabBarController and override shouldAutorotate with something like this:
override var shouldAutorotate: Bool {
guard let viewController = selectedViewController else { return false }
return viewController.shouldAutorotate
}
This means that the selected UIViewController will define if it should autorotate.
2 - Now that the UITabBarController will ask its ViewControllers if it should autorotate, you have to override shouldAutorotate in every controller as well.
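For example, the scanner controller could opt out of rotation entirely (a minimal sketch; returning false here is an assumption about the behaviour you want for that tab):
override var shouldAutorotate: Bool {
    return false // keep the scanner's AVCaptureVideoPreviewLayer from rotating with the device
}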
And that's it!
Sorry if I misunderstood your question. If you provide more information it would help.
Take care :)

How can I scan barcodes on iOS? [closed]

Closed. This question does not meet Stack Overflow guidelines and is not currently accepting answers: we don't allow questions seeking recommendations for books, tools, software libraries, and more.
Locked. This question and its answers are locked because the question is off-topic but has historical significance.
How can I simply scan barcodes on iPhone and/or iPad?
We produced the 'Barcodes' application for the iPhone. It can decode QR Codes. The source code is available from the zxing project; specifically, you want to take a look at the iPhone client and the partial C++ port of the core library. The port is a little old, from circa the 0.9 release of the Java code, but should still work reasonably well.
If you need to scan other formats, like 1D formats, you could continue the port of the Java code within this project to C++.
EDIT: Barcodes and the iPhone code in the project were retired around the start of 2014.
As of the release of iOS 7 you no longer need to use an external framework or library. The iOS ecosystem with AVFoundation now fully supports scanning almost every code, from QR over EAN to UPC.
Just have a look at the Tech Note and the AVFoundation programming guide. AVMetadataObjectTypeQRCode is your friend.
Here is a nice tutorial which shows it step by step:
iPhone QR code scan library iOS7
Just a little example on how to set it up:
#pragma mark -
#pragma mark AVFoundationScanSetup
- (void) setupScanner;
{
self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
self.input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:nil];
self.session = [[AVCaptureSession alloc] init];
self.output = [[AVCaptureMetadataOutput alloc] init];
[self.session addOutput:self.output];
[self.session addInput:self.input];
[self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
self.output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode];
self.preview = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
self.preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.preview.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height);
AVCaptureConnection *con = self.preview.connection;
con.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
[self.view.layer insertSublayer:self.preview atIndex:0];
}
There are two major libraries:
ZXing, a library written in Java and then ported to Objective-C / C++ (QR code only). Another port to Objective-C has been done by TheLevelUp: ZXingObjC
ZBar, an open-source C-based library for reading barcodes.
According to my experiments, ZBar is far more accurate and faster than ZXing, at least on iPhone.
You can find another native iOS solution using Swift 4 and Xcode 9 below. The native AVFoundation framework is used in this solution.
The first part is a subclass of UIViewController which has the related setup and handler functions for AVCaptureSession.
import UIKit
import AVFoundation
class BarCodeScannerViewController: UIViewController {
let captureSession = AVCaptureSession()
var videoPreviewLayer: AVCaptureVideoPreviewLayer!
var initialized = false
let barCodeTypes = [AVMetadataObject.ObjectType.upce,
AVMetadataObject.ObjectType.code39,
AVMetadataObject.ObjectType.code39Mod43,
AVMetadataObject.ObjectType.code93,
AVMetadataObject.ObjectType.code128,
AVMetadataObject.ObjectType.ean8,
AVMetadataObject.ObjectType.ean13,
AVMetadataObject.ObjectType.aztec,
AVMetadataObject.ObjectType.pdf417,
AVMetadataObject.ObjectType.itf14,
AVMetadataObject.ObjectType.dataMatrix,
AVMetadataObject.ObjectType.interleaved2of5,
AVMetadataObject.ObjectType.qr]
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
setupCapture()
// set observer for UIApplicationWillEnterForeground, so we know when to start the capture session again
NotificationCenter.default.addObserver(self,
selector: #selector(willEnterForeground),
name: .UIApplicationWillEnterForeground,
object: nil)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// this view is no longer topmost in the app, so we don't need a callback if we return to the app.
NotificationCenter.default.removeObserver(self,
name: .UIApplicationWillEnterForeground,
object: nil)
}
// This is called when we return from another app to the scanner view
@objc func willEnterForeground() {
setupCapture()
}
func setupCapture() {
var success = false
var accessDenied = false
var accessRequested = false
let authorizationStatus = AVCaptureDevice.authorizationStatus(for: .video)
if authorizationStatus == .notDetermined {
// permission dialog not yet presented, request authorization
accessRequested = true
AVCaptureDevice.requestAccess(for: .video,
completionHandler: { (granted:Bool) -> Void in
self.setupCapture();
})
return
}
if authorizationStatus == .restricted || authorizationStatus == .denied {
accessDenied = true
}
if initialized {
success = true
} else {
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera,
.builtInTelephotoCamera,
.builtInDualCamera],
mediaType: .video,
position: .unspecified)
if let captureDevice = deviceDiscoverySession.devices.first {
do {
let videoInput = try AVCaptureDeviceInput(device: captureDevice)
captureSession.addInput(videoInput)
success = true
} catch {
NSLog("Cannot construct capture device input")
}
} else {
NSLog("Cannot get capture device")
}
}
if success {
DispatchQueue.global().async {
self.captureSession.startRunning()
DispatchQueue.main.async {
let captureMetadataOutput = AVCaptureMetadataOutput()
self.captureSession.addOutput(captureMetadataOutput)
let newSerialQueue = DispatchQueue(label: "barCodeScannerQueue") // in iOS 11 you can use main queue
captureMetadataOutput.setMetadataObjectsDelegate(self, queue: newSerialQueue)
captureMetadataOutput.metadataObjectTypes = self.barCodeTypes
self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
self.videoPreviewLayer.videoGravity = .resizeAspectFill
self.videoPreviewLayer.frame = self.view.layer.bounds
self.view.layer.addSublayer(self.videoPreviewLayer)
}
}
initialized = true
} else {
// Only show a dialog if we have not just asked the user for permission to use the camera. Asking permission
// sends its own dialog to the user
if !accessRequested {
// Generic message if we cannot figure out why we cannot establish a camera session
var message = "Cannot access camera to scan bar codes"
#if (arch(i386) || arch(x86_64)) && (!os(macOS))
message = "You are running on the simulator, which does not hae a camera device. Try this on a real iOS device."
#endif
if accessDenied {
message = "You have denied this app permission to access to the camera. Please go to settings and enable camera access permission to be able to scan bar codes"
}
let alertPrompt = UIAlertController(title: "Cannot access camera", message: message, preferredStyle: .alert)
let confirmAction = UIAlertAction(title: "OK", style: .default, handler: { (action) -> Void in
self.navigationController?.popViewController(animated: true)
})
alertPrompt.addAction(confirmAction)
self.present(alertPrompt, animated: true, completion: nil)
}
}
}
func handleCapturedOutput(metadataObjects: [AVMetadataObject]) {
if metadataObjects.count == 0 {
return
}
guard let metadataObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject else {
return
}
if barCodeTypes.contains(metadataObject.type) {
if let metaDataString = metadataObject.stringValue {
captureSession.stopRunning()
displayResult(code: metaDataString)
return
}
}
}
func displayResult(code: String) {
let alertPrompt = UIAlertController(title: "Bar code detected", message: code, preferredStyle: .alert)
if let url = URL(string: code) {
let confirmAction = UIAlertAction(title: "Launch URL", style: .default, handler: { (action) -> Void in
UIApplication.shared.open(url, options: [:], completionHandler: { (result) in
if result {
NSLog("opened url")
} else {
let alertPrompt = UIAlertController(title: "Cannot open url", message: nil, preferredStyle: .alert)
let confirmAction = UIAlertAction(title: "OK", style: .default, handler: { (action) -> Void in
})
alertPrompt.addAction(confirmAction)
self.present(alertPrompt, animated: true, completion: {
self.setupCapture()
})
}
})
})
alertPrompt.addAction(confirmAction)
}
let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: { (action) -> Void in
self.setupCapture()
})
alertPrompt.addAction(cancelAction)
present(alertPrompt, animated: true, completion: nil)
}
}
The second part is the extension of our UIViewController subclass for AVCaptureMetadataOutputObjectsDelegate, where we catch the captured outputs.
extension BarCodeScannerViewController: AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
handleCapturedOutput(metadataObjects: metadataObjects)
}
}
Update for Swift 4.2:
.UIApplicationWillEnterForeground changes to UIApplication.willEnterForegroundNotification.
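So the observer registration shown in viewDidAppear above becomes (a sketch showing only the renamed constant; the matching removeObserver call changes the same way):
NotificationCenter.default.addObserver(self,
                                       selector: #selector(willEnterForeground),
                                       name: UIApplication.willEnterForegroundNotification,
                                       object: nil)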
If support for the iPad 2 or iPod Touch is important for your application, I'd choose a barcode scanner SDK that can decode barcodes in blurry images, such as our Scandit barcode scanner SDK for iOS and Android. Decoding blurry barcode images is also helpful on phones with autofocus cameras because the user does not have to wait for the autofocus to kick in.
Scandit comes with a free community price plan and also has a product API that makes it easy to convert barcode numbers into product names.
(Disclaimer: I'm a co-founder of Scandit)
The problem with the iPhone camera is that the first models (of which there are tons in use) have a fixed-focus camera that cannot take in-focus pictures at distances under 2 ft. The images are blurry and distorted, and if taken from a greater distance there is not enough detail/information in the barcode.
A few companies have developed iPhone apps that can accommodate for that by using advanced de-blurring technologies. You can find those applications on the Apple App Store: pic2shop, RedLaser and ShopSavvy. All of these companies have announced that they also have SDKs available, some for free or on very preferential terms; check them out.
With Swift 5 it's simple and super fast!
You just need to add the CocoaPod "BarcodeScanner". Here is the full code.
source 'https://github.com/CocoaPods/Specs.git'
platform :ios, '12.0'
target 'Simple BarcodeScanner'
do
pod 'BarcodeScanner'
end
Make sure to add the camera permission to your .plist file:
<key>NSCameraUsageDescription</key>
<string>Camera usage description</string>
And add the scanner and handle the result in your ViewController this way:
import UIKit
import BarcodeScanner
class ViewController: UIViewController, BarcodeScannerCodeDelegate, BarcodeScannerErrorDelegate, BarcodeScannerDismissalDelegate {
override func viewDidLoad() {
super.viewDidLoad()
let viewController = BarcodeScannerViewController()
viewController.codeDelegate = self
viewController.errorDelegate = self
viewController.dismissalDelegate = self
present(viewController, animated: true, completion: nil)
}
func scanner(_ controller: BarcodeScannerViewController, didCaptureCode code: String, type: String) {
print("Product's Bar code is :", code)
controller.dismiss(animated: true, completion: nil)
}
func scanner(_ controller: BarcodeScannerViewController, didReceiveError error: Error) {
print(error)
}
func scannerDidDismiss(_ controller: BarcodeScannerViewController) {
controller.dismiss(animated: true, completion: nil)
}
}
If you still have any questions or challenges, please check the sample application here with full source code.
I believe this can be done using AVFoundation; here is the sample code to do it:
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate
{
@IBOutlet weak var lblQRCodeResult: UILabel!
@IBOutlet weak var lblQRCodeLabel: UILabel!
var objCaptureSession:AVCaptureSession?
var objCaptureVideoPreviewLayer:AVCaptureVideoPreviewLayer?
var vwQRCode:UIView?
override func viewDidLoad() {
super.viewDidLoad()
self.configureVideoCapture()
self.addVideoPreviewLayer()
self.initializeQRView()
}
func configureVideoCapture() {
let objCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
var error:NSError?
let objCaptureDeviceInput: AnyObject!
do {
objCaptureDeviceInput = try AVCaptureDeviceInput(device: objCaptureDevice) as AVCaptureDeviceInput
} catch let error1 as NSError {
error = error1
objCaptureDeviceInput = nil
}
objCaptureSession = AVCaptureSession()
objCaptureSession?.addInput(objCaptureDeviceInput as! AVCaptureInput)
let objCaptureMetadataOutput = AVCaptureMetadataOutput()
objCaptureSession?.addOutput(objCaptureMetadataOutput)
objCaptureMetadataOutput.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
objCaptureMetadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode]
}
func addVideoPreviewLayer() {
objCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: objCaptureSession)
objCaptureVideoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
objCaptureVideoPreviewLayer?.frame = view.layer.bounds
self.view.layer.addSublayer(objCaptureVideoPreviewLayer!)
objCaptureSession?.startRunning()
self.view.bringSubviewToFront(lblQRCodeResult)
self.view.bringSubviewToFront(lblQRCodeLabel)
}
func initializeQRView() {
vwQRCode = UIView()
vwQRCode?.layer.borderColor = UIColor.redColor().CGColor
vwQRCode?.layer.borderWidth = 5
self.view.addSubview(vwQRCode!)
self.view.bringSubviewToFront(vwQRCode!)
}
func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
if metadataObjects == nil || metadataObjects.count == 0 {
vwQRCode?.frame = CGRectZero
lblQRCodeResult.text = "QR Code wans't found"
return
}
let objMetadataMachineReadableCodeObject = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
if objMetadataMachineReadableCodeObject.type == AVMetadataObjectTypeQRCode {
let objBarCode = objCaptureVideoPreviewLayer?.transformedMetadataObjectForMetadataObject(objMetadataMachineReadableCodeObject as AVMetadataMachineReadableCodeObject) as! AVMetadataMachineReadableCodeObject
vwQRCode?.frame = objBarCode.bounds;
if objMetadataMachineReadableCodeObject.stringValue != nil {
lblQRCodeResult.text = objMetadataMachineReadableCodeObject.stringValue
}
}
}
}
Here is simple code:
func scanbarcode()
{
view.backgroundColor = UIColor.blackColor()
captureSession = AVCaptureSession()
let videoCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed();
return;
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
metadataOutput.metadataObjectTypes = [AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypePDF417Code]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.frame = view.layer.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
view.layer.addSublayer(previewLayer);
view.addSubview(closeBtn)
view.addSubview(backimg)
captureSession.startRunning();
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .Alert)
ac.addAction(UIAlertAction(title: "OK", style: .Default, handler: nil))
presentViewController(ac, animated: true, completion: nil)
captureSession = nil
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.running == false) {
captureSession.startRunning();
}
}
override func viewWillDisappear(animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.running == true) {
captureSession.stopRunning();
}
}
func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
let readableObject = metadataObject as! AVMetadataMachineReadableCodeObject;
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
foundCode(readableObject.stringValue);
}
// dismissViewControllerAnimated(true, completion: nil)
}
func foundCode(code: String) {
var createAccountErrorAlert: UIAlertView = UIAlertView()
createAccountErrorAlert.delegate = self
createAccountErrorAlert.title = "Alert"
createAccountErrorAlert.message = code
createAccountErrorAlert.addButtonWithTitle("ok")
createAccountErrorAlert.addButtonWithTitle("Retry")
createAccountErrorAlert.show()
NSUserDefaults.standardUserDefaults().setObject(code, forKey: "barcode")
NSUserDefaults.standardUserDefaults().synchronize()
ItemBarcode = code
print(code)
}
override func prefersStatusBarHidden() -> Bool {
return true
}
override func supportedInterfaceOrientations() -> UIInterfaceOrientationMask {
return .Portrait
}
If you are developing for iOS >10.2 with Swift 4, you can try my solution. I mixed up this and this tutorial and came up with a ViewController which scans a QR code and print()s it out. I also have a Switch in my UI to toggle the camera light, which might be helpful as well. For now I have only tested it on an iPhone SE; please let me know if it doesn't work on newer iPhones.
Here you go:
import UIKit
import AVFoundation
class QRCodeScanner: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
let captureSession: AVCaptureSession = AVCaptureSession()
var videoPreviewLayer: AVCaptureVideoPreviewLayer?
let qrCodeFrameView: UIView = UIView()
var captureDevice: AVCaptureDevice?
override func viewDidLoad() {
// Get the back-facing camera for capturing videos
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera], mediaType: AVMediaType.video, position: .back)
captureDevice = deviceDiscoverySession.devices.first
if captureDevice == nil {
print("Failed to get the camera device")
return
}
do {
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
let input = try AVCaptureDeviceInput(device: captureDevice!)
// Set the input device on the capture session.
captureSession.addInput(input)
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
let captureMetadataOutput = AVCaptureMetadataOutput()
captureSession.addOutput(captureMetadataOutput)
// Set delegate and use the default dispatch queue to execute the call back
captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
if let videoPreviewLayer = videoPreviewLayer {
videoPreviewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
videoPreviewLayer.frame = view.layer.bounds
view.layer.addSublayer(videoPreviewLayer)
// Start video capture.
captureSession.startRunning()
if let hasFlash = captureDevice?.hasFlash, let hasTorch = captureDevice?.hasTorch {
if hasFlash && hasTorch {
view.bringSubview(toFront: bottomBar)
try captureDevice?.lockForConfiguration()
}
}
}
// QR Code Overlay
qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
qrCodeFrameView.layer.borderWidth = 2
view.addSubview(qrCodeFrameView)
view.bringSubview(toFront: qrCodeFrameView)
} catch {
// If any error occurs, simply print it out and don't continue any more.
print("Error: \(error)")
return
}
}
// MARK: Buttons and Switch
@IBAction func switchFlashChanged(_ sender: UISwitch) {
do {
if sender.isOn {
captureDevice?.torchMode = .on
} else {
captureDevice?.torchMode = .off
}
}
}
// MARK: AVCaptureMetadataOutputObjectsDelegate
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
// Check if the metadataObjects array is not nil and it contains at least one object.
if metadataObjects.count == 0 {
qrCodeFrameView.frame = CGRect.zero
return
}
// Get the metadata object.
let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
if metadataObj.type == AVMetadataObject.ObjectType.qr {
// If the found metadata is equal to the QR code metadata then update the status label's text and set the bounds
let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
qrCodeFrameView.frame = barCodeObject!.bounds
print("QR Code: \(metadataObj.stringValue)")
}
}
}
You can check ZBarSDK to read QR codes and EAN/ISBN codes; it's simple to integrate. Try the following code.
- (void)scanBarcodeWithZBarScanner
{
// ADD: present a barcode reader that scans from the camera feed
ZBarReaderViewController *reader = [ZBarReaderViewController new];
reader.readerDelegate = self;
reader.supportedOrientationsMask = ZBarOrientationMaskAll;
ZBarImageScanner *scanner = reader.scanner;
// TODO: (optional) additional reader configuration here
// EXAMPLE: disable rarely used I2/5 to improve performance
[scanner setSymbology: ZBAR_I25
config: ZBAR_CFG_ENABLE
to: 0];
//Get the return value from controller
[reader setReturnBlock:^(BOOL value) {
}];
}
And in didFinishPickingMediaWithInfo we get the barcode value:
- (void) imagePickerController: (UIImagePickerController*) reader
didFinishPickingMediaWithInfo: (NSDictionary*) info
{
// ADD: get the decode results
id<NSFastEnumeration> results =
[info objectForKey: ZBarReaderControllerResults];
ZBarSymbol *symbol = nil;
for(symbol in results)
// EXAMPLE: just grab the first barcode
break;
// EXAMPLE: do something useful with the barcode data
barcodeValue = symbol.data;
// EXAMPLE: do something useful with the barcode image
barcodeImage = [info objectForKey:UIImagePickerControllerOriginalImage];
[_barcodeIV setImage:barcodeImage];
//set the values for to TextFields
[self setBarcodeValue:YES];
// ADD: dismiss the controller (NB dismiss from the *reader*!)
[reader dismissViewControllerAnimated:YES completion:nil];
}
The simplest way is to use a 3rd-party framework with a minimal UI that can be improved. Check QRCodeScanner83.
You can simply use the following code (check the documentation on how to create the view controller in your storyboard):
import QRCodeScanner83
guard let vc = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(identifier: "CodeScannerViewController") as? CodeScannerViewController else {
return
}
vc.callbackCodeScanned = { code in
print("SCANNED CODE: \(code)")
vc.dismiss(animated: true, completion: nil)
}
self.present(vc, animated: true, completion: nil)
