Presenting a UIViewController from a UIView - iOS

I have recently started with iOS development. Currently I have a requirement to present a QR scanner view when a view is tapped. I came across the library https://github.com/twostraws/CodeScanner, but it doesn't work for me.
I also have this QR code scanner implementation:
import AVFoundation
import UIKit
class QRCode: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.ean8, .ean13, .pdf417]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
print(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}
However, I am not able to present this scanner from a class that inherits from UIView.
The class in which I am trying to use the QRCode controller is declared as:
class A: UIView
I have only recently started with iOS, so any help would be great.

If you want to present a view controller from an already presented view controller, you can do the following:
class ViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
}
@IBAction func presentPressed(sender: Any) {
let mainStoryboard: UIStoryboard = UIStoryboard(name: "Main", bundle: Bundle.main)
let presentedVC = mainStoryboard.instantiateViewController(withIdentifier: "presentedViewController")
present(presentedVC, animated: true, completion: nil)
}
}
class presentedViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
}
@IBAction func presentQRVCPressed(sender: Any) {
let mainStoryboard = UIStoryboard(name: "Main", bundle: Bundle.main)
let qrVC = mainStoryboard.instantiateViewController(withIdentifier: "QRViewController")
present(qrVC, animated: true, completion: nil)
}
}
class QRViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
}
}
Otherwise, if you want to present a view controller when tapping inside a view, you can use a delegate:
protocol yourCustomViewDelegate: class {
func presentQRPressed()
}
class yourCustomView: UIView {
// MARK: - Properties
weak var delegate: yourCustomViewDelegate?
@IBAction func presentQRButton(sender: Any) {
if let delegate = delegate {
delegate.presentQRPressed()
}
}
}
class yourViewController: UIViewController, yourCustomViewDelegate {
func presentQRPressed() {
let qrVC = QRCode() // the scanner view controller from the question
present(qrVC, animated: true, completion: nil)
}
}
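If wiring a delegate back to the view controller is not convenient, another option is to walk the responder chain from the UIView up to the view controller that owns it and present the scanner from there. This is a minimal sketch (the parentViewController() helper is a name introduced here, not part of the question or answer):

import UIKit

extension UIView {
    // Walk the responder chain until we find the view controller that owns this view.
    func parentViewController() -> UIViewController? {
        var responder: UIResponder? = next
        while let current = responder {
            if let viewController = current as? UIViewController {
                return viewController
            }
            responder = current.next
        }
        return nil
    }
}

class A: UIView {
    // Call this from whatever tap handling the view already has.
    func presentScanner() {
        let scanner = QRCode()
        parentViewController()?.present(scanner, animated: true)
    }
}

The delegate approach above is usually the cleaner design; the responder-chain helper is mainly handy when the view is created far away from its controller.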

Related

iOS barcode scanner App crashes with no crash log

I am trying to make a barcode scanner app. As soon as the camera session begins, the app crashes within a few seconds.
I am unable to find the reason behind this, or how to fix it.
I followed https://www.appcoda.com/barcode-reader-swift/ to build the barcode scanner.
import Foundation
import UIKit
import AVFoundation
import CoreData
enum BarcodeScanError : String{
case cameraLoadFailed = "Camera Load Failed"
case NoValidBarcode = "No Valid Barcode"
}
class ScanBoardingPassViewController : UIViewController {
//MARK: - Properties
var viewModel : ScanBoardingPassViewModel? = nil
var captureSession : AVCaptureSession?
var videoPreviewLayer: AVCaptureVideoPreviewLayer?
var qrCodeFrameView: UIView?
private let supportedCodeTypes = [AVMetadataObject.ObjectType.aztec,
AVMetadataObject.ObjectType.pdf417]
//MARK: - Outlets
@IBOutlet weak var btnCancel: UIButton!
//MARK: - View Life Cycle
override func viewDidLoad() {
viewModel = ScanBoardingPassViewModel()
self.captureSession = AVCaptureSession()
self.setUpView()
super.viewDidLoad()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(true)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
//MARK: - Set Up View
func setUpView() {
self.setUpBarCodeScanner()
self.view.bringSubviewToFront(self.btnCancel)
self.setUpBarcodeRecognizerFrame()
}
private func setUpBarCodeScanner() {
// Get the back-facing camera for capturing videos
guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
debugPrint(BarcodeScanError.cameraLoadFailed)
return
}
do {
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
let input = try AVCaptureDeviceInput(device: captureDevice)
// Set the input device on the capture session.
captureSession?.addInput(input)
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
let captureMetadataOutput = AVCaptureMetadataOutput()
captureSession?.addOutput(captureMetadataOutput)
// Set delegate and use the default dispatch queue to execute the call back
captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
// captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
} catch {
// If any error occurs, simply print it out and don't continue any more.
print(error)
return
}
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
videoPreviewLayer?.frame = view.layer.bounds
view.layer.addSublayer(videoPreviewLayer!)
// Start video capture.
captureSession?.startRunning()
}
private func setUpBarcodeRecognizerFrame() {
// Initialize QR Code Frame to highlight the QR code
qrCodeFrameView = UIView()
if let qrCodeFrameView = qrCodeFrameView {
qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
qrCodeFrameView.layer.borderWidth = 2
view.addSubview(qrCodeFrameView)
view.bringSubviewToFront(qrCodeFrameView)
}
}
//MARK: - Outlets
@IBAction func btnCancelPressed(_ sender: UIButton) {
self.dismissView()
}
func dismissView() {
self.dismiss(animated: true, completion: nil)
}
}
extension ScanBoardingPassViewController: AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
// Check if the metadataObjects array is not nil and it contains at least one object.
if metadataObjects.count == 0 {
qrCodeFrameView?.frame = CGRect.zero
debugPrint(BarcodeScanError.NoValidBarcode)
return
}
// Get the metadata object.
let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
if supportedCodeTypes.contains(metadataObj.type) {
// If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
qrCodeFrameView?.frame = barCodeObject!.bounds
if metadataObj.stringValue != nil {
captureSession?.stopRunning()
debugPrint("Valid Barcode found \(metadataObj.stringValue!)")
if let boardingPass = viewModel?.parseBoardingPassString(boardingPassString : metadataObj.stringValue!) {
let unitOfWork = UnitOfWork(context:( UIApplication.shared.delegate as! AppDelegate).persistentContainer.newBackgroundContext() )
unitOfWork.boardingPassRepository.saveBoardingPasses(boardingPass: boardingPass)
unitOfWork.saveChanges()
print(unitOfWork.boardingPassRepository.getBoardingPasses(predicate: nil))
self.dismissView()
}
}
}
}
}
The camera feed doesn't get stuck, but the app drops into lldb (crashes) every time within a few seconds.
// Created by Satya Narayana on 17/11/20.
//
import UIKit
import AVFoundation
class QRViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
//MARK: Outlets
@IBOutlet weak var qrLbl: UILabel! // BarCode displaying Label
@IBOutlet weak var sView: UIView! // View
//MARK: Variables
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
//MARK: View Methods
override func viewDidLoad() {
super.viewDidLoad()
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr, .ean13, .code128]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = sView.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
sView.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
self.showToast(message: "Scanning not supported. Your device does not support scanning a code from an item. Please use a device with a camera.", seconds: 1.0)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
qrLbl.isHidden = true
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
//MARK:- Found BARCODE
func found(code: String) {
print(code)
if code != ""{
print(code) // This is Barcode
qrLbl.text = code
}else{
// if you need run again uncomment below line
//self.captureSession.startRunning()
}
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}
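One difference between ScanBoardingPassViewController in the question and the other scanners in this thread, which may or may not be the cause of this particular crash: it calls addInput/addOutput unconditionally, while the working examples guard them with canAddInput/canAddOutput, and AVCaptureSession raises an exception when an input or output cannot be added. A sketch of the guarded setup, reusing the question's captureSession and supportedCodeTypes properties:

private func setUpBarCodeScanner() {
    guard let captureDevice = AVCaptureDevice.default(for: .video),
          let input = try? AVCaptureDeviceInput(device: captureDevice),
          let session = captureSession else {
        debugPrint(BarcodeScanError.cameraLoadFailed)
        return
    }
    // Bail out instead of raising an exception if the session refuses the input or output.
    guard session.canAddInput(input) else {
        debugPrint(BarcodeScanError.cameraLoadFailed)
        return
    }
    session.addInput(input)
    let captureMetadataOutput = AVCaptureMetadataOutput()
    guard session.canAddOutput(captureMetadataOutput) else {
        debugPrint(BarcodeScanError.cameraLoadFailed)
        return
    }
    session.addOutput(captureMetadataOutput)
    captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
    captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
}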

Scanning barcodes one at a time in SwiftUI

I've used this guide to create my scanner:
I first show QRCodeScan() from my ContentView with an overlay:
import SwiftUI
struct ContentView: View {
var body: some View {
ZStack {
QRCodeScan()
ScannerOverlayView()
}
}
}
QRCodeScan:
import SwiftUI
struct QRCodeScan: UIViewControllerRepresentable {
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
func makeUIViewController(context: Context) -> ScannerViewController {
let vc = ScannerViewController()
vc.delegate = context.coordinator
return vc
}
func updateUIViewController(_ vc: ScannerViewController, context: Context) {
}
class Coordinator: NSObject, QRCodeScannerDelegate {
@State var barcode = ""
func codeDidFind(_ code: String) {
barcode = code
FoundItemSheet(barcode: self.$barcode) //This creates a sheet at the bottom of the screen, saying if a product was found with the barcode
}
var parent: QRCodeScan
init(_ parent: QRCodeScan) {
self.parent = parent
}
}
}
ScannerViewController:
protocol QRCodeScannerDelegate {
func codeDidFind(_ code: String)
}
import AVFoundation
import UIKit
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
var delegate: QRCodeScannerDelegate?
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.ean13, .ean8, .qr]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
self.delegate?.codeDidFind(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}
It scans a barcode successfully, but then the frozen picture remains on the screen. I want a barcode to be scanned, the code to be passed to FoundItemSheet(), and then the scanner to open again.
I've tried running QRCodeScan() inside codeDidFind() and calling the ScannerViewController again.
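One way to get that behaviour (a sketch of one approach, not taken from an accepted answer; the one-second delay is an arbitrary choice) is to stop dismissing the controller in the delegate callback, hand the code to the coordinator, and restart the session after a short pause so the same barcode is not reported repeatedly:

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    if let readableObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
       let stringValue = readableObject.stringValue {
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        found(code: stringValue)
    }
    // No dismiss(animated:) here; the scanner stays on screen for the next code.
}

func found(code: String) {
    delegate?.codeDidFind(code)
    // Pause briefly so one physical barcode does not fire the delegate many times, then resume.
    captureSession.stopRunning()
    DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { [weak self] in
        self?.captureSession.startRunning()
    }
}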

BarCodeScanning Not Working Swift iOS 11

Before you jump in and tell me it's a duplicate or send me links, please check out the code.
The camera shows up, but the output delegate is never called.
import AVFoundation
import UIKit
protocol ScannerViewControllerOutputDelegate : class {
func scannerOutput(scannedString: String?)
}
class ScannerViewController: UIViewController {
public weak var delegate: ScannerViewControllerOutputDelegate?
@IBOutlet private weak var cameraView: UIView!
@IBOutlet private weak var lblDataType: UILabel!
@IBOutlet private weak var lblDataInfo: UILabel!
private let captureSession: AVCaptureSession = AVCaptureSession()
private var previewLayer: AVCaptureVideoPreviewLayer!
private var presenter: ScannerViewPresenter?
deinit {
captureSession.stopRunning()
presenter = nil
delegate = nil
}
@IBAction func didTapCancelButton(_ sender: UIBarButtonItem) {
print("ScannerViewController -> USER CANCELED: will hide scanner view controller")
captureSession.stopRunning()
dismiss(animated: true) {
print("ScannerViewController -> USER CANCELED: did hide scanner view controller")
self.delegate?.scannerOutput(scannedString: nil)
}
}
override func viewDidLoad() {
super.viewDidLoad()
presenter = ScannerViewPresenter(withController: self)
self.modalPresentationStyle = .formSheet
view.backgroundColor = .eify_darkGray
guard let videoInput = initializeDeviceCamera() else {
presenter?.deviceNotSupportedAlert()
return
}
guard captureSession.add(captureInput: videoInput) else {
presenter?.deviceNotSupportedAlert()
return
}
guard captureSession.addMetadataOutput(withDelegate: self, andQueue: .global()) else {
presenter?.deviceNotSupportedAlert()
return
}
setupScanPreview()
captureSession.startRunning()
}
fileprivate func initializeDeviceCamera() -> AVCaptureDeviceInput? {
guard let videoCaptureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
presenter?.deviceNotSupportedAlert()
return nil
}
do {
let videoInput: AVCaptureDeviceInput
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
return videoInput
} catch let outError as NSError {
presenter?.deviceNotSupportedAlert()
print("ScannerViewController -> Device Not Supported: \(outError.description)")
return nil
}
}
fileprivate func setupScanPreview() {
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = cameraView.layer.bounds
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
view.layer.addSublayer(previewLayer)
}
fileprivate func stopSession() {
if (captureSession.isRunning == true) {
captureSession.stopRunning()
}
}
fileprivate func startSession() {
if (captureSession.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
startSession()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
stopSession()
}
fileprivate func found(code: String?) {
delegate?.scannerOutput(scannedString: code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .all
}
}
extension ScannerViewController : AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
let readableObject = metadataObject as! AVMetadataMachineReadableCodeObject
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: readableObject.stringValue)
}
dismiss(animated: true)
}
}
extension AVCaptureSession {
func add(captureInput videoInput: AVCaptureDeviceInput) -> Bool {
if self.canAddInput(videoInput) {
self.addInput(videoInput)
return true
}
return false
}
func addMetadataOutput(withDelegate delegate: AVCaptureMetadataOutputObjectsDelegate, andQueue queue: DispatchQueue) -> Bool {
let metadataOutput = AVCaptureMetadataOutput()
if self.canAddOutput(metadataOutput) {
metadataOutput.setMetadataObjectsDelegate(delegate, queue: queue)
if metadataOutput.canAddObject(ofType: .qr) {
metadataOutput.metadataObjectTypes = [.qr]
}
self.addOutput(metadataOutput)
return true
}
return false
}
}
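For comparison, the scanners elsewhere on this page add the metadata output to the session first and only then set metadataObjectTypes; availableMetadataObjectTypes stays empty until the output is attached to a session with a video input. A sketch of the same helper with that ordering (an assumption about the fix, since no accepted answer is shown here):

extension AVCaptureSession {
    func addMetadataOutput(withDelegate delegate: AVCaptureMetadataOutputObjectsDelegate, andQueue queue: DispatchQueue) -> Bool {
        let metadataOutput = AVCaptureMetadataOutput()
        guard canAddOutput(metadataOutput) else { return false }
        // Attach the output before choosing object types.
        addOutput(metadataOutput)
        metadataOutput.setMetadataObjectsDelegate(delegate, queue: queue)
        guard metadataOutput.availableMetadataObjectTypes.contains(.qr) else { return false }
        metadataOutput.metadataObjectTypes = [.qr]
        return true
    }
}

Delivering the metadata callbacks on DispatchQueue.main, as the other examples here do, also avoids touching UIKit from a background queue inside the delegate.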

Swift 4 QRCode Scanning [duplicate]

This question already has answers here:
Barcode on swift 4
(4 answers)
Closed 5 years ago.
This is the code I'm using to try to scan QR codes:
import AVFoundation
import UIKit
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
print(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}
Found it here.
But it doesn't work. Everything seems set up correctly: the view controller loads and the captureSession runs, but when I point the camera at a QR code nothing happens; captureOutput() is never called.
Your delegate method signature is wrong. It should be like below:
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)
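In full, the handler with the current signature looks like the ones used elsewhere on this page; only the method name and parameter types change, the body stays the same:

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    captureSession.stopRunning()
    if let metadataObject = metadataObjects.first {
        guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
        guard let stringValue = readableObject.stringValue else { return }
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        found(code: stringValue)
    }
    dismiss(animated: true)
}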

Disable rotation only for AVCaptureVideoPreviewLayer

I have a UITabBarController with 4 tabs. One of them shows an AVCaptureVideoPreviewLayer (it is a barcode scanner). What I would like to do is disable autorotation only for the AVCaptureVideoPreviewLayer (like the iOS camera app), while the tab bar itself still rotates with the screen. It is a tricky situation because UITabBarController doesn't easily let you disable rotation.
The code for my camera view is:
import UIKit
import AVFoundation
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
// MARK: Properties
/// Manages the data flow from the capture stage through our input devices.
var captureSession: AVCaptureSession!
/// Dispays the data as captured from our input device.
var previewLayer: AVCaptureVideoPreviewLayer!
// MARK: Methods
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.blackColor()
self.captureSession = AVCaptureSession()
// This object represents a physical capture device and the properties associated with that device
let videoCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
// Is useful for capturing the data from the input device
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
// TODO: maybe show an alert
return
}
if (self.captureSession.canAddInput(videoInput)) {
self.captureSession.addInput(videoInput)
} else {
failed();
return;
}
let metadataOutput = AVCaptureMetadataOutput()
if (self.captureSession.canAddOutput(metadataOutput)) {
self.captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
metadataOutput.metadataObjectTypes = [AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypePDF417Code,
AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code,
AVMetadataObjectTypeCode39Mod43Code, AVMetadataObjectTypeCode93Code,
AVMetadataObjectTypeCode128Code]
} else {
failed()
return
}
// Adds the preview layer to display the captured data. Sets the videoGravity to AspectFill so that it covers the full screen
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession);
self.previewLayer.frame = self.view.layer.bounds;
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.view.layer.addSublayer(previewLayer);
self.captureSession.startRunning();
}
override func viewDidLayoutSubviews() {
self.previewLayer?.frame = self.view.layer.bounds;
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
if (self.captureSession.running == false) {
self.captureSession.startRunning();
}
}
override func viewWillDisappear(animated: Bool) {
super.viewWillDisappear(animated)
if (self.captureSession.running == true) {
self.captureSession.stopRunning();
}
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .Alert)
ac.addAction(UIAlertAction(title: "OK", style: .Default, handler: nil))
presentViewController(ac, animated: true, completion: nil)
self.captureSession = nil
}
func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
self.captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
let readableObject = metadataObject as! AVMetadataMachineReadableCodeObject;
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
foundCode(readableObject.stringValue);
}
}
/// Completes some tasks when a barcode is found.
///
/// - parameter code: The the barcode found.
func foundCode(code: String) {
}
}
I'm not 100% sure if I understand it correctly. But what you could do is:
1 - Subclass your UITabBarController and override shouldAutorotate with something like this:
override var shouldAutorotate: Bool {
guard let viewController = selectedViewController else { return false }
return viewController.shouldAutorotate
}
This means that the selected UIViewController will define if it should autorotate.
2 - Now that the UITabBarController asks its view controllers whether it should autorotate, you have to override shouldAutorotate in each of those controllers as well, as sketched below.
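A sketch in current Swift syntax (OtherTabViewController stands in for whatever the other tabs actually use): the scanner tab refuses rotation while the other tabs allow it.

// The scanner tab stays fixed, like the iOS camera app.
class ScannerViewController: UIViewController {
    override var shouldAutorotate: Bool {
        return false
    }
}

// The remaining tabs keep rotating with the device.
class OtherTabViewController: UIViewController {
    override var shouldAutorotate: Bool {
        return true
    }
}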
And that's it!
Sorry if I misunderstood your question. If you provide more information it would help.
Take care :)
