QR scanner not working but not getting any errors? - iOS

import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var video = AVCaptureVideoPreviewLayer()
@IBOutlet weak var square: UIImageView!
override func viewDidLoad() {
super.viewDidLoad()
//creates the session
let session = AVCaptureSession()
//Defines the capture device
guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video),
let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
session.addInput(input)
let output = AVCaptureMetadataOutput()
session.addOutput(output)
output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
output.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
video = AVCaptureVideoPreviewLayer(session: session)
video.frame = view.layer.bounds
view.layer.addSublayer(video)
self.view.bringSubviewToFront(square)
session.startRunning()
}
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
if metadataObjects != nil && metadataObjects.count != 0 {
if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
if object.type == AVMetadataObject.ObjectType.qr {
let alert = UIAlertController(title: "QR Code", message: object.stringValue, preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
alert.addAction(UIAlertAction(title: "Copy", style: .default, handler: { _ in
UIPasteboard.general.string = object.stringValue
}))
present(alert, animated: true, completion: nil)
}
}
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
}
No errors print out and the console is empty. This is my updated version of some Swift 3 code that did work, so I don't understand why it won't work now.
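A likely cause: in Swift 4 the AVCaptureMetadataOutputObjectsDelegate callback was renamed, so the captureOutput(_:didOutputMetadataObjects:from:) method above is never called. A minimal sketch of the signature the system actually calls (the body here only indicates where the existing alert code would go):
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    if let object = metadataObjects.first as? AVMetadataMachineReadableCodeObject, object.type == .qr {
        // object.stringValue holds the scanned text; present the alert from the code above here.
    }
}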

Try this; it's working fine for me using Swift 5.2. Modify the found(code:) function to do whatever you want with the returned string. Reference: qr scanner
import AVFoundation
import UIKit
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
print(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}
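For example, a minimal sketch (not part of the original answer) of a found(code:) that mirrors the alert behavior from the question, assuming the same captureSession property:
func found(code: String) {
    let alert = UIAlertController(title: "QR Code", message: code, preferredStyle: .alert)
    alert.addAction(UIAlertAction(title: "Copy", style: .default) { _ in
        UIPasteboard.general.string = code
    })
    alert.addAction(UIAlertAction(title: "Retake", style: .default) { _ in
        // Resume scanning for another code.
        self.captureSession.startRunning()
    })
    present(alert, animated: true)
}
If you go this route, drop the dismiss(animated: true) call in metadataOutput(_:didOutput:from:) so the scanner stays on screen behind the alert.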

Related

How to set an image and button on the QR code scanner screen?

I'm looking to make a barcode scanner app. As I'm new to Swift and Xcode, I've managed to get help from other Stack Overflow posts to create a page where I can scan a barcode. My issue is that I don't want it full screen: I want a label on top, a button at the bottom, and the QR code preview in the center. I have uploaded a screenshot as well.
But I'm getting a full-screen preview. How can I achieve this layout?
Here's my code:
@IBAction func scanButtonTapped(_ sender: UIButton) {
scanQRCodeTapped()
}
func scanQRCodeTapped(){
DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) {
self.view.backgroundColor = UIColor.black
self.captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else {
print("No Device Found")
return
}
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (self.captureSession.canAddInput(videoInput)) {
self.captureSession.addInput(videoInput)
} else {
self.failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (self.captureSession.canAddOutput(metadataOutput)) {
self.captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr]
} else {
self.failed()
return
}
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
self.previewLayer.frame = self.view.layer.bounds
self.previewLayer.videoGravity = .resizeAspectFill
self.view.layer.addSublayer(self.previewLayer)
self.captureSession.startRunning()
}
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
login(emailID: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
print(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
You can design the screen in the storyboard to match your layout.
The gray area is a UIView in which we will add the previewLayer for the scanner.
The view controller code should look like this:
import UIKit
import AVKit
class QRCodeViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
@IBOutlet weak var titleLabel: UILabel!
@IBOutlet weak var scannerView: UIView!
@IBOutlet weak var scanButton: UIButton!
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
@IBAction func scanButtonAction(_ sender: UIButton) {
scanQRCodeTapped()
}
func scanQRCodeTapped() {
self.scannerView.backgroundColor = UIColor.black
DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) {
self.captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else {
print("No Device Found")
return
}
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (self.captureSession.canAddInput(videoInput)) {
self.captureSession.addInput(videoInput)
} else {
self.failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (self.captureSession.canAddOutput(metadataOutput)) {
self.captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr]
} else {
self.failed()
return
}
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
self.previewLayer.frame = self.scannerView.layer.bounds
self.previewLayer.videoGravity = .resizeAspectFill
self.scannerView.layer.addSublayer(self.previewLayer)
self.captureSession.startRunning()
}
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
login(emailID: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
print(code)
}
}
When you tap the scan button, the camera preview appears only inside the gray scanner view, with the label and button still visible.

After a barcode scan the screen freezes

I am learning Swift and getting better. The barcode application I have works, but it keeps scanning the barcode endlessly until I force a pop-up with 'OK' and 'Cancel' buttons to accept or reject the scan. While researching, I found very similar code (shown below), so I created a test project with just one view controller to try it. It works great, but it freezes as soon as the barcode is scanned. My intent is to unfreeze the screen, accept and save the value, and allow the user to scan more barcodes. Any help is greatly appreciated. I tried stopping the scan just under the found(code:) line, but it didn't help with the freezing. I may be wrong, but I suspect viewWillDisappear never gets called here.
import AVFoundation
import UIKit
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.ean8, .ean13, .pdf417]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
print(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}
Edited code:
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
barcodeValue = stringValue
}
// dismiss(animated: true)
previewLayer.removeFromSuperlayer()
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
let alertController = UIAlertController(title: "Barcode Scanned", message: barcodeValue!, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: "OK", style: .default, handler: {(alert: UIAlertAction!) in self.restartScan()}))
alertController.addAction(UIAlertAction(title: "Cancel", style: .cancel, handler:nil))
present(alertController, animated: true, completion: nil)
//captureSession.startRunning()
}
func found(code: String) {
print(code)
}
func restartScan() {
captureSession.startRunning()
}
It's not a freeze: when the first barcode is detected you stop the capture with captureSession.stopRunning() inside metadataOutput(_:didOutput:from:). You need this
previewLayer.removeFromSuperlayer()
instead of this
dismiss(animated: true)
because it's a layer, not a view controller to dismiss, that you previously added in
view.layer.addSublayer(previewLayer)
Note: the background of your view is black, so that is what remains visible once the preview layer is removed.
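If the goal is to keep scanning after each barcode rather than dismissing anything, one option (a sketch, not from the original answer, assuming the same captureSession and found(code:) as above) is to leave the preview layer in place and simply stop and restart the session around the alert:
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    captureSession.stopRunning()
    guard let readableObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
          let stringValue = readableObject.stringValue else { return }
    AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
    found(code: stringValue)
    let alertController = UIAlertController(title: "Barcode Scanned", message: stringValue, preferredStyle: .alert)
    alertController.addAction(UIAlertAction(title: "OK", style: .default) { _ in
        // The preview layer was never removed, so just resume the session to scan again.
        self.captureSession.startRunning()
    })
    alertController.addAction(UIAlertAction(title: "Cancel", style: .cancel))
    present(alertController, animated: true)
}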

Scanning QR Code

I've got the following class in an attempt to scan a QR code. I just can't get it to work for some reason. I'm able to present this view controller, which shows what the camera is seeing, but whenever I try to scan a QR code nothing happens.
Any idea what is missing?
import AVFoundation
import UIKit
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
print(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}
I've also added the following value to plist:
key: Privacy - Camera Usage Description
value: $(PRODUCT_NAME) camera use
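For reference, that entry corresponds to the raw key NSCameraUsageDescription in the Info.plist source; the value string is just an example:
<key>NSCameraUsageDescription</key>
<string>$(PRODUCT_NAME) camera use</string>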
Your scanning-code check needs to be in the following delegate method:
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)
Your code will be like this:
import AVFoundation
import UIKit
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
print(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}

Navigation Bar in Scanner View - iOS - Swift

I want to add a navigation bar to my barcode scanner (modal) view so that I can close it, but something doesn't work: the navigation bar never appears.
These are my navBar property and setupNavBar method used to set up the navigation bar:
let navBar: UINavigationBar = {
let bar = UINavigationBar()
bar.translatesAutoresizingMaskIntoConstraints = false
return bar
}()
func setupNavBar(){
view.addSubview(navBar)
view.bringSubview(toFront: navBar)
navBar.heightAnchor.constraint(equalTo: view.heightAnchor, multiplier: 0.09).isActive = true
navBar.widthAnchor.constraint(equalTo: view.widthAnchor).isActive = true
navBar.leftAnchor.constraint(equalTo: view.leftAnchor).isActive = true
navBar.rightAnchor.constraint(equalTo: view.rightAnchor).isActive = true
navBar.topAnchor.constraint(equalTo: view.topAnchor).isActive = true
let doneItem = UIBarButtonItem(title:"Cancel",style: .done, target: nil, action: #selector(doneButton));
let navItem = UINavigationItem(title: "Scanner");
navItem.rightBarButtonItem = doneItem;
navBar.setItems([navItem], animated: false);
}
func doneButton(){
let presentingViewController: UIViewController! = self.presentingViewController
presentingViewController.presentingViewController!.dismiss(animated: true, completion: nil)
}
Below this is my code for the scanner:
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
setupNavBar()
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
let videoCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed();
return;
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypePDF417Code]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.frame = view.layer.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
view.layer.addSublayer(previewLayer);
captureSession.startRunning();
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning();
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning();
}
}
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
let readableObject = metadataObject as! AVMetadataMachineReadableCodeObject;
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: readableObject.stringValue);
self.isbn = readableObject.stringValue
self.performSegue(withIdentifier: "scanToSell", sender: nil)
}
}
func found(code: String) {
print(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
Thank you in advance. Good Job
I faced the same issue a few days ago and created a scanner view controller.
Check this: https://gist.github.com/Raghvendra7/ff6335c47bbca04fb88d1cb76917d2e5 - it solved the cancel issue. Hope it helps.
I'm working on a scanner effect in this controller and will update it once completed.
I have found my error: in my viewDidLoad method, the setupNavBar() call has to move to after the preview layer is added, like this. Since view.layer.addSublayer(previewLayer) places the preview layer above everything added before it, calling setupNavBar() first left the navigation bar hidden underneath the preview.
override func viewDidLoad() {
super.viewDidLoad()
// setupNavBar() No
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
let videoCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed();
return;
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypePDF417Code]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.frame = view.layer.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
view.layer.addSublayer(previewLayer);
setupNavBar() // Yes
captureSession.startRunning();
}
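An alternative that does not depend on call order (not from the original answer) is to insert the preview layer at the bottom of the view's layer hierarchy, so the navigation bar and any other subviews always stay on top:
// Instead of view.layer.addSublayer(previewLayer):
view.layer.insertSublayer(previewLayer, at: 0)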

Screen gets stuck while capturing a QR code the second time in my iOS application in Swift

In my application I need to read a QR code. The first time, it shows the string from the QR code, but the second time I try to use it, it gets stuck on the captured frame even though the underlying code flow keeps running.
import UIKit
import AVFoundation
class BarCodeViewController: UIViewController,AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
let appDelegat : AppDelegate = UIApplication.sharedApplication().delegate as! AppDelegate
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.blackColor()
captureSession = AVCaptureSession()
let videoCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed();
return;
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
metadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.frame = view.layer.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
view.layer.addSublayer(previewLayer);
captureSession.startRunning();
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .Alert)
ac.addAction(UIAlertAction(title: "OK", style: .Default, handler: nil))
presentViewController(ac, animated: true, completion: nil)
captureSession = nil
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.running == false) {
captureSession.startRunning();
}
}
override func viewWillDisappear(animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.running == true) {
captureSession.stopRunning();
}
}
func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
let readableObject = metadataObject as! AVMetadataMachineReadableCodeObject;
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
foundCode(readableObject.stringValue);
}
}
func foundCode(code: String) {
var x = code.componentsSeparatedByString("-")
let productProfile = ProductDetailViewController(nibName: "ProductDetailViewController", bundle: nil)
productProfile.ProductCode = x[1]
dismissViewControllerAnimated(true, completion: nil)
appDelegat.centerContainer!.centerViewController = productProfile
}
override func prefersStatusBarHidden() -> Bool {
return true
}
override func supportedInterfaceOrientations() -> UIInterfaceOrientationMask {
return .Portrait
}
}
I tried but cannot figure it out. Please help me with this.
func foundCode(code: String) {
var x = code.componentsSeparatedByString("-")
let productProfile = ProductDetailViewController(nibName: "ProductDetailViewController", bundle: nil)
productProfile.ProductCode = x[1]
captureSession.stopRunning()
dismissViewControllerAnimated(true, completion: nil)
self.navigationController?.popViewControllerAnimated(false)
appDelegat.centerContainer!.centerViewController = productProfile
}
I added self.navigationController?.popViewControllerAnimated(false) (and captureSession.stopRunning() before dismissing).
