AVCaptureMetadataOutputObjectsDelegate method not called - iOS

I have a ViewController that continuously scans for QR codes and implements AVCaptureMetadataOutputObjectsDelegate to retrieve the metadata output.
import UIKit
import AVFoundation

class ScanViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    var captureSession = AVCaptureSession()
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()

        // Get the back-facing camera for capturing videos.
        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera], mediaType: AVMediaTypeVideo, position: .back)

        guard let captureDevice = deviceDiscoverySession?.devices.first else {
            print("Failed to get the camera device")
            return
        }

        do {
            // Get an instance of the AVCaptureDeviceInput class using the previous device object.
            let input = try AVCaptureDeviceInput(device: captureDevice)

            // Set the input device on the capture session.
            captureSession.addInput(input)

            // Initialize an AVCaptureMetadataOutput object and set it as the output device of the capture session.
            let captureMetadataOutput = AVCaptureMetadataOutput()
            captureSession.addOutput(captureMetadataOutput)

            // Set the delegate and use the default dispatch queue to execute the callback.
            captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            captureMetadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode]
        } catch {
            // If any error occurs, simply print it out and don't continue any more.
            print(error)
            return
        }

        // Initialize the video preview layer and add it as a sublayer to the view's layer.
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        videoPreviewLayer?.frame = view.layer.bounds
        view.layer.addSublayer(videoPreviewLayer!)

        // Start video capture.
        captureSession.startRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        print("in function of extension")

        // Check that the metadataObjects array contains at least one object.
        if metadataObjects.count == 0 {
            print("nada qr code")
            return
        }

        // Get the metadata object.
        let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject

        if metadataObj.type == AVMetadataObjectTypeQRCode {
            if metadataObj.stringValue != nil {
                print(metadataObj.stringValue)
            }
        }
    }
}
But for some reason the delegate callback doesn't get called. In the tutorial I'm following, it runs perfectly.
Hopefully someone can point me in the right direction; anything would be immensely helpful. Thanks so much in advance.

The reason it doesn't run is that metadataOutput(_:didOutput:from:) is the name of this AVFoundation delegate method only as of Swift 4. In Swift 3.0 and 3.2, the correct method is:
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!)
That should run!
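For reference, here is a minimal sketch of the same handler under both signatures; only the one matching your Swift version should exist in the project. The bodies are adapted from the question and untested:

    // Swift 3.0 / 3.2
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        guard let metadataObj = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              metadataObj.type == AVMetadataObjectTypeQRCode else { return }
        print(metadataObj.stringValue ?? "")
    }

    // Swift 4: the method was renamed and the parameters are now typed
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        guard let metadataObj = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              metadataObj.type == .qr else { return }
        print(metadataObj.stringValue ?? "")
    }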

The extension code you posted does not compile for me. I had to add public to the method declaration:
public func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
I am using Swift 4.

Change the code as follows. It may work; at least it worked for me. I am using Swift 4.2:
captureMetadataOutput.metadataObjectTypes = captureMetadataOutput.availableMetadataObjectTypes
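One detail worth knowing here: availableMetadataObjectTypes is only populated once the output has been attached to a session, so both that assignment and any hand-picked list of types must come after addOutput(_:). A minimal ordering sketch:

    let captureMetadataOutput = AVCaptureMetadataOutput()
    captureSession.addOutput(captureMetadataOutput)   // attach to the session first
    captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
    // Only now does availableMetadataObjectTypes reflect what the session can deliver.
    captureMetadataOutput.metadataObjectTypes = captureMetadataOutput.availableMetadataObjectTypes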

Related

Swift - captureOutput is not being executed

I am currently trying to implement a live camera feed in my app. I've got it set up, but somehow it's not working as expected. As far as I understand, captureOutput should be executed every time a frame is recognized, and the print message should appear in the console, but it doesn't - the console never shows the print output.
Does anybody see a possible mistake in the code?
I don't know whether it's connected to my problem, but at the start of the app the console shows the following:
[BoringSSL] nw_protocol_boringssl_get_output_frames(1301) [C1.1:2][0x106b24530] get output frames failed, state 8196
import UIKit
import AVKit
import Vision
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    override func viewDidLoad() {
        super.viewDidLoad()

        let captureSession = AVCaptureSession()

        guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }
        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        captureSession.addInput(input)

        captureSession.startRunning()

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(previewLayer)
        previewLayer.frame = view.frame

        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        captureSession.addOutput(dataOutput)

        // let request = VNCoreMLRequest
        // VNImageRequestHandler(cgImage: <#T##CGImage#>, options: [:]).perform(request)
    }

    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        print("Es hat funktioniert")
    }
}
You need to implement captureOutput(_:didOutput:from:), not captureOutput(_:didDrop:from:):

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        print("Es hat funktioniert")
    }
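As a side note, the viewDidLoad above also calls startRunning() before the data output has been added. Adding an output to an already-running session does generally work, since the session picks up configuration changes, but the usual pattern is to finish configuration first and start the session last. A sketch of that ordering, unrelated to the actual bug above:

    let captureSession = AVCaptureSession()
    // ...add the camera input as in the question...
    let dataOutput = AVCaptureVideoDataOutput()
    dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
    captureSession.addOutput(dataOutput)   // configure everything first...
    captureSession.startRunning()          // ...then start the session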

QR code is not working - Delegate method is not getting called

I have seen many threads and tutorials regarding this, but nothing solved my problem. I have followed this tutorial for implementing a simple QR code application, but QR codes are not getting detected. In most QR-related questions the issue was the old name of the delegate method; I have verified that I am using the updated delegate method, but it is still not working.
This is my code. Can anyone please highlight the silly mistake I am making?
import UIKit
import AVFoundation

class QRScannerController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    @IBOutlet var messageLabel: UILabel!
    @IBOutlet var topbar: UIView!

    var captureSession: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var qrCodeFrameView: UIView?

    let supportedCodeTypes = [AVMetadataObjectTypeQRCode,
                              AVMetadataObjectTypeCode39Code,
                              AVMetadataObjectTypeUPCECode,
                              AVMetadataObjectTypeCode39Mod43Code,
                              AVMetadataObjectTypeEAN13Code,
                              AVMetadataObjectTypeEAN8Code,
                              AVMetadataObjectTypeCode93Code,
                              AVMetadataObjectTypeCode128Code,
                              AVMetadataObjectTypePDF417Code,
                              AVMetadataObjectTypeAztecCode,
                              AVMetadataObjectTypeInterleaved2of5Code,
                              AVMetadataObjectTypeITF14Code,
                              AVMetadataObjectTypeDataMatrixCode] as [String]

    override func viewDidLoad() {
        super.viewDidLoad()

        // Get an instance of the AVCaptureDevice class to initialize a device object and provide the video as the media type parameter.
        let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)

        do {
            // Get an instance of the AVCaptureDeviceInput class using the previous device object.
            let input = try AVCaptureDeviceInput(device: captureDevice!)

            // Initialize the captureSession object.
            captureSession = AVCaptureSession()

            // Set the input device on the capture session.
            captureSession?.addInput(input)

            // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
            let captureMetadataOutput = AVCaptureMetadataOutput()
            captureSession?.addOutput(captureMetadataOutput)

            // Set delegate and use the default dispatch queue to execute the call back
            captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            captureMetadataOutput.metadataObjectTypes = supportedCodeTypes

            // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
            videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
            videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
            videoPreviewLayer?.frame = view.layer.bounds
            view.layer.addSublayer(videoPreviewLayer!)

            // Start video capture.
            captureSession?.startRunning()

            // Move the message label and top bar to the front
            view.bringSubview(toFront: messageLabel)
            view.bringSubview(toFront: topbar)

            // Initialize QR Code Frame to highlight the QR code
            qrCodeFrameView = UIView()

            if let qrCodeFrameView = qrCodeFrameView {
                qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
                qrCodeFrameView.layer.borderWidth = 2
                view.addSubview(qrCodeFrameView)
                view.bringSubview(toFront: qrCodeFrameView)
            }
        } catch {
            // If any error occurs, simply print it out and don't continue any more.
            print(error)
            return
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: - AVCaptureMetadataOutputObjectsDelegate Methods

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // Check if the metadataObjects array is not nil and it contains at least one object.
        if metadataObjects.count == 0 {
            qrCodeFrameView?.frame = CGRect.zero
            messageLabel.text = "No QR/barcode is detected"
            return
        }

        // Get the metadata object.
        let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject

        if supportedCodeTypes.contains(where: { $0 == metadataObj.type }) {
            // If the found metadata is equal to the QR code metadata then update the status label's text and set the bounds
            let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
            qrCodeFrameView?.frame = barCodeObject!.bounds

            if metadataObj.stringValue != nil {
                messageLabel.text = metadataObj.stringValue
            }
        }
    }
}
I am testing it on an iPod running iOS 9.3.5; the code is Swift 3, built with Xcode 8.3.2.
With Swift 3 (Xcode 8.x), metadataOutput is called captureOutput. Change the name and the signature and it should work flawlessly:
    func captureOutput(_ output: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        // func code
    }
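For completeness, here is the question's handler moved to the Swift 3 name, as an untested sketch; the body is unchanged except that in Swift 3 the metadata array arrives as [Any]!:

    func captureOutput(_ output: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        if metadataObjects == nil || metadataObjects.count == 0 {
            qrCodeFrameView?.frame = CGRect.zero
            messageLabel.text = "No QR/barcode is detected"
            return
        }

        let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject

        if supportedCodeTypes.contains(where: { $0 == metadataObj.type }) {
            let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
            qrCodeFrameView?.frame = barCodeObject!.bounds

            if metadataObj.stringValue != nil {
                messageLabel.text = metadataObj.stringValue
            }
        }
    }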

Update to Swift 4 breaks app

My friends and I recently updated our project from Swift 3.2 to Swift 4. Unfortunately, our QR reader function no longer works correctly. The video output is fine: when we load the page, the screen displays what the phone is looking at. However, it no longer places the sizing box on the screen or recognizes the QR codes in front of it.
import UIKit
import AVFoundation

class ScanViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    @IBOutlet var messageLabel: UILabel!
    @IBOutlet var topbar: UIView!

    var captureSession: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var qrCodeFrameView: UIView?

    override func viewDidLoad() {
        super.viewDidLoad()

        // Get an instance of the AVCaptureDevice class to initialize a device object and provide the video as the media type parameter.
        let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)

        do {
            // Get an instance of the AVCaptureDeviceInput class using the previous device object.
            let input = try AVCaptureDeviceInput(device: captureDevice)

            // Initialize the captureSession object.
            captureSession = AVCaptureSession()

            // Set the input device on the capture session.
            captureSession?.addInput(input)

            // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
            let captureMetadataOutput = AVCaptureMetadataOutput()
            captureSession?.addOutput(captureMetadataOutput)

            // Set delegate and use the default dispatch queue to execute the call back
            captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            captureMetadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode]

            // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
            videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
            videoPreviewLayer?.frame = view.layer.bounds
            view.layer.addSublayer(videoPreviewLayer!)

            // Move the message label and top bar to the front
            view.bringSubview(toFront: messageLabel)
            view.bringSubview(toFront: topbar)

            // Start video capture.
            captureSession?.startRunning()

            // Initialize QR Code Frame to highlight the QR code
            qrCodeFrameView = UIView()

            if let qrCodeFrameView = qrCodeFrameView {
                qrCodeFrameView.layer.borderColor = UIColor.randomColor().cgColor
                qrCodeFrameView.layer.borderWidth = 4
                view.addSubview(qrCodeFrameView)
                view.bringSubview(toFront: qrCodeFrameView)
            }
        } catch {
            // If any error occurs, simply print it out and don't continue any more.
            print(error)
            return
        }
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        // Check if the metadataObjects array is not nil and it contains at least one object.
        if metadataObjects == nil || metadataObjects.count == 0 {
            qrCodeFrameView?.frame = CGRect.zero
            messageLabel.text = "No QR code is detected"
            return
        }

        // Get the metadata object.
        let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject

        if metadataObj.type == AVMetadataObjectTypeQRCode {
            // If the found metadata is equal to the QR code metadata then update the status label's text and set the bounds
            let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
            qrCodeFrameView?.frame = barCodeObject!.bounds

            if metadataObj.stringValue != nil { // Output
                messageLabel.text = metadataObj.stringValue
                performSegue(withIdentifier: "QRFound", sender: self)
            }
        }
    }

    @IBAction func cancel(_ sender: UIButton) {
        if let owningNavController = navigationController {
            owningNavController.popViewController(animated: true)
        }
        dismiss(animated: true, completion: nil)
    }
}
Your delegate function is not correct, so it’s never called. The correct one is:
optional func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)
See the documentation page.
That seems to have changed in Swift 4.
EDIT: Also see this answer.
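Concretely, renaming the question's handler to the Swift 4 signature would look like this (a sketch with the same body; in Swift 4 the array is a non-optional [AVMetadataObject], so the nil check goes away, and the type constants become AVMetadataObject.ObjectType, so the comparison uses .qr):

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if metadataObjects.count == 0 {
            qrCodeFrameView?.frame = CGRect.zero
            messageLabel.text = "No QR code is detected"
            return
        }

        let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject

        if metadataObj.type == .qr {
            let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
            qrCodeFrameView?.frame = barCodeObject!.bounds

            if metadataObj.stringValue != nil {
                messageLabel.text = metadataObj.stringValue
                performSegue(withIdentifier: "QRFound", sender: self)
            }
        }
    }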

Reading QRCodes with AVCaptureSession

We have implemented reading QR codes on receipts using AVCaptureSession. One issue we have noticed is that the hit rate for reading the QR code is rather finicky: you really have to work at it to get it to recognize that there is a QR code on the receipt. I have googled around trying to see if anyone has published material on how to increase the hit rate. I have implemented autofocus, and that has helped marginally.
I did take a look at Reading on QRCodes on iOS with AVCaptureSession -- alignment issues? but there were no actionable suggestions there.
Any thoughts on other approaches?
Here is the reader code:
import UIKit
import AVFoundation

final class BarcodeReader: NSObject {

    fileprivate var captureSession: AVCaptureSession?
    fileprivate var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    fileprivate unowned let barcodeReaderDelegate: BarcodeReaderDelegate

    init(barcodeReaderDelegate: BarcodeReaderDelegate) {
        self.barcodeReaderDelegate = barcodeReaderDelegate
    }

    func start(in view: UIView) throws {
        if captureSession == nil {
            captureSession = try configuredCaptureSession()
        }

        if videoPreviewLayer == nil {
            videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
            videoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
            videoPreviewLayer!.frame = view.layer.bounds
            view.layer.addSublayer(videoPreviewLayer!)
        }

        captureSession!.startRunning()
    }

    fileprivate func configuredCaptureSession() throws -> AVCaptureSession {
        let captureSession = AVCaptureSession()

        // NOTE: Remember to add a message in your Info.plist file under the
        // key NSCameraUsageDescription or this will crash the app.
        let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)

        // Make sure that auto focus is turned on.
        if (device?.isFocusModeSupported(.continuousAutoFocus))! {
            try device?.lockForConfiguration()
            device?.focusMode = .continuousAutoFocus
            device?.unlockForConfiguration()
        }

        let input = try AVCaptureDeviceInput(device: device)
        captureSession.addInput(input)

        let output = AVCaptureMetadataOutput()
        captureSession.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        output.metadataObjectTypes = [AVMetadataObjectTypeQRCode]

        return captureSession
    }

    func stop() {
        captureSession?.stopRunning()
    }
}

extension BarcodeReader: AVCaptureMetadataOutputObjectsDelegate {
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        guard let metadataObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject else { return }
        guard metadataObject.type == AVMetadataObjectTypeQRCode else { return }

        stop()
        barcodeReaderDelegate.barcodeReader(self, found: metadataObject.stringValue)
    }
}

protocol BarcodeReaderDelegate: class {
    func barcodeReader(_ barcodeReader: BarcodeReader, found code: String)
}
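Two approaches that can raise the hit rate, offered as a hedged sketch rather than a measured fix: restrict autofocus to the near range (receipts are scanned close up), and narrow the metadata output's rectOfInterest so the detector searches a smaller region instead of the whole frame. In the Swift 3 API used above, the layer-to-output rect conversion is metadataOutputRectOfInterest(for:); the 200x200 scan square below is an arbitrary illustrative choice. Something like this could go in start(in:) after the preview layer is set up:

    // Restrict autofocus to the near range, which suits close-up scanning.
    if let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo),
       device.isAutoFocusRangeRestrictionSupported {
        try device.lockForConfiguration()
        device.autoFocusRangeRestriction = .near
        device.unlockForConfiguration()
    }

    // Search only a square in the middle of the preview instead of the whole frame.
    // rectOfInterest is expressed in metadata-output coordinates, hence the
    // conversion from layer coordinates; it must be set after the output is attached.
    if let previewLayer = videoPreviewLayer,
       let output = captureSession?.outputs.first as? AVCaptureMetadataOutput {
        let scanRect = CGRect(x: view.bounds.midX - 100, y: view.bounds.midY - 100, width: 200, height: 200)
        output.rectOfInterest = previewLayer.metadataOutputRectOfInterest(for: scanRect)
    }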

Swift: captureOutput not being called

So I am working on my first Swift app which was going fine until I got stuck here. Please take a look at the code below.
//
//  CameraFrames.swift
//  Explore
//
//  Created by Kushagra Agarwal on 13/08/15.
//  Copyright © 2015 Kushagra Agarwal. All rights reserved.
//

import Foundation
import AVFoundation

protocol CameraFramesDelegate {
    func processCameraFrames(sampleBuffer: CMSampleBufferRef)
}

class CameraFrames: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    var previewLayer: AVCaptureVideoPreviewLayer?
    var delegate: CameraFramesDelegate?

    let captureSession = AVCaptureSession()
    var captureDevice: AVCaptureDevice?

    override init() {
        super.init()

        captureSession.beginConfiguration()

        // Capture the session with High settings preset
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        self.captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

        // Because the old error handling method is deprecated
        do {
            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
        } catch {
            print("Error in getting input from camera")
        }

        self.previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)

        let output = AVCaptureVideoDataOutput()
        var outputQueue: dispatch_queue_t?
        outputQueue = dispatch_queue_create("outputQueue", DISPATCH_QUEUE_SERIAL)
        output.setSampleBufferDelegate(self, queue: outputQueue)
        output.alwaysDiscardsLateVideoFrames = true
        output.videoSettings = nil
        captureSession.addOutput(output)

        captureSession.commitConfiguration()
        captureSession.startRunning()
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        print("frame dropped")
    }

    func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
        print("frame received")
    }
}
The preview shows up fine on the phone, but captureOutput is never called for some reason. I looked at some old SO threads, but none of them helped me resolve this issue. Any idea what the reason behind it might be?
Edit: I forgot to mention that I have a view controller in which I am previewing the AVCaptureVideoPreviewLayer layer, which works well. I have a feeling that the issue is somewhere in setting up the outputQueue, but I can't figure out what.
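One thing worth double-checking, offered only as a hedged pointer since no accepted fix is recorded here: in the Swift 2 era SDK, these delegate methods are declared with implicitly unwrapped optional parameters, and the question's didOutputSampleBuffer variant uses non-optional types while its didDropSampleBuffer variant uses the ! form. Matching the SDK declaration exactly rules out a signature mismatch:

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        print("frame received")
    }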
