Ugh. My struggle with changes in Swift 4 AVFoundation continues.
I have a data matrix "QR" code that I'm trying to read.
It reads just fine when compiled with Swift 3, but with the Swift 4 changes to the code it is no longer picked up.
Also note that the Apple-provided example that is supposed to work with Swift 4 does not read the DataMatrix code either.
When I print out the available types, Data Matrix is available.
print("types available:\n \(metadataOutput.availableMetadataObjectTypes)")
yields:
types available:
[__ObjC.AVMetadataObject.ObjectType(_rawValue: face), ...
__ObjC.AVMetadataObject.ObjectType(_rawValue: org.iso.DataMatrix), ...
However, didOutput metadataObjects: is never called when I run the code. It DOES get called for every other type, however.
Additionally, adding the type explicitly:
metadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.dataMatrix]
does nothing different.
Anyone have experience with scanning DataMatrix in Swift 4?
Code:
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    var videoCaptureDevice: AVCaptureDevice = AVCaptureDevice.default(for: AVMediaType.video)!
    var device = AVCaptureDevice.default(for: AVMediaType.video)
    var output = AVCaptureMetadataOutput()
    var previewLayer: AVCaptureVideoPreviewLayer?

    var captureSession = AVCaptureSession()
    var code: String?

    var scannedCode = UILabel()

    override func viewDidLoad() {
        super.viewDidLoad()
        self.setupCamera()
        self.addLabelForDisplayingCode()
    }

    private func setupCamera() {
        let input = try? AVCaptureDeviceInput(device: videoCaptureDevice)

        if self.captureSession.canAddInput(input!) {
            self.captureSession.addInput(input!)
        }

        self.previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)

        if let videoPreviewLayer = self.previewLayer {
            videoPreviewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
            videoPreviewLayer.frame = self.view.bounds
            view.layer.addSublayer(videoPreviewLayer)
        }

        let metadataOutput = AVCaptureMetadataOutput()
        if self.captureSession.canAddOutput(metadataOutput) {
            self.captureSession.addOutput(metadataOutput)

            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            print("types available:\n \(metadataOutput.availableMetadataObjectTypes)")
            metadataOutput.metadataObjectTypes = metadataOutput.availableMetadataObjectTypes
            // metadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.dataMatrix]
        } else {
            print("Could not add metadata output")
        }
    }

    private func addLabelForDisplayingCode() {
        view.addSubview(scannedCode)
        scannedCode.translatesAutoresizingMaskIntoConstraints = false
        scannedCode.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -20.0).isActive = true
        scannedCode.leadingAnchor.constraint(equalTo: view.leadingAnchor, constant: 20.0).isActive = true
        scannedCode.trailingAnchor.constraint(equalTo: view.trailingAnchor, constant: -20.0).isActive = true
        scannedCode.heightAnchor.constraint(equalToConstant: 50).isActive = true
        scannedCode.font = UIFont.preferredFont(forTextStyle: .title2)
        scannedCode.backgroundColor = UIColor.black.withAlphaComponent(0.5)
        scannedCode.textAlignment = .center
        scannedCode.textColor = UIColor.white
        scannedCode.text = "Scanning...."
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        if captureSession.isRunning == false {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        if captureSession.isRunning == true {
            captureSession.stopRunning()
        }
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        print(metadataObjects)
        for metadata in metadataObjects {
            // Conditional cast: with all available types enabled, face
            // metadata can arrive here, and it is not machine-readable.
            guard let readableObject = metadata as? AVMetadataMachineReadableCodeObject else { continue }
            let code = readableObject.stringValue
            scannedCode.text = code
        }
    }
}
Thanks so much.
You have to make sure the connection is not mirrored.
The data matrix needs to be read in the original format.
https://developer.apple.com/documentation/avfoundation/avcaptureconnection/1389172-isvideomirrored
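Applied to the setup code above, that might look something like this sketch. It has to run after the metadata output has been added to the session, because the connection only exists from that point on:

if let connection = metadataOutput.connection(with: .video),
   connection.isVideoMirroringSupported {
    // Turn off automatic adjustment first; setting isVideoMirrored
    // while it is enabled raises an exception.
    connection.automaticallyAdjustsVideoMirroring = false
    connection.isVideoMirrored = false
}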
Related
I'm trying to use a Swift class from hackingswift.com that reads QR codes. This code has apparently been used in the past by posters to this forum, but it doesn't work now in Swift 5.5. I get an error 'expression failed to parse, unknown error' on the line
view.layer.addSublayer(previewLayer)
in the following. Any help would be appreciated.
//: A UIKit based Playground for presenting user interface

import AVFoundation
import UIKit
import PlaygroundSupport

class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidLoad() {
        super.viewDidLoad()

        view.backgroundColor = UIColor.black
        captureSession = AVCaptureSession()

        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        let videoInput: AVCaptureDeviceInput

        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }

        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }

        let metadataOutput = AVCaptureMetadataOutput()

        if (captureSession.canAddOutput(metadataOutput)) {
            captureSession.addOutput(metadataOutput)
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.qr]
        } else {
            failed()
            return
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)

        captureSession.startRunning()
    }

    func failed() {
        let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "OK", style: .default))
        present(ac, animated: true)
        captureSession = nil
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        if (captureSession?.isRunning == false) {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        if (captureSession?.isRunning == true) {
            captureSession.stopRunning()
        }
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        captureSession.stopRunning()

        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            found(code: stringValue)
        }

        dismiss(animated: true)
    }

    func found(code: String) {
        print(code)
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }
}

// Present the view controller in the Live View window
PlaygroundPage.current.liveView = ScannerViewController()
This is an annoying bug in playgrounds. Leaving aside for a moment that there is no capture device available in playgrounds or the simulator, so none of this will work anyway, the fix is to treat view as an optional:
view?.layer.addSublayer(previewLayer)
I have no idea why this is the case, and neither, it seems, do some other people.
.addSublayer is working as expected.
You can try out the sample code below in the playground for testing.
let view = UIView(frame: CGRect(x: 0, y: 0, width: 200, height: 200))
view.backgroundColor = .cyan
let layer = CALayer()
layer.bounds = CGRect(x: 0, y: 0, width: 100, height: 100)
layer.position = CGPoint(x: 200/2, y: 200/2)
layer.backgroundColor = UIColor.magenta.cgColor
layer.borderWidth = 5
layer.borderColor = UIColor.black.cgColor
view.layer.addSublayer(layer)
PlaygroundPage.current.liveView = view
I quickly tried the sample code you provided in Playgrounds, and AVCaptureDevice.default(for: .video) seems to fail.
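A quick way to see that for yourself (a sketch; run it in an iOS playground):

import AVFoundation

// Playgrounds and the simulator expose no camera hardware,
// so this prints nil rather than a device.
print(AVCaptureDevice.default(for: .video) as Any)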
I am trying to make a barcode scanner app. As soon as the camera session begins, the app crashes within a few seconds.
I am unable to find the reason behind this, or how to fix it.
I used https://www.appcoda.com/barcode-reader-swift/ to make the barcode scanner.
import Foundation
import UIKit
import AVFoundation
import CoreData

enum BarcodeScanError: String {
    case cameraLoadFailed = "Camera Load Failed"
    case NoValidBarcode = "No Valid Barcode"
}

class ScanBoardingPassViewController: UIViewController {

    //MARK: - Properties
    var viewModel: ScanBoardingPassViewModel? = nil
    var captureSession: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var qrCodeFrameView: UIView?

    private let supportedCodeTypes = [AVMetadataObject.ObjectType.aztec,
                                      AVMetadataObject.ObjectType.pdf417]

    //MARK: - Outlets
    @IBOutlet weak var btnCancel: UIButton!

    //MARK: - View Life Cycle
    override func viewDidLoad() {
        viewModel = ScanBoardingPassViewModel()
        self.captureSession = AVCaptureSession()
        self.setUpView()
        super.viewDidLoad()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(true)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    //MARK: - Set Up View
    func setUpView() {
        self.setUpBarCodeScanner()
        self.view.bringSubviewToFront(self.btnCancel)
        self.setUpBarcodeRecognizerFrame()
    }

    private func setUpBarCodeScanner() {
        // Get the back-facing camera for capturing videos
        guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
            debugPrint(BarcodeScanError.cameraLoadFailed)
            return
        }

        do {
            // Get an instance of the AVCaptureDeviceInput class using the previous device object.
            let input = try AVCaptureDeviceInput(device: captureDevice)

            // Set the input device on the capture session.
            captureSession?.addInput(input)

            // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
            let captureMetadataOutput = AVCaptureMetadataOutput()
            captureSession?.addOutput(captureMetadataOutput)

            // Set delegate and use the default dispatch queue to execute the call back
            captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
            // captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
        } catch {
            // If any error occurs, simply print it out and don't continue any more.
            print(error)
            return
        }

        // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
        videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        videoPreviewLayer?.frame = view.layer.bounds
        view.layer.addSublayer(videoPreviewLayer!)

        // Start video capture.
        captureSession?.startRunning()
    }

    private func setUpBarcodeRecognizerFrame() {
        // Initialize QR Code Frame to highlight the QR code
        qrCodeFrameView = UIView()

        if let qrCodeFrameView = qrCodeFrameView {
            qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
            qrCodeFrameView.layer.borderWidth = 2
            view.addSubview(qrCodeFrameView)
            view.bringSubviewToFront(qrCodeFrameView)
        }
    }

    //MARK: - Outlets
    @IBAction func btnCancelPressed(_ sender: UIButton) {
        self.dismissView()
    }

    func dismissView() {
        self.dismiss(animated: true, completion: nil)
    }
}

extension ScanBoardingPassViewController: AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // Check if the metadataObjects array is not nil and it contains at least one object.
        if metadataObjects.count == 0 {
            qrCodeFrameView?.frame = CGRect.zero
            debugPrint(BarcodeScanError.NoValidBarcode)
            return
        }

        // Get the metadata object.
        let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject

        if supportedCodeTypes.contains(metadataObj.type) {
            // If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
            let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
            qrCodeFrameView?.frame = barCodeObject!.bounds

            if metadataObj.stringValue != nil {
                captureSession?.stopRunning()
                debugPrint("Valid Barcode found \(metadataObj.stringValue!)")
                if let boardingPass = viewModel?.parseBoardingPassString(boardingPassString: metadataObj.stringValue!) {
                    let unitOfWork = UnitOfWork(context: (UIApplication.shared.delegate as! AppDelegate).persistentContainer.newBackgroundContext())
                    unitOfWork.boardingPassRepository.saveBoardingPasses(boardingPass: boardingPass)
                    unitOfWork.saveChanges()
                    print(unitOfWork.boardingPassRepository.getBoardingPasses(predicate: nil))
                    self.dismissView()
                }
            }
        }
    }
}
The camera doesn't get stuck, but the app drops into lldb every time within a few seconds.
// Created by Satya Narayana on 17/11/20.
//

import UIKit
import AVFoundation

class QRViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    //MARK: Outlets
    @IBOutlet weak var qrLbl: UILabel! // Barcode-displaying label
    @IBOutlet weak var sView: UIView! // View

    //MARK: Variables
    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!

    //MARK: View Methods
    override func viewDidLoad() {
        super.viewDidLoad()

        captureSession = AVCaptureSession()

        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        let videoInput: AVCaptureDeviceInput

        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }

        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }

        let metadataOutput = AVCaptureMetadataOutput()

        if (captureSession.canAddOutput(metadataOutput)) {
            captureSession.addOutput(metadataOutput)
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.qr, .ean13, .code128]
        } else {
            failed()
            return
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = sView.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        sView.layer.addSublayer(previewLayer)

        captureSession.startRunning()
    }

    func failed() {
        self.showToast(message: "Scanning not supported. Your device does not support scanning a code from an item. Please use a device with a camera.", seconds: 1.0)
        captureSession = nil
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        qrLbl.isHidden = true
        if (captureSession?.isRunning == false) {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        if (captureSession?.isRunning == true) {
            captureSession.stopRunning()
        }
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        captureSession.stopRunning()

        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            found(code: stringValue)
        }

        dismiss(animated: true)
    }

    //MARK:- Found barcode
    func found(code: String) {
        print(code)
        if code != "" {
            print(code) // This is the barcode
            qrLbl.text = code
        } else {
            // If you need to run the scanner again, uncomment the line below.
            //self.captureSession.startRunning()
        }
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }
}
I am scanning a QR code from within a webView.
The QR code scans well and the data can be read,
but the problem is that the camera screen won't close after scanning, even though I'm calling a dismiss() function.
webView Load
@IBOutlet weak var indicator: UIImageView!
@IBOutlet var wkWebView: WKWebView!

...

let config = WKWebViewConfiguration()
contentController.add(self, name: "native")
config.userContentController = contentController

wkWebView = WKWebView(frame: wkWebView.frame, configuration: config)
wkWebView.uiDelegate = self
wkWebView.navigationDelegate = self
view.addSubview(wkWebView)
view.addSubview(indicator)

// Bundle.main.url(forResource:withExtension:) returns an optional URL,
// so it must be unwrapped before building the request.
let localFilePath = Bundle.main.url(forResource: webUrl, withExtension: "html")
let myRequest = URLRequest(url: localFilePath!)
wkWebView.load(myRequest)
QRCode Scan
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    if (captureSession?.isRunning == false) {
        captureSession.startRunning()
    }
}

override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    if (captureSession?.isRunning == true) {
        captureSession.stopRunning()
    }
}

func qrcodeScan() {
    view.backgroundColor = UIColor.black
    captureSession = AVCaptureSession()

    guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
    let videoInput: AVCaptureDeviceInput

    do {
        videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
    } catch {
        return
    }

    if (captureSession.canAddInput(videoInput)) {
        captureSession.addInput(videoInput)
    } else {
        failed()
        return
    }

    let metadataOutput = AVCaptureMetadataOutput()

    if (captureSession.canAddOutput(metadataOutput)) {
        captureSession.addOutput(metadataOutput)
        metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        metadataOutput.metadataObjectTypes = [.qr]
    } else {
        failed()
        return
    }

    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer.frame = view.layer.bounds
    previewLayer.videoGravity = .resizeAspectFill
    view.layer.addSublayer(previewLayer)

    captureSession.startRunning()
}

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    captureSession.stopRunning()

    if let metadataObject = metadataObjects.first {
        guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
        guard let stringValue = readableObject.stringValue else { return }
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        found(code: stringValue)
    }

    self.dismiss(animated: true, completion: nil)
}

func found(code: String) {
    Log.Info(code)
}

func failed() {
    captureSession = nil
}

override var prefersStatusBarHidden: Bool {
    return true
}

override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
    return .portrait
}
As you can see from my code, I'm trying to take the camera view down after reading the QR code.
But the camera screen just stays frozen. No matter how long I wait, I never see the web view. What's the problem?
The previewLayer is covering the whole screen (its frame is the view's bounds) and hiding the webview, so you should remove the previewLayer from its superlayer to reveal the webview.
So the solution is to use this:
previewLayer.removeFromSuperlayer()
instead of
self.dismiss(animated: true, completion: nil)
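Applied to the delegate method from the question, a sketch of the change:

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    captureSession.stopRunning()

    if let metadataObject = metadataObjects.first {
        guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
        guard let stringValue = readableObject.stringValue else { return }
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        found(code: stringValue)
    }

    // Removing the layer reveals the web view underneath; there is no
    // presented view controller here for dismiss(animated:) to dismiss.
    previewLayer.removeFromSuperlayer()
}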
There seems to be a lot of issues similar to what I am experiencing:
AVmetadata changes with swift 4 xcode 9
AVCaptureMetadataOutput setMetadataObjectTypes unsupported type found
And there is an Apple bug that deals with AVFoundation:
https://forums.developer.apple.com/thread/86810#259270
But none of those seem to actually be the answer for me.
I have code that runs great in Swift 3 but only errors out in Swift 4. Using the solutions in the above links results in no change at all.
Code:
import UIKit
import AVFoundation

class BarCodeScanViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    weak var delegate: FlowControllerDelegate?
    var captureSession: AVCaptureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()

        view.backgroundColor = UIColor.black
        captureSession = AVCaptureSession()

        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        let videoInput: AVCaptureDeviceInput

        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }

        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }

        // let captureMetadataOutput = AVCaptureMetadataOutput()
        let metadataOutput = AVCaptureMetadataOutput()

        if captureSession.canAddOutput(metadataOutput) {
            captureSession.addOutput(metadataOutput)

            // Check status of camera permissions
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            // metadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.upce]
            metadataOutput.metadataObjectTypes = [.ean8, .ean13, .pdf417, .upce]
        } else {
            failed()
            return
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)

        captureSession.startRunning()
    }

    func failed() {
        let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "OK", style: .default))
        present(ac, animated: true)
        // captureSession = nil
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        if (captureSession.isRunning == false) {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        if captureSession.isRunning == true {
            captureSession.stopRunning()
        }
        super.viewWillDisappear(animated)
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        captureSession.stopRunning()

        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            found(code: stringValue)
        }

        dismiss(animated: true)
    }

    func found(code: String) {
        print(code)
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }
}
When I build this code with Xcode 8 and Swift 3 it works fine. When I run it with Xcode 9 and Swift 4, it crashes when adding the media types:
metadataOutput.metadataObjectTypes = [.ean8, .ean13, .pdf417, .upce]
In both cases I am building to an iOS 11 device that did not have the beta on it previously.
I have tried the "__" prefix to see if it was the Apple bug mentioned above.
If I comment the line out, the code runs but there is no capture.
Is there some other bug Apple introduced, maybe? Anyone else having this issue?
Any help would be appreciated.
Thanks
Further information for clarity:
Leevi Graham is correct, and it is also true that Apple changed the stack without proper documentation, which makes it seem like there is a bug.
Barcode on swift 4
Clarifying comments that helped me:
The delegate callback has changed from:
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!)
to
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)
HOWEVER, the real problem I was having was that you no longer set a long, hard-coded array of types for metadataObjectTypes. You can now simply ask for all available types:
metadataOutput.metadataObjectTypes = metadataOutput.availableMetadataObjectTypes
So ...
This is, in fact, an API issue. Several radar issues were filed for it, but Apple has kindly updated their AVFoundation docs to address the issue.
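Putting both changes together, a minimal Swift 4 sketch (the class and method names here are just for illustration):

import UIKit
import AVFoundation

class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    let captureSession = AVCaptureSession()

    func configureMetadataOutput() {
        let metadataOutput = AVCaptureMetadataOutput()
        if captureSession.canAddOutput(metadataOutput) {
            captureSession.addOutput(metadataOutput)
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            // Ask the output which types it can actually deliver instead of
            // hard-coding a list; requesting an unsupported type raises an exception.
            metadataOutput.metadataObjectTypes = metadataOutput.availableMetadataObjectTypes
        }
    }

    // The renamed Swift 4 delegate callback.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        print(metadataObjects)
    }
}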
I created a simple app that is supposed to show a live image from the iPhone camera in a view, adapting this tutorial, which is written in Swift 2, to Swift 3.
I use Xcode 8.1 and run the app on an iPhone 6 with iOS 9.3.4;
my deployment target is set to 9.1.
When I run the app, the code completes without an error, but the camera image is not showing. The app doesn't even show the permission alert for using the camera.
What am I doing wrong?
Following is my complete code.
import UIKit
import AVFoundation

class ViewController: UIViewController {

    // MARK: UI
    let cameraView: UIView = {
        let v = UIView()
        v.translatesAutoresizingMaskIntoConstraints = false
        return v
    }()

    // MARK: System
    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()
        setupSubviews()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        cameraSetup()
    }

    func setupSubviews() {
        view.addSubview(cameraView)
        cameraView.backgroundColor = UIColor.orange
        cameraView.centerXAnchor.constraint(equalTo: view.centerXAnchor).isActive = true
        cameraView.centerYAnchor.constraint(equalTo: view.centerYAnchor).isActive = true
        cameraView.widthAnchor.constraint(equalToConstant: 200).isActive = true
        cameraView.heightAnchor.constraint(equalToConstant: 200).isActive = true
    }

    func cameraSetup() {
        print("**** Start Camera Setup ****")

        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)! as! [AVCaptureDevice]
        for device in devices {
            if device.position == .front {
                print("Front")
                do {
                    let input = try AVCaptureDeviceInput(device: device)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
                        print("Input added")

                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)
                            captureSession.startRunning()

                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                            cameraView.layer.addSublayer(previewLayer)
                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                            print("Output added")
                        }
                    }
                } catch {
                    print("Error")
                }
            }
        }

        print("**** Finish Camera Setup ****")
    }
}