I've got an AVCaptureSession which starts like this on app init:
class FrameExtractor: NSObject {
private let captureSession = AVCaptureSession()
deinit {
captureSession.stopRunning()
}
fileprivate override init() {
super.init()
sessionQueue.async {
DispatchQueue.main.async { [weak self] in
guard let self = self else { return }
self.configureSession()
self.captureSession.startRunning()
self.listener = NotificationCenter.default.publisher(for: UIDevice.orientationDidChangeNotification)
.compactMap { _ in (UIApplication.shared.connectedScenes.first as? UIWindowScene)?.interfaceOrientation ?? .unknown }
.assign(to: \.orientation, on: self)
}
}
}
}
This works fine. But for some reason the captureSession stops after a certain amount of (completely unrelated) file input/output. So I was thinking maybe I should stop (captureSession.stopRunning()) and restart the captureSession around all file I/O, since this class never gets deinitialised, and the deinitialiser is where stopRunning lives. My question is: what would be the best practice for stopping this session from outside this class without starting additional threads (queues)?
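A minimal sketch of one possible shape for this (my assumption, not confirmed by the asker's full code): expose stop/start methods that hop onto the class's existing private sessionQueue, so no additional queues are created and all session mutations stay serialized on one queue. The method names suspendCapture/resumeCapture are mine, and the extension is assumed to live in the same file so it can see the private members.

extension FrameExtractor {
    // Call before heavy file I/O; reuses the existing sessionQueue.
    func suspendCapture() {
        sessionQueue.async { [weak self] in
            guard let self = self, self.captureSession.isRunning else { return }
            self.captureSession.stopRunning()
        }
    }

    // Call once the file I/O has finished.
    func resumeCapture() {
        sessionQueue.async { [weak self] in
            guard let self = self, !self.captureSession.isRunning else { return }
            self.captureSession.startRunning()
        }
    }
}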
UPDATE: I realised I had forgotten to ask for recording permission. That has now been fixed. However, when I press the Record button I get the error "Cannot create file". So when I start the recording, something may be wrong with the path?
I am working on an app where I want to run my own neural network, with the ability to start recording a video. Afterwards I want to play the video back and use information from the neural network.
I have a working version on Android; now I am trying to make something similar for iPhone. As a start, I have used the ImageClassifierExample from TensorFlowLite. The first task is to add a Record button which starts recording a video, and then a Play button which plays the video.
I have implemented the two features, but when I try to play the video, it just keeps loading. Either the recording is not working, or the video player is not working (or both). I have checked that the paths are the same.
I am not so familiar with iOS development, so some help would be nice.
This is the base I am starting from.
Here is my slightly adapted ViewController:
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import AVFoundation
import AVKit
import UIKit
class ViewController: UIViewController {
// MARK: Storyboards Connections
@IBOutlet weak var previewView: PreviewView!
@IBOutlet weak var cameraUnavailableLabel: UILabel!
@IBOutlet weak var resumeButton: UIButton!
@IBOutlet weak var bottomSheetView: CurvedView!
@IBOutlet weak var bottomSheetViewBottomSpace: NSLayoutConstraint!
@IBOutlet weak var bottomSheetStateImageView: UIImageView!
// MARK: Constants
private let animationDuration = 0.5
private let collapseTransitionThreshold: CGFloat = -40.0
private let expandThransitionThreshold: CGFloat = 40.0
private let delayBetweenInferencesMs: Double = 1000
// MARK: Instance Variables
// Holds the results at any time
private var result: Result?
private var initialBottomSpace: CGFloat = 0.0
private var previousInferenceTimeMs: TimeInterval = Date.distantPast.timeIntervalSince1970 * 1000
// MARK: Controllers that manage functionality
// Handles all the camera related functionality
private lazy var cameraCapture = CameraFeedManager(previewView: previewView)
private var isRecording = false // <<<----- Mine
private let captureSession: AVCaptureSession = AVCaptureSession()
// Handles all data preprocessing and makes calls to run inference through the `Interpreter`.
private var modelDataHandler: ModelDataHandler? =
ModelDataHandler(modelFileInfo: MobileNet.modelInfo, labelsFileInfo: MobileNet.labelsInfo)
@IBAction func startRecording(_ sender: Any) { // <<<----- Mine
print("Recording pressed")
if (!isRecording) {
cameraCapture.startRecording()
} else {
cameraCapture.stopRecording()
}
isRecording = !isRecording
}
// Handles the presenting of results on the screen
private var inferenceViewController: InferenceViewController?
// MARK: View Handling Methods
override func viewDidLoad() {
super.viewDidLoad()
guard modelDataHandler != nil else {
fatalError("Model set up failed")
}
#if targetEnvironment(simulator)
previewView.shouldUseClipboardImage = true
NotificationCenter.default.addObserver(self,
selector: #selector(classifyPasteboardImage),
name: UIApplication.didBecomeActiveNotification,
object: nil)
#endif
cameraCapture.delegate = self
addPanGesture()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
changeBottomViewState()
#if !targetEnvironment(simulator)
cameraCapture.checkCameraConfigurationAndStartSession()
#endif
}
#if !targetEnvironment(simulator)
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
cameraCapture.stopSession()
}
#endif
override var preferredStatusBarStyle: UIStatusBarStyle {
return .lightContent
}
func presentUnableToResumeSessionAlert() {
let alert = UIAlertController(
title: "Unable to Resume Session",
message: "There was an error while attempting to resume session.",
preferredStyle: .alert
)
alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
self.present(alert, animated: true)
}
// MARK: Storyboard Segue Handlers
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
super.prepare(for: segue, sender: sender)
if segue.identifier == "EMBED" {
guard let tempModelDataHandler = modelDataHandler else {
return
}
inferenceViewController = segue.destination as? InferenceViewController
inferenceViewController?.wantedInputHeight = tempModelDataHandler.inputHeight
inferenceViewController?.wantedInputWidth = tempModelDataHandler.inputWidth
inferenceViewController?.maxResults = tempModelDataHandler.resultCount
inferenceViewController?.threadCountLimit = tempModelDataHandler.threadCountLimit
inferenceViewController?.delegate = self
}
}
@objc func classifyPasteboardImage() {
guard let image = UIPasteboard.general.images?.first else {
return
}
guard let buffer = CVImageBuffer.buffer(from: image) else {
return
}
previewView.image = image
DispatchQueue.global().async {
self.didOutput(pixelBuffer: buffer)
}
}
deinit {
NotificationCenter.default.removeObserver(self)
}
}
// MARK: InferenceViewControllerDelegate Methods
extension ViewController: InferenceViewControllerDelegate {
func didChangeThreadCount(to count: Int) {
if modelDataHandler?.threadCount == count { return }
modelDataHandler = ModelDataHandler(
modelFileInfo: MobileNet.modelInfo,
labelsFileInfo: MobileNet.labelsInfo,
threadCount: count
)
}
}
// MARK: CameraFeedManagerDelegate Methods
extension ViewController: CameraFeedManagerDelegate {
func didOutput(pixelBuffer: CVPixelBuffer) {
let currentTimeMs = Date().timeIntervalSince1970 * 1000
guard (currentTimeMs - previousInferenceTimeMs) >= delayBetweenInferencesMs else { return }
previousInferenceTimeMs = currentTimeMs
// Pass the pixel buffer to TensorFlow Lite to perform inference.
result = modelDataHandler?.runModel(onFrame: pixelBuffer)
// Display results by handing off to the InferenceViewController.
DispatchQueue.main.async {
let resolution = CGSize(width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
self.inferenceViewController?.inferenceResult = self.result
self.inferenceViewController?.resolution = resolution
self.inferenceViewController?.tableView.reloadData()
}
}
// MARK: Session Handling Alerts
func sessionWasInterrupted(canResumeManually resumeManually: Bool) {
// Updates the UI when the session is interrupted.
if resumeManually {
self.resumeButton.isHidden = false
} else {
self.cameraUnavailableLabel.isHidden = false
}
}
func sessionInterruptionEnded() {
// Updates UI once session interruption has ended.
if !self.cameraUnavailableLabel.isHidden {
self.cameraUnavailableLabel.isHidden = true
}
if !self.resumeButton.isHidden {
self.resumeButton.isHidden = true
}
}
func sessionRunTimeErrorOccured() {
// Handles session run time error by updating the UI and providing a button if session can be manually resumed.
self.resumeButton.isHidden = false
previewView.shouldUseClipboardImage = true
}
func presentCameraPermissionsDeniedAlert() {
let alertController = UIAlertController(title: "Camera Permissions Denied", message: "Camera permissions have been denied for this app. You can change this by going to Settings", preferredStyle: .alert)
let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: nil)
let settingsAction = UIAlertAction(title: "Settings", style: .default) { (action) in
UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!, options: [:], completionHandler: nil)
}
alertController.addAction(cancelAction)
alertController.addAction(settingsAction)
present(alertController, animated: true, completion: nil)
previewView.shouldUseClipboardImage = true
}
func presentVideoConfigurationErrorAlert() {
let alert = UIAlertController(title: "Camera Configuration Failed", message: "There was an error while configuring camera.", preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
self.present(alert, animated: true)
previewView.shouldUseClipboardImage = true
}
}
// MARK: Bottom Sheet Interaction Methods
extension ViewController {
// MARK: Bottom Sheet Interaction Methods
/**
This method adds a pan gesture to make the bottom sheet interactive.
*/
private func addPanGesture() {
let panGesture = UIPanGestureRecognizer(target: self, action: #selector(ViewController.didPan(panGesture:)))
bottomSheetView.addGestureRecognizer(panGesture)
}
/** Change whether bottom sheet should be in expanded or collapsed state.
*/
private func changeBottomViewState() {
guard let inferenceVC = inferenceViewController else {
return
}
if bottomSheetViewBottomSpace.constant == inferenceVC.collapsedHeight - bottomSheetView.bounds.size.height {
bottomSheetViewBottomSpace.constant = 0.0
}
else {
bottomSheetViewBottomSpace.constant = inferenceVC.collapsedHeight - bottomSheetView.bounds.size.height
}
setImageBasedOnBottomViewState()
}
/**
Set image of the bottom sheet icon based on whether it is expanded or collapsed
*/
private func setImageBasedOnBottomViewState() {
if bottomSheetViewBottomSpace.constant == 0.0 {
bottomSheetStateImageView.image = UIImage(named: "down_icon")
}
else {
bottomSheetStateImageView.image = UIImage(named: "up_icon")
}
}
/**
This method responds to the user panning on the bottom sheet.
*/
@objc func didPan(panGesture: UIPanGestureRecognizer) {
// Opens or closes the bottom sheet based on the user's interaction with the bottom sheet.
let translation = panGesture.translation(in: view)
switch panGesture.state {
case .began:
initialBottomSpace = bottomSheetViewBottomSpace.constant
translateBottomSheet(withVerticalTranslation: translation.y)
case .changed:
translateBottomSheet(withVerticalTranslation: translation.y)
case .cancelled:
setBottomSheetLayout(withBottomSpace: initialBottomSpace)
case .ended:
translateBottomSheetAtEndOfPan(withVerticalTranslation: translation.y)
setImageBasedOnBottomViewState()
initialBottomSpace = 0.0
default:
break
}
}
/**
This method sets bottom sheet translation while pan gesture state is continuously changing.
*/
private func translateBottomSheet(withVerticalTranslation verticalTranslation: CGFloat) {
let bottomSpace = initialBottomSpace - verticalTranslation
guard bottomSpace <= 0.0 && bottomSpace >= inferenceViewController!.collapsedHeight - bottomSheetView.bounds.size.height else {
return
}
setBottomSheetLayout(withBottomSpace: bottomSpace)
}
/**
This method changes bottom sheet state to either fully expanded or closed at the end of pan.
*/
private func translateBottomSheetAtEndOfPan(withVerticalTranslation verticalTranslation: CGFloat) {
// Changes bottom sheet state to either fully open or closed at the end of pan.
let bottomSpace = bottomSpaceAtEndOfPan(withVerticalTranslation: verticalTranslation)
setBottomSheetLayout(withBottomSpace: bottomSpace)
}
/**
Return the final state of the bottom sheet view (whether fully collapsed or expanded) that is to be retained.
*/
private func bottomSpaceAtEndOfPan(withVerticalTranslation verticalTranslation: CGFloat) -> CGFloat {
// Calculates whether to fully expand or collapse bottom sheet when pan gesture ends.
var bottomSpace = initialBottomSpace - verticalTranslation
var height: CGFloat = 0.0
if initialBottomSpace == 0.0 {
height = bottomSheetView.bounds.size.height
}
else {
height = inferenceViewController!.collapsedHeight
}
let currentHeight = bottomSheetView.bounds.size.height + bottomSpace
if currentHeight - height <= collapseTransitionThreshold {
bottomSpace = inferenceViewController!.collapsedHeight - bottomSheetView.bounds.size.height
}
else if currentHeight - height >= expandThransitionThreshold {
bottomSpace = 0.0
}
else {
bottomSpace = initialBottomSpace
}
return bottomSpace
}
/**
This method lays out the change of the bottom space of the bottom sheet with respect to the view managed by this controller.
*/
func setBottomSheetLayout(withBottomSpace bottomSpace: CGFloat) {
view.setNeedsLayout()
bottomSheetViewBottomSpace.constant = bottomSpace
view.setNeedsLayout()
}
}
CameraFeedManager:
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import UIKit
import AVFoundation
// MARK: CameraFeedManagerDelegate Declaration
protocol CameraFeedManagerDelegate: AnyObject {
/**
This method delivers the pixel buffer of the current frame seen by the device's camera.
*/
func didOutput(pixelBuffer: CVPixelBuffer)
/**
This method indicates that the camera permissions have been denied.
*/
func presentCameraPermissionsDeniedAlert()
/**
This method indicates that there was an error in video configuration.
*/
func presentVideoConfigurationErrorAlert()
/**
This method indicates that a session runtime error occurred.
*/
func sessionRunTimeErrorOccured()
/**
This method indicates that the session was interrupted.
*/
func sessionWasInterrupted(canResumeManually resumeManually: Bool)
/**
This method indicates that the session interruption has ended.
*/
func sessionInterruptionEnded()
}
/**
This enum holds the state of the camera initialization.
*/
enum CameraConfiguration {
case success
case failed
case permissionDenied
}
/**
This class manages all camera related functionality
*/
class CameraFeedManager: NSObject, AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) { // << --- Mine
print("Video recorded to: " + outputFileURL.absoluteString)
}
// MARK: Camera Related Instance Variables
private let session: AVCaptureSession = AVCaptureSession()
private let previewView: PreviewView
private let sessionQueue = DispatchQueue(label: "sessionQueue")
private var cameraConfiguration: CameraConfiguration = .failed
private lazy var videoDataOutput = AVCaptureVideoDataOutput()
private var movieDataOutput = AVCaptureMovieFileOutput() // << --- Mine
private var isSessionRunning = false
// MARK: CameraFeedManagerDelegate
weak var delegate: CameraFeedManagerDelegate?
// MARK: Initializer
init(previewView: PreviewView) {
self.previewView = previewView
super.init()
// Initializes the session
session.sessionPreset = .high
self.previewView.session = session
self.previewView.previewLayer.connection?.videoOrientation = .portrait
self.previewView.previewLayer.videoGravity = .resizeAspectFill
self.attemptToConfigureSession()
}
// MARK: Session Start and End methods
/**
This method starts an AVCaptureSession based on whether the camera configuration was successful.
*/
func checkCameraConfigurationAndStartSession() {
sessionQueue.async {
switch self.cameraConfiguration {
case .success:
self.addObservers()
self.startSession()
case .failed:
DispatchQueue.main.async {
self.delegate?.presentVideoConfigurationErrorAlert()
}
case .permissionDenied:
DispatchQueue.main.async {
self.delegate?.presentCameraPermissionsDeniedAlert()
}
}
}
}
/**
This method stops a running AVCaptureSession.
*/
func stopSession() {
self.removeObservers()
sessionQueue.async {
if self.session.isRunning {
self.session.stopRunning()
self.isSessionRunning = self.session.isRunning
}
}
}
/**
This method resumes an interrupted AVCaptureSession.
*/
func resumeInterruptedSession(withCompletion completion: @escaping (Bool) -> ()) {
sessionQueue.async {
self.startSession()
DispatchQueue.main.async {
completion(self.isSessionRunning)
}
}
}
/**
This method starts the AVCaptureSession
**/
private func startSession() {
self.session.startRunning()
self.isSessionRunning = self.session.isRunning
}
// MARK: Session Configuration Methods.
/**
This method requests camera permissions, handles the configuration of the session, and stores the result of the configuration.
*/
private func attemptToConfigureSession() {
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
self.cameraConfiguration = .success
case .notDetermined:
self.sessionQueue.suspend()
self.requestCameraAccess(completion: { (granted) in
self.sessionQueue.resume()
})
case .denied:
self.cameraConfiguration = .permissionDenied
default:
break
}
self.sessionQueue.async {
self.configureSession()
}
}
/**
This method requests camera permissions.
*/
private func requestCameraAccess(completion: @escaping (Bool) -> ()) {
AVCaptureDevice.requestAccess(for: .video) { (granted) in
if !granted {
self.cameraConfiguration = .permissionDenied
}
else {
self.cameraConfiguration = .success
}
completion(granted)
}
}
/**
This method handles all the steps to configure an AVCaptureSession.
*/
private func configureSession() {
guard cameraConfiguration == .success else {
return
}
session.beginConfiguration()
// Tries to add an AVCaptureDeviceInput.
guard addVideoDeviceInput() == true else {
self.session.commitConfiguration()
self.cameraConfiguration = .failed
return
}
// Tries to add an AVCaptureVideoDataOutput.
guard addVideoDataOutput() else {
self.session.commitConfiguration()
self.cameraConfiguration = .failed
return
}
session.commitConfiguration()
self.cameraConfiguration = .success
}
func startRecording() { // << --- Mine
self.session.addOutput(movieDataOutput)
guard let homeDirectory = FileManager.default.urls(for: .desktopDirectory, in: .userDomainMask).first else { return }
let url = URL(fileURLWithPath: homeDirectory.absoluteString + "/mymovie.mov")
movieDataOutput.startRecording(to: url , recordingDelegate: self)
}
func stopRecording() { // <<< -- Mine
self.movieDataOutput.stopRecording()
self.session.removeOutput(movieDataOutput)
}
/**
This method tries to add an AVCaptureDeviceInput to the current AVCaptureSession.
*/
private func addVideoDeviceInput() -> Bool {
/**Tries to get the default back camera.
*/
guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
return false
}
do {
let videoDeviceInput = try AVCaptureDeviceInput(device: camera)
if session.canAddInput(videoDeviceInput) {
session.addInput(videoDeviceInput)
return true
}
else {
return false
}
}
catch {
fatalError("Cannot create video device input")
}
}
/**
This method tries to add an AVCaptureVideoDataOutput to the current AVCaptureSession.
*/
private func addVideoDataOutput() -> Bool {
let sampleBufferQueue = DispatchQueue(label: "sampleBufferQueue")
videoDataOutput.setSampleBufferDelegate(self, queue: sampleBufferQueue)
videoDataOutput.alwaysDiscardsLateVideoFrames = true
videoDataOutput.videoSettings = [ String(kCVPixelBufferPixelFormatTypeKey) : kCMPixelFormat_32BGRA]
if session.canAddOutput(videoDataOutput) {
session.addOutput(videoDataOutput)
videoDataOutput.connection(with: .video)?.videoOrientation = .portrait
return true
}
return false
}
// MARK: Notification Observer Handling
private func addObservers() {
NotificationCenter.default.addObserver(self, selector: #selector(CameraFeedManager.sessionRuntimeErrorOccured(notification:)), name: NSNotification.Name.AVCaptureSessionRuntimeError, object: session)
NotificationCenter.default.addObserver(self, selector: #selector(CameraFeedManager.sessionWasInterrupted(notification:)), name: NSNotification.Name.AVCaptureSessionWasInterrupted, object: session)
NotificationCenter.default.addObserver(self, selector: #selector(CameraFeedManager.sessionInterruptionEnded), name: NSNotification.Name.AVCaptureSessionInterruptionEnded, object: session)
}
private func removeObservers() {
NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureSessionRuntimeError, object: session)
NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureSessionWasInterrupted, object: session)
NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureSessionInterruptionEnded, object: session)
}
// MARK: Notification Observers
@objc func sessionWasInterrupted(notification: Notification) {
if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?,
let reasonIntegerValue = userInfoValue.integerValue,
let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) {
print("Capture session was interrupted with reason \(reason)")
var canResumeManually = false
if reason == .videoDeviceInUseByAnotherClient {
canResumeManually = true
} else if reason == .videoDeviceNotAvailableWithMultipleForegroundApps {
canResumeManually = false
}
self.delegate?.sessionWasInterrupted(canResumeManually: canResumeManually)
}
}
@objc func sessionInterruptionEnded(notification: Notification) {
self.delegate?.sessionInterruptionEnded()
}
@objc func sessionRuntimeErrorOccured(notification: Notification) {
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
print("Capture session runtime error: \(error)")
if error.code == .mediaServicesWereReset {
sessionQueue.async {
if self.isSessionRunning {
self.startSession()
} else {
DispatchQueue.main.async {
self.delegate?.sessionRunTimeErrorOccured()
}
}
}
} else {
self.delegate?.sessionRunTimeErrorOccured()
}
}
}
/**
AVCaptureVideoDataOutputSampleBufferDelegate
*/
extension CameraFeedManager: AVCaptureVideoDataOutputSampleBufferDelegate {
/** This method delivers the CVPixelBuffer of the frame currently seen by the camera to the delegate.
*/
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// Converts the CMSampleBuffer to a CVPixelBuffer.
let pixelBuffer: CVPixelBuffer? = CMSampleBufferGetImageBuffer(sampleBuffer)
guard let imagePixelBuffer = pixelBuffer else {
return
}
// Delegates the pixel buffer to the ViewController.
delegate?.didOutput(pixelBuffer: imagePixelBuffer)
}
}
PlayerController:
import Foundation
import UIKit
import AVFoundation
import AVKit
class PlayerController : UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
guard let homeDirectory = FileManager.default.urls(for: .desktopDirectory, in: .userDomainMask).first else { return }
let url = URL(fileURLWithPath: homeDirectory.absoluteString + "/mymovie.mov")
print(url.absoluteString)
let player = AVPlayer(url: url) // video path coming from above function
let playerViewController = AVPlayerViewController()
playerViewController.player = player
self.present(playerViewController, animated: true) {
playerViewController.player!.play()
}
}
}
The solution was to create the path using:
private func documentDirectory() -> String {
let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory,
.userDomainMask,
true)
return documentDirectory[0]
}
private func append(toPath path: String,
withPathComponent pathComponent: String) -> String? {
if var pathURL = URL(string: path) {
pathURL.appendPathComponent(pathComponent)
return pathURL.absoluteString
}
return nil
}
and
guard let path = append(toPath: documentDirectory(), withPathComponent: "movie_test.mov") else {return}
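For reference, the same destination can be reached with the URL-based FileManager API; this sketch (my addition, not part of the original fix) sidesteps string concatenation and the stray file:// scheme that broke the earlier path:

import Foundation

// Builds <Documents>/movie_test.mov as a proper file URL.
func movieURL(named fileName: String = "movie_test.mov") -> URL {
    let documents = FileManager.default.urls(for: .documentDirectory,
                                             in: .userDomainMask)[0]
    return documents.appendingPathComponent(fileName)
}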
I am trying to make a barcode scanner app. As soon as the camera session begins, the app crashes within a few seconds.
I am unable to find the reason behind this, or how to fix it.
I have used https://www.appcoda.com/barcode-reader-swift/ to make the barcode scanner.
import Foundation
import UIKit
import AVFoundation
import CoreData
enum BarcodeScanError: String {
case cameraLoadFailed = "Camera Load Failed"
case NoValidBarcode = "No Valid Barcode"
}
class ScanBoardingPassViewController : UIViewController {
//MARK: - Properties
var viewModel : ScanBoardingPassViewModel? = nil
var captureSession : AVCaptureSession?
var videoPreviewLayer: AVCaptureVideoPreviewLayer?
var qrCodeFrameView: UIView?
private let supportedCodeTypes = [AVMetadataObject.ObjectType.aztec,
AVMetadataObject.ObjectType.pdf417]
//MARK: - Outlets
@IBOutlet weak var btnCancel: UIButton!
//MARK: - View Life Cycle
override func viewDidLoad() {
viewModel = ScanBoardingPassViewModel()
self.captureSession = AVCaptureSession()
self.setUpView()
super.viewDidLoad()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
//MARK: - Set Up View
func setUpView() {
self.setUpBarCodeScanner()
self.view.bringSubviewToFront(self.btnCancel)
self.setUpBarcodeRecognizerFrame()
}
private func setUpBarCodeScanner() {
// Get the back-facing camera for capturing videos
guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
debugPrint(BarcodeScanError.cameraLoadFailed)
return
}
do {
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
let input = try AVCaptureDeviceInput(device: captureDevice)
// Set the input device on the capture session.
captureSession?.addInput(input)
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
let captureMetadataOutput = AVCaptureMetadataOutput()
captureSession?.addOutput(captureMetadataOutput)
// Set delegate and use the default dispatch queue to execute the call back
captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
// captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
} catch {
// If any error occurs, simply print it out and don't continue any more.
print(error)
return
}
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
videoPreviewLayer?.frame = view.layer.bounds
view.layer.addSublayer(videoPreviewLayer!)
// Start video capture.
captureSession?.startRunning()
}
private func setUpBarcodeRecognizerFrame() {
// Initialize QR Code Frame to highlight the QR code
qrCodeFrameView = UIView()
if let qrCodeFrameView = qrCodeFrameView {
qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
qrCodeFrameView.layer.borderWidth = 2
view.addSubview(qrCodeFrameView)
view.bringSubviewToFront(qrCodeFrameView)
}
}
//MARK: - Outlets
@IBAction func btnCancelPressed(_ sender: UIButton) {
self.dismissView()
}
func dismissView() {
self.dismiss(animated: true, completion: nil)
}
}
extension ScanBoardingPassViewController: AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
// Check if the metadataObjects array is not nil and it contains at least one object.
if metadataObjects.count == 0 {
qrCodeFrameView?.frame = CGRect.zero
debugPrint(BarcodeScanError.NoValidBarcode)
return
}
// Get the metadata object.
let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
if supportedCodeTypes.contains(metadataObj.type) {
// If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
qrCodeFrameView?.frame = barCodeObject!.bounds
if metadataObj.stringValue != nil {
captureSession?.stopRunning()
debugPrint("Valid Barcode found \(metadataObj.stringValue!)")
if let boardingPass = viewModel?.parseBoardingPassString(boardingPassString : metadataObj.stringValue!) {
let unitOfWork = UnitOfWork(context:( UIApplication.shared.delegate as! AppDelegate).persistentContainer.newBackgroundContext() )
unitOfWork.boardingPassRepository.saveBoardingPasses(boardingPass: boardingPass)
unitOfWork.saveChanges()
print(unitOfWork.boardingPassRepository.getBoardingPasses(predicate: nil))
self.dismissView()
}
}
}
}
}
The camera doesn't get stuck, but the app drops into lldb every time within a few seconds.
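One detail worth checking (my observation, not a confirmed diagnosis): setUpBarCodeScanner adds its input and output without the canAddInput/canAddOutput guards that AVFoundation expects, so a configuration failure raises an exception instead of being reported; the working example below does use those guards. A sketch of the guarded version of the do block:

do {
    let input = try AVCaptureDeviceInput(device: captureDevice)
    guard captureSession?.canAddInput(input) == true else {
        debugPrint(BarcodeScanError.cameraLoadFailed)
        return
    }
    captureSession?.addInput(input)
    let captureMetadataOutput = AVCaptureMetadataOutput()
    guard captureSession?.canAddOutput(captureMetadataOutput) == true else {
        debugPrint(BarcodeScanError.cameraLoadFailed)
        return
    }
    captureSession?.addOutput(captureMetadataOutput)
    // Set the delegate and supported types only after the output is attached.
    captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
    captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
} catch {
    print(error)
    return
}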
// Created by Satya Narayana on 17/11/20.
//
import UIKit
import AVFoundation
import UIKit
class QRViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
//MARK: Outlets
@IBOutlet weak var qrLbl: UILabel! // BarCode displaying Label
@IBOutlet weak var sView: UIView! // View
//MARK: Variables
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
//MARK: View Methods
override func viewDidLoad() {
super.viewDidLoad()
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr, .ean13, .code128]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = sView.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
sView.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
self.showToast(message: "Scanning not supported. Your device does not support scanning a code from an item. Please use a device with a camera.", seconds: 1.0)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
qrLbl.isHidden = true
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
//MARK:- Found BARCODE
func found(code: String) {
print(code)
if code != "" {
print(code) // This is Barcode
qrLbl.text = code
} else {
// if you need run again uncomment below line
//self.captureSession.startRunning()
}
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}
I am developing a simple app in SwiftUI for an internet radio station. It uses AVPlayer to play the stream available at a given URL, and that works perfectly. I have also set up the AVAudioSession in the AppDelegate, so the app plays in the background, stops playing while a call is incoming, and resumes playing after the call. This all works fine. However, I was able neither to bring up the remote controls on the lock screen nor to show the app in the player tile in Control Center.
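For reference, the audio session setup mentioned above is presumably the standard .playback configuration; a minimal sketch of the usual pattern (my assumption, since the asker's AppDelegate code isn't shown):

import AVFoundation

// Standard background-audio session: the .playback category keeps audio
// running in the background and yields to incoming calls, matching the
// behaviour described above.
func configureAudioSession() {
    do {
        try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("Failed to activate audio session: \(error)")
    }
}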
The app is written using SwiftUI, and I am also moving from traditional completion blocks and targets to Combine. I have created a separate class Player, which is an ObservableObject (observed by ContentView), where I set up the AVPlayer and AVPlayerItem (with the given stream URL). And all works fine: the app updates its state on changes of the player state. I am not using AVPlayerViewController, since I don't need one. On initialization of that Player object I also set up the Remote Transport Controls using this method (I moved from setting targets to publishers).
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.publisher(for: \.playCommand)
.sink(receiveValue: {_ in self.play() })
.store(in: &cancellables)
commandCenter.publisher(for: \.stopCommand)
.sink(receiveValue: {_ in self.stop() })
.store(in: &cancellables)
}
Whether I use the original version of that method provided by Apple or my own version (as shown above), the remote controls don't show up, and the Control Center player tile is not updated.
Of course, I use the method provided by Apple for updating Now Playing:
func setupNowPlaying() {
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "Radio"
if let image = UIImage(systemName: "radio") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player?.currentItem?.currentTime().seconds ?? ""
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = player?.currentItem?.asset.duration.seconds ?? ""
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPlaying ? 1 : 0
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
I don't know where the problem is. Is it the way I set up the Remote Transport Controls? The flow is like this:
Observable Player object with AVPlayer and setup for Remote Transport Controls and NowPlaying -> observed by -> Content View.
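For context, the view side of that flow might look like this minimal sketch (my assumption; the asker's ContentView isn't shown):

import SwiftUI

struct ContentView: View {
    // The view observes Player; any @Published change re-renders the body.
    @ObservedObject var player: Player

    var body: some View {
        Button(player.isPlaying ? "Stop" : "Play") {
            if player.isPlaying { player.stop() } else { player.play() }
        }
    }
}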
Here is full listing for Player class:
import Foundation
import AVKit
import Combine
import MediaPlayer
class Player: ObservableObject {
private let streamURL = URL(string: "https://stream.rcs.revma.com/ypqt40u0x1zuv")!
@Published var status: Player.Status = .stopped
@Published var isPlaying = false
@Published var showError = false
@Published var isMuted = false
var player: AVPlayer?
var cancellables = Set<AnyCancellable>()
init() {
setupRemoteTransportControls()
}
func setupPlayer() {
let item = AVPlayerItem(url: streamURL)
player = AVPlayer(playerItem: item)
player?.allowsExternalPlayback = true
}
func play() {
handleInterruption()
handleRouteChange()
setupPlayer()
player?.play()
player?.currentItem?.publisher(for: \.status)
.sink(receiveValue: { status in
self.handle(status: status)
})
.store(in: &cancellables)
}
func stop() {
player?.pause()
player = nil
status = .stopped
}
func mute() {
player?.isMuted.toggle()
isMuted.toggle()
}
func handle(status: AVPlayerItem.Status) {
switch status {
case .unknown:
self.status = .waiting
self.isPlaying = false
case .readyToPlay:
self.status = .ready
self.isPlaying = true
self.setupNowPlaying()
case .failed:
self.status = .failed
self.isPlaying = false
self.showError = true
self.setupNowPlaying()
default:
self.status = .stopped
self.isPlaying = false
self.setupNowPlaying()
}
}
func handleInterruption() {
NotificationCenter.default.publisher(for: AVAudioSession.interruptionNotification)
.map(\.userInfo)
.compactMap {
$0?[AVAudioSessionInterruptionTypeKey] as? UInt
}
.map { AVAudioSession.InterruptionType(rawValue: $0)}
.sink { (interruptionType) in
self.handle(interruptionType: interruptionType)
}
.store(in: &cancellables)
}
func handle(interruptionType: AVAudioSession.InterruptionType?) {
switch interruptionType {
case .began:
self.stop()
case .ended:
self.play()
default:
break
}
}
typealias UInfo = [AnyHashable: Any]
func handleRouteChange() {
NotificationCenter.default.publisher(for: AVAudioSession.routeChangeNotification)
.map(\.userInfo)
.compactMap({ (userInfo) -> (UInfo?, UInt?) in
(userInfo, userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt)
})
.compactMap({ (result) -> (UInfo?, AVAudioSession.RouteChangeReason?) in
(result.0, AVAudioSession.RouteChangeReason(rawValue: result.1 ?? 0))
})
.sink(receiveValue: { (result) in
self.handle(reason: result.1, userInfo: result.0)
})
.store(in: &cancellables)
}
func handle(reason: AVAudioSession.RouteChangeReason?, userInfo: UInfo?) {
switch reason {
case .newDeviceAvailable:
let session = AVAudioSession.sharedInstance()
for output in session.currentRoute.outputs where output.portType == AVAudioSession.Port.headphones {
DispatchQueue.main.async {
self.play()
}
}
case .oldDeviceUnavailable:
if let previousRoute = userInfo?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription {
for output in previousRoute.outputs where output.portType == AVAudioSession.Port.headphones {
DispatchQueue.main.sync {
self.stop()
}
break
}
}
default:
break
}
}
}
extension Player {
enum Status {
case waiting, ready, failed, stopped
}
}
extension Player {
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.publisher(for: \.playCommand)
.sink(receiveValue: {_ in self.play() })
.store(in: &cancellables)
commandCenter.publisher(for: \.stopCommand)
.sink(receiveValue: {_ in self.stop() })
.store(in: &cancellables)
}
func setupNowPlaying() {
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "Radio"
if let image = UIImage(systemName: "radio") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player?.currentItem?.currentTime().seconds ?? ""
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = player?.currentItem?.asset.duration.seconds ?? ""
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPlaying ? 1 : 0
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}
It turned out that I needed to add one line of code in the AppDelegate, in application(_:didFinishLaunchingWithOptions:):
UIApplication.shared.beginReceivingRemoteControlEvents()
That solved the problem. Now the remote controller is visible on the lock screen, and it also works in Control Center.
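In context, the call sits in the app delegate's launch method; a minimal sketch using the standard UIKit template names:

import UIKit

@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
    func application(_ application: UIApplication,
                     didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // Without this call, the lock screen and Control Center ignore the
        // app's remote command handlers and Now Playing info.
        UIApplication.shared.beginReceivingRemoteControlEvents()
        return true
    }
}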
One additional fix: changing targets to publishers in setupRemoteTransportControls() in my Player object didn't work, so I switched back to setting targets like this:
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { event in
self.play()
return .success
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { event in
self.stop()
return .success
}
}
The Apple ARKitVision example has the following declaration in the ViewController.swift file:
// The view controller that displays the status and "restart experience" UI.
private lazy var statusViewController: StatusViewController = {
return children.lazy.compactMap({ $0 as? StatusViewController }).first!
}()
However, if I copy the same views and source files and incorporate them into another test storyboard/project, I get the error message "Instance member 'children' cannot be used on type 'StatusViewController'".
So why does this work in the ARKitVision example but not when I set it up myself from scratch? What else is the ARKitVision example doing to get this working? Thanks 😊
The complete class definition for StatusViewController is:
/*
See LICENSE folder for this sample’s licensing information.
Abstract:
Utility class for showing messages above the AR view.
*/
import Foundation
import ARKit
/**
Displayed at the top of the main interface of the app that allows users to see
the status of the AR experience, as well as the ability to control restarting
the experience altogether.
- Tag: StatusViewController
*/
class StatusViewController: UIViewController {
// MARK: - Types
enum MessageType {
case trackingStateEscalation
case planeEstimation
case contentPlacement
case focusSquare
static var all: [MessageType] = [
.trackingStateEscalation,
.planeEstimation,
.contentPlacement,
.focusSquare
]
}
// MARK: - IBOutlets
@IBOutlet weak private var messagePanel: UIVisualEffectView!
@IBOutlet weak private var messageLabel: UILabel!
@IBOutlet weak private var restartExperienceButton: UIButton!
// MARK: - Properties
/// Triggered when the "Restart Experience" button is tapped.
var restartExperienceHandler: () -> Void = {}
/// Seconds before the timer message should fade out. Adjust if the app needs longer transient messages.
private let displayDuration: TimeInterval = 6
// Timer for hiding messages.
private var messageHideTimer: Timer?
private var timers: [MessageType: Timer] = [:]
// MARK: - Message Handling
func showMessage(_ text: String, autoHide: Bool = true) {
// Cancel any previous hide timer.
messageHideTimer?.invalidate()
messageLabel.text = text
// Make sure status is showing.
setMessageHidden(false, animated: true)
if autoHide {
messageHideTimer = Timer.scheduledTimer(withTimeInterval: displayDuration, repeats: false, block: { [weak self] _ in
self?.setMessageHidden(true, animated: true)
})
}
}
func scheduleMessage(_ text: String, inSeconds seconds: TimeInterval, messageType: MessageType) {
cancelScheduledMessage(for: messageType)
let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [weak self] timer in
self?.showMessage(text)
timer.invalidate()
})
timers[messageType] = timer
}
func cancelScheduledMessage(`for` messageType: MessageType) {
timers[messageType]?.invalidate()
timers[messageType] = nil
}
func cancelAllScheduledMessages() {
for messageType in MessageType.all {
cancelScheduledMessage(for: messageType)
}
}
// MARK: - ARKit
func showTrackingQualityInfo(for trackingState: ARCamera.TrackingState, autoHide: Bool) {
showMessage(trackingState.presentationString, autoHide: autoHide)
}
func escalateFeedback(for trackingState: ARCamera.TrackingState, inSeconds seconds: TimeInterval) {
cancelScheduledMessage(for: .trackingStateEscalation)
let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [unowned self] _ in
self.cancelScheduledMessage(for: .trackingStateEscalation)
var message = trackingState.presentationString
if let recommendation = trackingState.recommendation {
message.append(": \(recommendation)")
}
self.showMessage(message, autoHide: false)
})
timers[.trackingStateEscalation] = timer
}
// MARK: - IBActions
@IBAction private func restartExperience(_ sender: UIButton) {
restartExperienceHandler()
}
// MARK: - Panel Visibility
private func setMessageHidden(_ hide: Bool, animated: Bool) {
// The panel starts out hidden, so show it before animating opacity.
messagePanel.isHidden = false
guard animated else {
messagePanel.alpha = hide ? 0 : 1
return
}
UIView.animate(withDuration: 0.2, delay: 0, options: [.beginFromCurrentState], animations: {
self.messagePanel.alpha = hide ? 0 : 1
}, completion: nil)
}
}
extension ARCamera.TrackingState {
var presentationString: String {
switch self {
case .notAvailable:
return "TRACKING UNAVAILABLE"
case .normal:
return "TRACKING NORMAL"
case .limited(.excessiveMotion):
return "TRACKING LIMITED\nExcessive motion"
case .limited(.insufficientFeatures):
return "TRACKING LIMITED\nLow detail"
case .limited(.initializing):
return "Initializing"
case .limited(.relocalizing):
return "Recovering from interruption"
}
}
var recommendation: String? {
switch self {
case .limited(.excessiveMotion):
return "Try slowing down your movement, or reset the session."
case .limited(.insufficientFeatures):
return "Try pointing at a flat surface, or reset the session."
case .limited(.relocalizing):
return "Return to the location where you left off or try resetting the session."
default:
return nil
}
}
}
The definition of the ViewController class is:
/*
See LICENSE folder for this sample’s licensing information.
Abstract:
Main view controller for the ARKitVision sample.
*/
import UIKit
import SpriteKit
import ARKit
import Vision
class ViewController: UIViewController, UIGestureRecognizerDelegate, ARSKViewDelegate, ARSessionDelegate {
#IBOutlet weak var sceneView: ARSKView!
// The view controller that displays the status and "restart experience" UI.
private lazy var statusViewController: StatusViewController = {
return children.lazy.compactMap({ $0 as? StatusViewController }).first!
}()
// MARK: - View controller lifecycle
override func viewDidLoad() {
super.viewDidLoad()
// Configure and present the SpriteKit scene that draws overlay content.
let overlayScene = SKScene()
overlayScene.scaleMode = .aspectFill
sceneView.delegate = self
sceneView.presentScene(overlayScene)
sceneView.session.delegate = self
// Hook up status view controller callback.
statusViewController.restartExperienceHandler = { [unowned self] in
self.restartSession()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Create a session configuration
let configuration = ARWorldTrackingConfiguration()
// Run the view's session
sceneView.session.run(configuration)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// Pause the view's session
sceneView.session.pause()
}
// MARK: - ARSessionDelegate
// Pass camera frames received from ARKit to Vision (when not already processing one)
/// - Tag: ConsumeARFrames
func session(_ session: ARSession, didUpdate frame: ARFrame) {
// Do not enqueue other buffers for processing while another Vision task is still running.
// The camera stream has only a finite amount of buffers available; holding too many buffers for analysis would starve the camera.
guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
return
}
// Retain the image buffer for Vision processing.
self.currentBuffer = frame.capturedImage
classifyCurrentImage()
}
// MARK: - Vision classification
// Vision classification request and model
/// - Tag: ClassificationRequest
private lazy var classificationRequest: VNCoreMLRequest = {
do {
// Instantiate the model from its generated Swift class.
let model = try VNCoreMLModel(for: Inceptionv3().model)
let request = VNCoreMLRequest(model: model, completionHandler: { [weak self] request, error in
self?.processClassifications(for: request, error: error)
})
// Crop input images to square area at center, matching the way the ML model was trained.
request.imageCropAndScaleOption = .centerCrop
// Use CPU for Vision processing to ensure that there are adequate GPU resources for rendering.
request.usesCPUOnly = true
return request
} catch {
fatalError("Failed to load Vision ML model: \(error)")
}
}()
// The pixel buffer being held for analysis; used to serialize Vision requests.
private var currentBuffer: CVPixelBuffer?
// Queue for dispatching vision classification requests
private let visionQueue = DispatchQueue(label: "com.example.apple-samplecode.ARKitVision.serialVisionQueue")
// Run the Vision+ML classifier on the current image buffer.
/// - Tag: ClassifyCurrentImage
private func classifyCurrentImage() {
// Most computer vision tasks are not rotation agnostic so it is important to pass in the orientation of the image with respect to device.
let orientation = CGImagePropertyOrientation(UIDevice.current.orientation)
let requestHandler = VNImageRequestHandler(cvPixelBuffer: currentBuffer!, orientation: orientation)
visionQueue.async {
do {
// Release the pixel buffer when done, allowing the next buffer to be processed.
defer { self.currentBuffer = nil }
try requestHandler.perform([self.classificationRequest])
} catch {
print("Error: Vision request failed with error \"\(error)\"")
}
}
}
// Classification results
private var identifierString = ""
private var confidence: VNConfidence = 0.0
// Handle completion of the Vision request and choose results to display.
/// - Tag: ProcessClassifications
func processClassifications(for request: VNRequest, error: Error?) {
guard let results = request.results else {
print("Unable to classify image.\n\(error!.localizedDescription)")
return
}
// The `results` will always be `VNClassificationObservation`s, as specified by the Core ML model in this project.
let classifications = results as! [VNClassificationObservation]
// Show a label for the highest-confidence result (but only above a minimum confidence threshold).
if let bestResult = classifications.first(where: { result in result.confidence > 0.5 }),
let label = bestResult.identifier.split(separator: ",").first {
identifierString = String(label)
confidence = bestResult.confidence
} else {
identifierString = ""
confidence = 0
}
DispatchQueue.main.async { [weak self] in
self?.displayClassifierResults()
}
}
// Show the classification results in the UI.
private func displayClassifierResults() {
guard !self.identifierString.isEmpty else {
return // No object was classified.
}
let message = String(format: "Detected \(self.identifierString) with %.2f", self.confidence * 100) + "% confidence"
statusViewController.showMessage(message)
}
// MARK: - Tap gesture handler & ARSKViewDelegate
// Labels for classified objects by ARAnchor UUID
private var anchorLabels = [UUID: String]()
// When the user taps, add an anchor associated with the current classification result.
/// - Tag: PlaceLabelAtLocation
@IBAction func placeLabelAtLocation(sender: UITapGestureRecognizer) {
let hitLocationInView = sender.location(in: sceneView)
let hitTestResults = sceneView.hitTest(hitLocationInView, types: [.featurePoint, .estimatedHorizontalPlane])
if let result = hitTestResults.first {
// Add a new anchor at the tap location.
let anchor = ARAnchor(transform: result.worldTransform)
sceneView.session.add(anchor: anchor)
// Track anchor ID to associate text with the anchor after ARKit creates a corresponding SKNode.
anchorLabels[anchor.identifier] = identifierString
}
}
// When an anchor is added, provide a SpriteKit node for it and set its text to the classification label.
/// - Tag: UpdateARContent
func view(_ view: ARSKView, didAdd node: SKNode, for anchor: ARAnchor) {
guard let labelText = anchorLabels[anchor.identifier] else {
fatalError("missing expected associated label for anchor")
}
let label = TemplateLabelNode(text: labelText)
node.addChild(label)
}
// MARK: - AR Session Handling
func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
statusViewController.showTrackingQualityInfo(for: camera.trackingState, autoHide: true)
switch camera.trackingState {
case .notAvailable, .limited:
statusViewController.escalateFeedback(for: camera.trackingState, inSeconds: 3.0)
case .normal:
statusViewController.cancelScheduledMessage(for: .trackingStateEscalation)
// Unhide content after successful relocalization.
setOverlaysHidden(false)
}
}
func session(_ session: ARSession, didFailWithError error: Error) {
guard error is ARError else { return }
let errorWithInfo = error as NSError
let messages = [
errorWithInfo.localizedDescription,
errorWithInfo.localizedFailureReason,
errorWithInfo.localizedRecoverySuggestion
]
// Filter out optional error messages.
let errorMessage = messages.compactMap({ $0 }).joined(separator: "\n")
DispatchQueue.main.async {
self.displayErrorMessage(title: "The AR session failed.", message: errorMessage)
}
}
func sessionWasInterrupted(_ session: ARSession) {
setOverlaysHidden(true)
}
func sessionShouldAttemptRelocalization(_ session: ARSession) -> Bool {
/*
Allow the session to attempt to resume after an interruption.
This process may not succeed, so the app must be prepared
to reset the session if the relocalizing status continues
for a long time -- see `escalateFeedback` in `StatusViewController`.
*/
return true
}
private func setOverlaysHidden(_ shouldHide: Bool) {
sceneView.scene!.children.forEach { node in
if shouldHide {
// Hide overlay content immediately during relocalization.
node.alpha = 0
} else {
// Fade overlay content in after relocalization succeeds.
node.run(.fadeIn(withDuration: 0.5))
}
}
}
private func restartSession() {
statusViewController.cancelAllScheduledMessages()
statusViewController.showMessage("RESTARTING SESSION")
anchorLabels = [UUID: String]()
let configuration = ARWorldTrackingConfiguration()
sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
}
// MARK: - Error handling
private func displayErrorMessage(title: String, message: String) {
// Present an alert informing about the error that has occurred.
let alertController = UIAlertController(title: title, message: message, preferredStyle: .alert)
let restartAction = UIAlertAction(title: "Restart Session", style: .default) { _ in
alertController.dismiss(animated: true, completion: nil)
self.restartSession()
}
alertController.addAction(restartAction)
present(alertController, animated: true, completion: nil)
}
}
What this would indicate is that your class StatusViewController doesn't inherit from UIViewController, as the children property has been available on UIViewController subclasses for quite some time.
Are you able to share how you have composed your StatusViewController?
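For comparison, a minimal sketch of the two declarations (assuming that diagnosis is right):

// The sample's declaration: `children` is an instance property
// inherited from UIViewController, so the lazy lookup compiles.
class StatusViewController: UIViewController { /* ... */ }

// Without the superclass there is no `children` member to resolve,
// and the compiler reports the error quoted in the question:
// class StatusViewController { /* ... */ }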
I have an ARSCNView that can occasionally pause its session depending on the situation. Is there a way to check if its session is running?
Something like this:
class myARView: ARSCNView {
...
func foo() {
if(session.running) {
// do stuff
}
}
...
}
At this moment, it seems there isn't a way to check whether the session is running from the ARSession object itself. However, by implementing ARSCNViewDelegate you can get notified whenever your session is interrupted and when the interruption ends.
One way to achieve your goal is to keep a boolean, update it whenever you pause/resume the session, and check its value in your functions.
class ViewController: UIViewController, ARSCNViewDelegate {
var isSessionRunning = false
func foo() {
if self.isSessionRunning {
// do stuff
}
}
func pauseSession() {
self.sceneView.session.pause()
self.isSessionRunning = false
}
func runSession() {
let configuration = ARWorldTrackingConfiguration()
sceneView.session.run(configuration)
self.isSessionRunning = true
}
// ARSCNViewDelegate:
func sessionWasInterrupted(_ session: ARSession) {
self.isSessionRunning = false
}
func sessionInterruptionEnded(_ session: ARSession) {
self.isSessionRunning = true
}
}