Check if ARSession is running (ARKit) - ios

I have an ARSCNView that can occasionally pause its session depending on the situation. Is there a way to check if its session is running?
Something like this:
class myARView: ARSCNView {
    ...
    func foo() {
        if session.running {
            // do stuff
        }
    }
    ...
}

At the moment there doesn't seem to be a way to check whether the session is running from the ARSession object itself. However, by implementing ARSCNViewDelegate, you get notified whenever your session is interrupted or the interruption ends.
One way to achieve your goal is to set a boolean and update it whenever you pause/resume the session, and check its value in your functions.
class ViewController: UIViewController, ARSCNViewDelegate {

    @IBOutlet var sceneView: ARSCNView!   // the AR view whose session we track

    var isSessionRunning = false

    override func viewDidLoad() {
        super.viewDidLoad()
        // The delegate must be set for the session callbacks below to fire.
        sceneView.delegate = self
    }

    func foo() {
        if self.isSessionRunning {
            // do stuff
        }
    }

    func pauseSession() {
        self.sceneView.session.pause()
        self.isSessionRunning = false
    }

    func runSession() {
        let configuration = ARWorldTrackingConfiguration()
        sceneView.session.run(configuration)
        self.isSessionRunning = true
    }

    // ARSCNViewDelegate:
    func sessionWasInterrupted(_ session: ARSession) {
        self.isSessionRunning = false
    }

    func sessionInterruptionEnded(_ session: ARSession) {
        self.isSessionRunning = true
    }
}
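
If you prefer to keep the flag on the view itself, closer to the original myARView idea, a minimal sketch could wrap the run and pause calls so the flag can never get out of sync (the TrackedARView, runSession and pauseSession names here are just illustrative):

import ARKit

class TrackedARView: ARSCNView {
    // Illustrative flag; only mutated through the wrappers below.
    private(set) var isSessionRunning = false

    func runSession(_ configuration: ARConfiguration, options: ARSession.RunOptions = []) {
        session.run(configuration, options: options)
        isSessionRunning = true
    }

    func pauseSession() {
        session.pause()
        isSessionRunning = false
    }
}

Note that this still doesn't cover interruptions, so the sessionWasInterrupted/sessionInterruptionEnded delegate callbacks above remain the place to update the flag in those cases.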

Related

Stopping and starting a sessionQueue.async

I've got an AVCaptureSession which starts like this on app init:
class FrameExtractor: NSObject {

    private let captureSession = AVCaptureSession()

    deinit {
        captureSession.stopRunning()
    }

    fileprivate override init() {
        super.init()
        sessionQueue.async {
            DispatchQueue.main.async { [weak self] in
                guard let self = self else { return }
                self.configureSession()
                self.captureSession.startRunning()
                self.listener = NotificationCenter.default.publisher(for: UIDevice.orientationDidChangeNotification)
                    .compactMap { _ in (UIApplication.shared.connectedScenes.first as? UIWindowScene)?.interfaceOrientation ?? .unknown }
                    .assign(to: \.orientation, on: self)
            }
        }
    }
}
This works fine. But for some reason the captureSession stops after a certain amount of (completely unrelated) file input/output. So I was thinking maybe I should stop (captureSession.stopRunning()) and start the captureSession before any file I/O, since this class never gets deinitialised, which is where stopRunning lives. My question is: what would be the best practice for stopping this session from outside this class without starting additional threads (queues)?
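
One way to do this without creating any new queues or threads is to give FrameExtractor public stop/start methods that dispatch onto the sessionQueue it already uses. A sketch, assuming the extension lives in the same file so it can see the private properties, and that configureSession() does not need to run again on restart:

extension FrameExtractor {
    // Hypothetical wrappers; they reuse the class's existing sessionQueue,
    // so no additional queues or threads are spawned.
    func suspendCapture() {
        sessionQueue.async { [weak self] in
            guard let self = self, self.captureSession.isRunning else { return }
            self.captureSession.stopRunning()
        }
    }

    func resumeCapture() {
        sessionQueue.async { [weak self] in
            guard let self = self, !self.captureSession.isRunning else { return }
            self.captureSession.startRunning()
        }
    }
}

Calling startRunning()/stopRunning() on the session queue rather than the main queue also matches Apple's guidance, since startRunning() is a blocking call.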

Start WKExtendedRuntimeSession (WKExtendedRuntimeObject was dealloced while running)

I have a problem when I try to start a WKExtendedRuntimeSession:
-[WKExtendedRuntimeSession dealloc]:285: WKExtendedRuntimeObject was dealloced while running. Invalidating the session 46EB2DE0-311C-41D0-93BE-46FE744B685A
class SessionCoordinator: NSObject, WKExtendedRuntimeSessionDelegate {

    let runtimeSession = WKExtendedRuntimeSession()

    override init() {
        super.init()
        runtimeSession.delegate = self
        runtimeSession.start()
    }
    ...
}
I ran across the same issue. Here is my solution.
Create a simple singleton class to manage WKExtendedRuntimeSession
import Foundation
import SwiftUI
import Foundation
import SwiftUI
import WatchKit

class ExtendedRunTime: NSObject, WKExtendedRuntimeSessionDelegate {

    static let shared = ExtendedRunTime()

    let session: WKExtendedRuntimeSession

    override init() {
        // Create the session object.
        session = WKExtendedRuntimeSession()
        super.init()
        // Assign the delegate.
        session.delegate = self
    }

    func extendedRuntimeSession(_ extendedRuntimeSession: WKExtendedRuntimeSession, didInvalidateWith reason: WKExtendedRuntimeSessionInvalidationReason, error: Error?) {
        print("didInvalidateWithReason: \(reason)")
    }

    func extendedRuntimeSessionDidStart(_ extendedRuntimeSession: WKExtendedRuntimeSession) {
        print("extendedRuntimeSessionDidStart")
    }

    func extendedRuntimeSessionWillExpire(_ extendedRuntimeSession: WKExtendedRuntimeSession) {
        print("extendedRuntimeSessionWillExpire")
    }

    func start() {
        session.start()
    }

    func stop() {
        session.invalidate()
    }
}
Initialize the singleton. You can do this in .onAppear() or in your WKApplicationDelegate's applicationDidBecomeActive():
_ = ExtendedRunTime.shared
Start the session where it is needed
ExtendedRunTime.shared.start()
Make sure to stop it when finished
ExtendedRunTime.shared.stop()
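
One caveat: according to Apple's documentation, extended runtime sessions are one-time use, so once a session has been invalidated or has expired it cannot be started again. If you need to start and stop repeatedly, a small variation of the start() method above (assuming session is declared as var instead of let) creates a fresh session each time:

func start() {
    // Sessions are one-time use, so build a new one for every start.
    session = WKExtendedRuntimeSession()
    session.delegate = self
    session.start()
}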

Protocol-Delegate pattern not notifying View Controller

My Model saves data to Firestore. Once that data is saved, I'd like it to alert my ViewController so that a function can be called. However, nothing is being passed to my ViewController.
This is my Model:
protocol ProtocolModel {
    func wasDataSavedSuccessfully(dataSavedSuccessfully: Bool)
}

class Model {

    var delegate: ProtocolModel?

    func createUserAddedRecipe(docId: String,
                               completion: @escaping (Recipe?) -> Void) {
        let db = Firestore.firestore()
        do {
            try db.collection("userFavourites").document(currentUserId).collection("userRecipes").document(docId).setData(from: recipe) { (error) in
                print("Data Saved Successfully") // THIS OUTPUTS TO THE CONSOLE
                // Notify delegate that data was saved to Firestore
                self.delegate?.wasDataSavedSuccessfully(dataSavedSuccessfully: true)
            }
        }
        catch {
            print("Error \(error)")
        }
    }
}
The print("Data Saved Successfully") outputs to the console, but the delegate method right below it doesn't get called.
And this is my ViewController:
class ViewController: UIViewController {
private var model = Model()
override func viewDidLoad() {
super.viewDidLoad()
model.delegate = self
}
}
extension ViewController: ProtocolModel {
func wasDataSavedSuccessfully(dataSavedSuccessfully: Bool) {
if dataSavedSuccessfully == true {
print("Result is true.")
}
else {
print("Result is false.")
}
print("Protocol-Delegate Pattern Works")
}
}
Is there something I'm missing from this pattern? I haven't been able to notice anything different in the articles I've reviewed.
So I tested your code and simulated something like this:
import UIKit
protocol ProtocolModel {
    func wasDataSavedSuccessfully(dataSavedSuccessfully: Bool)
}

class Model {

    var delegate: ProtocolModel?

    // This timer simulates Firestore saving data every 3 seconds, for example
    var timer: Timer?

    func createUserAddedRecipe(docId: String) {
        timer = Timer.scheduledTimer(withTimeInterval: 3, repeats: true, block: { _ in
            self.delegate?.wasDataSavedSuccessfully(dataSavedSuccessfully: true)
        })
    }
}

class NavigationController: UINavigationController {

    var model = Model()

    override func viewDidLoad() {
        super.viewDidLoad()
        model.delegate = self
        // Call this method to kick off the simulated save notifications
        model.createUserAddedRecipe(docId: "exampleId")
    }
}

extension NavigationController: ProtocolModel {
    func wasDataSavedSuccessfully(dataSavedSuccessfully: Bool) {
        print(#function)
    }
}
As the console output shows, the delegate updates the controller that conforms to the protocol.
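
For completeness, here is the same wiring on a plain view controller driving the simulated Model above (the RecipesViewController name is just illustrative); the essential detail is that createUserAddedRecipe is called on the very same instance whose delegate was set:

class RecipesViewController: UIViewController, ProtocolModel {

    private let model = Model()

    override func viewDidLoad() {
        super.viewDidLoad()
        model.delegate = self
        // Start the (simulated) save on the same instance whose delegate was just assigned.
        model.createUserAddedRecipe(docId: "exampleId")
    }

    func wasDataSavedSuccessfully(dataSavedSuccessfully: Bool) {
        print("Saved successfully:", dataSavedSuccessfully)
    }
}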

I don't understand why my object doesn't receive notifications

I've created a custom class that looks like this:
class FooDevice: Device {

    private var controller: FooController?
    private var device: Foo?

    override init() {
        super.init()
        if super.authGranted {
            NotificationCenter.default.addObserver(self, selector: #selector(self.discovered(_:)), name: NSNotification.Name(rawValue: FooDiscover), object: nil)
            NotificationCenter.default.addObserver(self, selector: #selector(self.connected(_:)), name: NSNotification.Name(rawValue: FooConnected), object: nil)
        }
    }

    @objc private func discovered(_ notification: Notification) {
        DDLogVerbose("FOO - discovered - \(notification)")
        super.scanner?.stopScanFor(._FOO)
        // TODO: Call super.connector and connect
    }

    @objc private func connected(_ notification: Notification) {
        DDLogVerbose("FOO - connected - \(notification)")
        // Do more stuff after connect
    }

    func start() {
        DDLogVerbose("FOO - startMeasurement()")
        super.scanner?.scanFor(._FOO)
    }
}
The super class looks like:
class Device: NSObject {

    private let LICENSE = "my_license"

    private var authenticator: SDKAuthenticator?
    internal var scanner: SDKScanner?
    internal var connector: SDKConnector?
    internal var authGranted = false

    override init() {
        super.init()
        authGranted = self.authRequest(LICENSE)
        if authGranted {
            scanner = SDKScanner.getInstance()
            connector = SDKConnector.getInstance()
        } else {
            // TODO: Show error to user
        }
    }

    private func authRequest(_ data: String) -> Bool {
        // Do stuff using authenticator and authenticated; we can assume this returns true
        return status // true
    }
}
In my ViewController I make an instance of FooDevice and start the process. I'm doing it with the following:
class MyViewController: UIViewController {

    // A lot of properties

    override func viewDidLoad() {
        // ViewDidLoad stuff
    }

    @IBAction func connectToDevice(_ sender: Any) {
        // Here I instantiate the object and start the scanning
        let myFooDevice = FooDevice()
        myFooDevice.start()
    }
}
In the console I can see that the scanner starts and finds the Bluetooth device, but the notification isn't captured and the log isn't printed. Also, the notification names are right; I'm sure because the SDK returns the strings.
I don't know what I'm missing. I hope you can shed some light on it.
Your problem is that ARC will clean up your myFooDevice object before any notification can reach it, because it only lives in a local variable inside the action method.
You'd better store it in a property:
class MyViewController: UIViewController {

    var myFooDevice: FooDevice?

    override func viewDidLoad() {
        // ViewDidLoad stuff
    }

    @IBAction func connectToDevice(_ sender: Any) {
        // Here I instantiate the object and start the scanning
        myFooDevice = FooDevice()
        myFooDevice!.start()
    }
}
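
As a side note, if your deployment target is older than iOS 9 you also need to remove selector-based observers yourself when the device object goes away; on iOS 9 and later the system unregisters them automatically, so the following deinit is optional:

deinit {
    // Only strictly required when deploying before iOS 9; harmless otherwise.
    NotificationCenter.default.removeObserver(self)
}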

Swift: Instance member cannot be used on type in ARKitVision example

The Apple ARKitVision example has the following declaration in the ViewController.swift file:
// The view controller that displays the status and "restart experience" UI.
private lazy var statusViewController: StatusViewController = {
return children.lazy.compactMap({ $0 as? StatusViewController }).first!
}()
However, if I copy the same views and source files and incorporate them into another test storyboard/project, I get the error message "Instance member 'children' cannot be used on type 'StatusViewController'".
So, why does this work in the ARKitVision example but not when I set it up myself from scratch? What else is the ARKitVision example doing to get this working? Thanks 😊
The complete class definition for StatusViewController is:
/*
See LICENSE folder for this sample’s licensing information.
Abstract:
Utility class for showing messages above the AR view.
*/
import Foundation
import ARKit
/**
Displayed at the top of the main interface of the app that allows users to see
the status of the AR experience, as well as the ability to control restarting
the experience altogether.
- Tag: StatusViewController
*/
class StatusViewController: UIViewController {
// MARK: - Types
enum MessageType {
case trackingStateEscalation
case planeEstimation
case contentPlacement
case focusSquare
static var all: [MessageType] = [
.trackingStateEscalation,
.planeEstimation,
.contentPlacement,
.focusSquare
]
}
// MARK: - IBOutlets
@IBOutlet weak private var messagePanel: UIVisualEffectView!
@IBOutlet weak private var messageLabel: UILabel!
@IBOutlet weak private var restartExperienceButton: UIButton!
// MARK: - Properties
/// Triggered when the "Restart Experience" button is tapped.
var restartExperienceHandler: () -> Void = {}
/// Seconds before the timer message should fade out. Adjust if the app needs longer transient messages.
private let displayDuration: TimeInterval = 6
// Timer for hiding messages.
private var messageHideTimer: Timer?
private var timers: [MessageType: Timer] = [:]
// MARK: - Message Handling
func showMessage(_ text: String, autoHide: Bool = true) {
// Cancel any previous hide timer.
messageHideTimer?.invalidate()
messageLabel.text = text
// Make sure status is showing.
setMessageHidden(false, animated: true)
if autoHide {
messageHideTimer = Timer.scheduledTimer(withTimeInterval: displayDuration, repeats: false, block: { [weak self] _ in
self?.setMessageHidden(true, animated: true)
})
}
}
func scheduleMessage(_ text: String, inSeconds seconds: TimeInterval, messageType: MessageType) {
cancelScheduledMessage(for: messageType)
let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [weak self] timer in
self?.showMessage(text)
timer.invalidate()
})
timers[messageType] = timer
}
func cancelScheduledMessage(`for` messageType: MessageType) {
timers[messageType]?.invalidate()
timers[messageType] = nil
}
func cancelAllScheduledMessages() {
for messageType in MessageType.all {
cancelScheduledMessage(for: messageType)
}
}
// MARK: - ARKit
func showTrackingQualityInfo(for trackingState: ARCamera.TrackingState, autoHide: Bool) {
showMessage(trackingState.presentationString, autoHide: autoHide)
}
func escalateFeedback(for trackingState: ARCamera.TrackingState, inSeconds seconds: TimeInterval) {
cancelScheduledMessage(for: .trackingStateEscalation)
let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [unowned self] _ in
self.cancelScheduledMessage(for: .trackingStateEscalation)
var message = trackingState.presentationString
if let recommendation = trackingState.recommendation {
message.append(": \(recommendation)")
}
self.showMessage(message, autoHide: false)
})
timers[.trackingStateEscalation] = timer
}
// MARK: - IBActions
@IBAction private func restartExperience(_ sender: UIButton) {
restartExperienceHandler()
}
// MARK: - Panel Visibility
private func setMessageHidden(_ hide: Bool, animated: Bool) {
// The panel starts out hidden, so show it before animating opacity.
messagePanel.isHidden = false
guard animated else {
messagePanel.alpha = hide ? 0 : 1
return
}
UIView.animate(withDuration: 0.2, delay: 0, options: [.beginFromCurrentState], animations: {
self.messagePanel.alpha = hide ? 0 : 1
}, completion: nil)
}
}
extension ARCamera.TrackingState {
var presentationString: String {
switch self {
case .notAvailable:
return "TRACKING UNAVAILABLE"
case .normal:
return "TRACKING NORMAL"
case .limited(.excessiveMotion):
return "TRACKING LIMITED\nExcessive motion"
case .limited(.insufficientFeatures):
return "TRACKING LIMITED\nLow detail"
case .limited(.initializing):
return "Initializing"
case .limited(.relocalizing):
return "Recovering from interruption"
}
}
var recommendation: String? {
switch self {
case .limited(.excessiveMotion):
return "Try slowing down your movement, or reset the session."
case .limited(.insufficientFeatures):
return "Try pointing at a flat surface, or reset the session."
case .limited(.relocalizing):
return "Return to the location where you left off or try resetting the session."
default:
return nil
}
}
}
The definition of the ViewController class is:
/*
See LICENSE folder for this sample’s licensing information.
Abstract:
Main view controller for the ARKitVision sample.
*/
import UIKit
import SpriteKit
import ARKit
import Vision
class ViewController: UIViewController, UIGestureRecognizerDelegate, ARSKViewDelegate, ARSessionDelegate {
@IBOutlet weak var sceneView: ARSKView!
// The view controller that displays the status and "restart experience" UI.
private lazy var statusViewController: StatusViewController = {
return children.lazy.compactMap({ $0 as? StatusViewController }).first!
}()
// MARK: - View controller lifecycle
override func viewDidLoad() {
super.viewDidLoad()
// Configure and present the SpriteKit scene that draws overlay content.
let overlayScene = SKScene()
overlayScene.scaleMode = .aspectFill
sceneView.delegate = self
sceneView.presentScene(overlayScene)
sceneView.session.delegate = self
// Hook up status view controller callback.
statusViewController.restartExperienceHandler = { [unowned self] in
self.restartSession()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Create a session configuration
let configuration = ARWorldTrackingConfiguration()
// Run the view's session
sceneView.session.run(configuration)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// Pause the view's session
sceneView.session.pause()
}
// MARK: - ARSessionDelegate
// Pass camera frames received from ARKit to Vision (when not already processing one)
/// - Tag: ConsumeARFrames
func session(_ session: ARSession, didUpdate frame: ARFrame) {
// Do not enqueue other buffers for processing while another Vision task is still running.
// The camera stream has only a finite amount of buffers available; holding too many buffers for analysis would starve the camera.
guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
return
}
// Retain the image buffer for Vision processing.
self.currentBuffer = frame.capturedImage
classifyCurrentImage()
}
// MARK: - Vision classification
// Vision classification request and model
/// - Tag: ClassificationRequest
private lazy var classificationRequest: VNCoreMLRequest = {
do {
// Instantiate the model from its generated Swift class.
let model = try VNCoreMLModel(for: Inceptionv3().model)
let request = VNCoreMLRequest(model: model, completionHandler: { [weak self] request, error in
self?.processClassifications(for: request, error: error)
})
// Crop input images to square area at center, matching the way the ML model was trained.
request.imageCropAndScaleOption = .centerCrop
// Use CPU for Vision processing to ensure that there are adequate GPU resources for rendering.
request.usesCPUOnly = true
return request
} catch {
fatalError("Failed to load Vision ML model: \(error)")
}
}()
// The pixel buffer being held for analysis; used to serialize Vision requests.
private var currentBuffer: CVPixelBuffer?
// Queue for dispatching vision classification requests
private let visionQueue = DispatchQueue(label: "com.example.apple-samplecode.ARKitVision.serialVisionQueue")
// Run the Vision+ML classifier on the current image buffer.
/// - Tag: ClassifyCurrentImage
private func classifyCurrentImage() {
// Most computer vision tasks are not rotation agnostic so it is important to pass in the orientation of the image with respect to device.
let orientation = CGImagePropertyOrientation(UIDevice.current.orientation)
let requestHandler = VNImageRequestHandler(cvPixelBuffer: currentBuffer!, orientation: orientation)
visionQueue.async {
do {
// Release the pixel buffer when done, allowing the next buffer to be processed.
defer { self.currentBuffer = nil }
try requestHandler.perform([self.classificationRequest])
} catch {
print("Error: Vision request failed with error \"\(error)\"")
}
}
}
// Classification results
private var identifierString = ""
private var confidence: VNConfidence = 0.0
// Handle completion of the Vision request and choose results to display.
/// - Tag: ProcessClassifications
func processClassifications(for request: VNRequest, error: Error?) {
guard let results = request.results else {
print("Unable to classify image.\n\(error!.localizedDescription)")
return
}
// The `results` will always be `VNClassificationObservation`s, as specified by the Core ML model in this project.
let classifications = results as! [VNClassificationObservation]
// Show a label for the highest-confidence result (but only above a minimum confidence threshold).
if let bestResult = classifications.first(where: { result in result.confidence > 0.5 }),
let label = bestResult.identifier.split(separator: ",").first {
identifierString = String(label)
confidence = bestResult.confidence
} else {
identifierString = ""
confidence = 0
}
DispatchQueue.main.async { [weak self] in
self?.displayClassifierResults()
}
}
// Show the classification results in the UI.
private func displayClassifierResults() {
guard !self.identifierString.isEmpty else {
return // No object was classified.
}
let message = String(format: "Detected \(self.identifierString) with %.2f", self.confidence * 100) + "% confidence"
statusViewController.showMessage(message)
}
// MARK: - Tap gesture handler & ARSKViewDelegate
// Labels for classified objects by ARAnchor UUID
private var anchorLabels = [UUID: String]()
// When the user taps, add an anchor associated with the current classification result.
/// - Tag: PlaceLabelAtLocation
@IBAction func placeLabelAtLocation(sender: UITapGestureRecognizer) {
let hitLocationInView = sender.location(in: sceneView)
let hitTestResults = sceneView.hitTest(hitLocationInView, types: [.featurePoint, .estimatedHorizontalPlane])
if let result = hitTestResults.first {
// Add a new anchor at the tap location.
let anchor = ARAnchor(transform: result.worldTransform)
sceneView.session.add(anchor: anchor)
// Track anchor ID to associate text with the anchor after ARKit creates a corresponding SKNode.
anchorLabels[anchor.identifier] = identifierString
}
}
// When an anchor is added, provide a SpriteKit node for it and set its text to the classification label.
/// - Tag: UpdateARContent
func view(_ view: ARSKView, didAdd node: SKNode, for anchor: ARAnchor) {
guard let labelText = anchorLabels[anchor.identifier] else {
fatalError("missing expected associated label for anchor")
}
let label = TemplateLabelNode(text: labelText)
node.addChild(label)
}
// MARK: - AR Session Handling
func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
statusViewController.showTrackingQualityInfo(for: camera.trackingState, autoHide: true)
switch camera.trackingState {
case .notAvailable, .limited:
statusViewController.escalateFeedback(for: camera.trackingState, inSeconds: 3.0)
case .normal:
statusViewController.cancelScheduledMessage(for: .trackingStateEscalation)
// Unhide content after successful relocalization.
setOverlaysHidden(false)
}
}
func session(_ session: ARSession, didFailWithError error: Error) {
guard error is ARError else { return }
let errorWithInfo = error as NSError
let messages = [
errorWithInfo.localizedDescription,
errorWithInfo.localizedFailureReason,
errorWithInfo.localizedRecoverySuggestion
]
// Filter out optional error messages.
let errorMessage = messages.compactMap({ $0 }).joined(separator: "\n")
DispatchQueue.main.async {
self.displayErrorMessage(title: "The AR session failed.", message: errorMessage)
}
}
func sessionWasInterrupted(_ session: ARSession) {
setOverlaysHidden(true)
}
func sessionShouldAttemptRelocalization(_ session: ARSession) -> Bool {
/*
Allow the session to attempt to resume after an interruption.
This process may not succeed, so the app must be prepared
to reset the session if the relocalizing status continues
for a long time -- see `escalateFeedback` in `StatusViewController`.
*/
return true
}
private func setOverlaysHidden(_ shouldHide: Bool) {
sceneView.scene!.children.forEach { node in
if shouldHide {
// Hide overlay content immediately during relocalization.
node.alpha = 0
} else {
// Fade overlay content in after relocalization succeeds.
node.run(.fadeIn(withDuration: 0.5))
}
}
}
private func restartSession() {
statusViewController.cancelAllScheduledMessages()
statusViewController.showMessage("RESTARTING SESSION")
anchorLabels = [UUID: String]()
let configuration = ARWorldTrackingConfiguration()
sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
}
// MARK: - Error handling
private func displayErrorMessage(title: String, message: String) {
// Present an alert informing about the error that has occurred.
let alertController = UIAlertController(title: title, message: message, preferredStyle: .alert)
let restartAction = UIAlertAction(title: "Restart Session", style: .default) { _ in
alertController.dismiss(animated: true, completion: nil)
self.restartSession()
}
alertController.addAction(restartAction)
present(alertController, animated: true, completion: nil)
}
}
What this would indicate is that your StatusViewController class doesn't inherit from UIViewController, since the children property has been available on UIViewController (and therefore on its subclasses) for quite some time.
Are you able to share how you have composed your StatusViewController?
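
For reference, children only contains view controllers that have actually been added as child view controllers of the parent; in the Apple sample this is normally done with a storyboard container view (an embed segue). If you're rebuilding the screen without that embed segue, a rough programmatic equivalent (the "StatusViewController" storyboard identifier here is an assumption) would be:

override func viewDidLoad() {
    super.viewDidLoad()

    // Embed StatusViewController as a child so it shows up in `children`,
    // which is what the lazy statusViewController property relies on.
    // Layout/constraints for status.view are omitted for brevity.
    let status = storyboard!.instantiateViewController(withIdentifier: "StatusViewController") as! StatusViewController
    addChild(status)
    view.addSubview(status.view)
    status.didMove(toParent: self)
}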
