Picture in Picture from AVSampleBufferDisplayLayer not loading - iOS

I'm trying to support Picture in Picture for my iOS app and I need to display the content of a view, not a video. So I tried to use a library to record a view and show the video in an AVSampleBufferDisplayLayer. It works: the content of the view is displayed in the buffer display layer, but when I try to use PIP, only a loading indicator is shown. Here is my code:
import UIKit
import AVKit
class View: UIView {
override class var layerClass: AnyClass {
AVSampleBufferDisplayLayer.self
}
}
class ViewController: UIViewController, AVPictureInPictureSampleBufferPlaybackDelegate {
func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, setPlaying playing: Bool) {
}
func pictureInPictureControllerTimeRangeForPlayback(_ pictureInPictureController: AVPictureInPictureController) -> CMTimeRange {
.init(start: .zero, duration: self.buffers.first?.duration ?? .indefinite)
}
func pictureInPictureControllerIsPlaybackPaused(_ pictureInPictureController: AVPictureInPictureController) -> Bool {
false
}
func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, didTransitionToRenderSize newRenderSize: CMVideoDimensions) {
}
func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, skipByInterval skipInterval: CMTime, completion completionHandler: @escaping () -> Void) {
}
@IBOutlet weak var playerView: View!
@IBOutlet weak var textView: UITextView!
var pipController: AVPictureInPictureController?
var glimpse: Glimpse!
var isRecording = false
var buffers = [CMSampleBuffer]()
@IBAction func pip() {
pipController?.startPictureInPicture()
}
func startRecording() {
glimpse = Glimpse()
glimpse.startRecording(textView, withCallback: { url in
if let url = url {
do {
DispatchQueue.main.async {
(self.playerView.layer as! AVSampleBufferDisplayLayer).flush()
if self.pipController == nil {
self.pipController = AVPictureInPictureController(contentSource: .init(sampleBufferDisplayLayer: self.playerView.layer as! AVSampleBufferDisplayLayer, playbackDelegate: self))
self.pipController?.requiresLinearPlayback = true
}
}
let reader = try AVAssetReader(asset: AVAsset(url: url))
let output = AVAssetReaderTrackOutput(track: reader.asset.tracks.first!, outputSettings: nil)
reader.add(output)
reader.startReading()
while let buffer = output.copyNextSampleBuffer() {
self.buffers.append(buffer)
}
try FileManager.default.removeItem(at: url)
} catch {
print(error)
}
}
})
isRecording = true
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
var i = 0
_ = Timer.scheduledTimer(withTimeInterval: 1, repeats: true, block: { _ in
self.textView.text += "Hello World! (\(i))\n"
if self.isRecording {
self.glimpse.stop()
self.startRecording()
}
i += 1
})
let layer = playerView.layer as! AVSampleBufferDisplayLayer
layer.requestMediaDataWhenReady(on: .global()) {
if let buffer = self.buffers.first {
layer.enqueue(buffer)
self.buffers.remove(at: 0)
}
}
startRecording()
}
}
In this example, I modify the content of a UITextView every second and I record a video of it. Then I extract the CMSampleBuffers to display them in the AVSampleBufferDisplayLayer.
I attached two screenshots, the first shows how the content of the text view is successfully shown in the AVSampleBufferDisplayLayer and the second shows how nothing is displayed when PIP is enabled.
What am I doing wrong?

I have experienced the same behavior when returning an incorrect time range for playback. Make sure you return .positiveInfinity for the duration, otherwise your layer will be covered with the loading indicator.
func pictureInPictureControllerTimeRangeForPlayback(_ pictureInPictureController: AVPictureInPictureController) -> CMTimeRange {
return CMTimeRange(start: .negativeInfinity, duration: .positiveInfinity)
}
Documented here:
https://developer.apple.com/documentation/avkit/avpictureinpicturesamplebufferplaybackdelegate/3750337-pictureinpicturecontrollertimera?changes=la

I have something like this working in Secure ShellFish and it made a difference how the CMSampleBuffers were created.
I had to create the CMSampleBuffer from a CVPixelBuffer that was IOSurface compatible and I had to mark the CMSampleBuffer with kCMSampleAttachmentKey_DisplayImmediately.
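For illustration, here is a minimal sketch of those two points, not the actual Secure ShellFish code: it creates an IOSurface-backed CVPixelBuffer, wraps it in a CMSampleBuffer, and sets the display-immediately attachment. The makeDisplayableSampleBuffer name and the drawing step are assumptions; each buffer produced this way could then be enqueued on the AVSampleBufferDisplayLayer.
import AVFoundation
import CoreVideo

// Hypothetical helper: builds a CMSampleBuffer suitable for an AVSampleBufferDisplayLayer.
func makeDisplayableSampleBuffer(width: Int, height: Int) -> CMSampleBuffer? {
    // 1. Create a CVPixelBuffer that is IOSurface compatible.
    let attributes: [String: Any] = [
        kCVPixelBufferIOSurfacePropertiesKey as String: [:] as [String: Any],
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]
    var pixelBuffer: CVPixelBuffer?
    guard CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA,
                              attributes as CFDictionary, &pixelBuffer) == kCVReturnSuccess,
          let imageBuffer = pixelBuffer else { return nil }

    // ... draw the view's contents into imageBuffer here ...

    // 2. Wrap the pixel buffer in a CMSampleBuffer with a current-time timestamp.
    var formatDescription: CMVideoFormatDescription?
    CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                                 imageBuffer: imageBuffer,
                                                 formatDescriptionOut: &formatDescription)
    guard let format = formatDescription else { return nil }
    var timing = CMSampleTimingInfo(duration: .invalid,
                                    presentationTimeStamp: CMClockGetTime(CMClockGetHostTimeClock()),
                                    decodeTimeStamp: .invalid)
    var sampleBuffer: CMSampleBuffer?
    CMSampleBufferCreateReadyWithImageBuffer(allocator: kCFAllocatorDefault,
                                             imageBuffer: imageBuffer,
                                             formatDescription: format,
                                             sampleTiming: &timing,
                                             sampleBufferOut: &sampleBuffer)

    // 3. Mark the sample buffer so the layer displays it immediately.
    if let sampleBuffer = sampleBuffer,
       let attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true) {
        let dictionary = unsafeBitCast(CFArrayGetValueAtIndex(attachments, 0),
                                       to: CFMutableDictionary.self)
        CFDictionarySetValue(dictionary,
                             Unmanaged.passUnretained(kCMSampleAttachmentKey_DisplayImmediately).toOpaque(),
                             Unmanaged.passUnretained(kCFBooleanTrue).toOpaque())
    }
    return sampleBuffer
}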

Related

How to live stream an application screen having a camera view with some other UIViews over the camera view

Actually, I want to broadcast a live match with some overlays on it, like sponsor images in the top corners of the screen and a scorecard at the bottom of the screen. Can someone help me or guide me on a way of implementing this? I use this pod (HaishinKit), but it is not serving the purpose. I use the rtmpStream.attachScreen function for broadcasting my UIView, but this function is not picking up my camera view (AVCaptureVideoPreviewLayer); other than that, the scorecard and sponsor images are broadcasting. I want to broadcast my camera screen along with the scorecard and other images, along with the audio.
import UIKit
import HaishinKit
import AVFoundation
import VideoToolbox
import Loaf
import WebKit
class BroadcastViewController: UIViewController, RTMPStreamDelegate {
// Camera Preview View
@IBOutlet private weak var previewView: UIView!
@IBOutlet weak var videoView: UIView!
// Camera Selector
@IBOutlet weak var cameraSelector: UISegmentedControl!
@IBOutlet weak var webview: WKWebView!
// Go Live Button
@IBOutlet weak var startStopButton: UIButton!
// FPS and Bitrate Labels
@IBOutlet weak var fpsLabel: UILabel!
@IBOutlet weak var bitrateLabel: UILabel!
// RTMP Connection & RTMP Stream
private var rtmpConnection = RTMPConnection()
private var rtmpStream: RTMPStream!
// Default Camera
private var defaultCamera: AVCaptureDevice.Position = .back
// Flag indicates if we should be attempting to go live
private var liveDesired = false
// Reconnect attempt tracker
private var reconnectAttempt = 0
// The RTMP Stream key to broadcast to.
public var streamKey: String!
// The Preset to use
public var preset: Preset!
// A tracker of the last time we changed the bitrate in ABR
private var lastBwChange = 0
// The RTMP endpoint
let rtmpEndpoint = "rtmps://live-api-s.facebook.com:443/rtmp/"
//Camera Capture required properties
var videoDataOutput: AVCaptureVideoDataOutput!
var videoDataOutputQueue: DispatchQueue!
var previewLayer:AVCaptureVideoPreviewLayer!
var captureDevice : AVCaptureDevice!
let session = AVCaptureSession()
var isPublic = false
// Some basic presets for live streaming
enum Preset {
case hd_1080p_30fps_5mbps
case hd_720p_30fps_3mbps
case sd_540p_30fps_2mbps
case sd_360p_30fps_1mbps
}
// An encoding profile - width, height, framerate, video bitrate
private class Profile {
public var width : Int = 0
public var height : Int = 0
public var frameRate : Int = 0
public var bitrate : Int = 0
init(width: Int, height: Int, frameRate: Int, bitrate: Int) {
self.width = width
self.height = height
self.frameRate = frameRate
self.bitrate = bitrate
}
}
// Converts a Preset to a Profile
private func presetToProfile(preset: Preset) -> Profile {
switch preset {
case .hd_1080p_30fps_5mbps:
return Profile(width: 1920, height: 1080, frameRate: 30, bitrate: 5000000)
case .hd_720p_30fps_3mbps:
return Profile(width: 1280, height: 720, frameRate: 30, bitrate: 3000000)
case .sd_540p_30fps_2mbps:
return Profile(width: 960, height: 540, frameRate: 30, bitrate: 2000000)
case .sd_360p_30fps_1mbps:
return Profile(width: 640, height: 360, frameRate: 30, bitrate: 1000000)
}
}
// Configures the live stream
private func configureStream(preset: Preset) {
let profile = presetToProfile(preset: preset)
// Configure the capture settings from the camera
rtmpStream.captureSettings = [
.sessionPreset: AVCaptureSession.Preset.hd1920x1080,
.continuousAutofocus: true,
.continuousExposure: true,
.fps: profile.frameRate
]
// Get the orientation of the app, and set the video orientation appropriately
if #available(iOS 13.0, *) {
if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation {
// let videoOrientation = DeviceUtil.videoOrientation(by: orientation)
rtmpStream.orientation = .landscapeRight
rtmpStream.videoSettings = [
.width: (orientation.isPortrait) ? profile.height : profile.width,
.height: (orientation.isPortrait) ? profile.width : profile.height,
.bitrate: profile.bitrate,
.profileLevel: kVTProfileLevel_H264_Main_AutoLevel,
.maxKeyFrameIntervalDuration: 2, // 2 seconds
]
}
} else {
// Fallback on earlier versions
}
// Configure the RTMP audio stream
// rtmpStream.audioSettings = [
// .bitrate: 128000 // Always use 128kbps
// ]
}
// Publishes the live stream
private func publishStream() {
print("Calling publish()")
rtmpStream.attachScreen(ScreenCaptureSession(viewToCapture: previewView))
rtmpStream.publish("minestreamkey")
DispatchQueue.main.async {
self.startStopButton.setTitle("Stop Streaming!", for: .normal)
}
}
// Triggers an attempt to connect to an RTMP hostname
private func connectRTMP() {
print("Calling connect()")
rtmpConnection.connect(rtmpEndpoint)
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// videoView.startSession()
}
override func viewDidLoad() {
super.viewDidLoad()
self.setupAVCapture()
previewView.bringSubviewToFront(webview)
webview.load(NSURLRequest(url: NSURL(string: "https://graphics.crickslab.com/scorecard/0865e840-f147-11eb-95cb-65228ef0512c/Blitzz-vs-Crickslab-Officials-Fri30Jul2021-1201AM-")! as URL) as URLRequest)
print("Broadcast View Controller Init")
print("Stream Key: " + "FB-3940543509404805-0-AbxeU6r48NpFcasH")
// Work out the orientation of the device, and set this on the RTMP Stream
rtmpStream = RTMPStream(connection: rtmpConnection)
// Get the orientation of the app, and set the video orientation appropriately
if #available(iOS 13.0, *) {
if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation {
let videoOrientation = DeviceUtil.videoOrientation(by: orientation)
rtmpStream.orientation = videoOrientation!
}
} else {
// Fallback on earlier versions
}
// And a listener for orientation changes
// Note: Changing the orientation once the stream has been started will not change the orientation of the live stream, only the preview.
NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
// Configure the encoder profile
configureStream(preset: self.preset)
// Attach to the default audio device
// rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
// print(error.description)
// }
//
// // Attach to the default camera
// rtmpStream.attachCamera(DeviceUtil.device(withPosition: defaultCamera)) { error in
// print(error.description)
// }
// Register a tap gesture recogniser so we can use tap to focus
let tap = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
previewView.addGestureRecognizer(tap)
previewView.isUserInteractionEnabled = true
// Attach the preview view
// previewView?.attachStream(rtmpStream)
// Add event listeners for RTMP status changes and IO Errors
rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.addEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
rtmpStream.delegate = self
startStopButton.setTitle("Go Live!", for: .normal)
}
// 👉📱 Tap to focus / exposure
@objc func handleTap(_ sender: UITapGestureRecognizer) {
if sender.state == UIGestureRecognizer.State.ended {
let point = sender.location(in: previewView)
let pointOfInterest = CGPoint(x: point.x / previewView.bounds.size.width, y: point.y / previewView.bounds.size.height)
rtmpStream.setPointOfInterest(pointOfInterest, exposure: pointOfInterest)
}
}
// Triggered when the user tries to change camera
@IBAction func changeCameraToggle(_ sender: UISegmentedControl) {
switch cameraSelector.selectedSegmentIndex
{
case 0:
rtmpStream.attachCamera(DeviceUtil.device(withPosition: AVCaptureDevice.Position.back))
case 1:
rtmpStream.attachCamera(DeviceUtil.device(withPosition: AVCaptureDevice.Position.front))
default:
rtmpStream.attachCamera(DeviceUtil.device(withPosition: defaultCamera))
}
}
// Triggered when the user taps the go live button
@IBAction func goLiveButton(_ sender: UIButton) {
print("Go Live Button tapped!")
if !liveDesired {
if rtmpConnection.connected {
// If we're already connected to the RTMP server, we can just call publish() to start the stream
publishStream()
} else {
// Otherwise, we need to setup the RTMP connection and wait for a callback before we can safely
// call publish() to start the stream
connectRTMP()
}
// Modify application state to streaming
liveDesired = true
startStopButton.setTitle("Connecting...", for: .normal)
} else {
// Unpublish the live stream
rtmpStream.close()
// Modify application state to idle
liveDesired = false
startStopButton.setTitle("Go Live!", for: .normal)
}
}
// Called when the RTMPStream or RTMPConnection changes status
@objc
private func rtmpStatusHandler(_ notification: Notification) {
print("RTMP Status Handler called.")
let e = Event.from(notification)
guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
return
}
// Send a nicely styled notification about the RTMP Status
var loafStyle = Loaf.State.info
switch code {
case RTMPConnection.Code.connectSuccess.rawValue, RTMPStream.Code.publishStart.rawValue, RTMPStream.Code.unpublishSuccess.rawValue:
loafStyle = Loaf.State.success
case RTMPConnection.Code.connectFailed.rawValue:
loafStyle = Loaf.State.error
case RTMPConnection.Code.connectClosed.rawValue:
loafStyle = Loaf.State.warning
default:
break
}
DispatchQueue.main.async {
Loaf("RTMP Status: " + code, state: loafStyle, location: .top, sender: self).show(.short)
}
switch code {
case RTMPConnection.Code.connectSuccess.rawValue:
reconnectAttempt = 0
if liveDesired {
// Publish our stream to our stream key
publishStream()
}
case RTMPConnection.Code.connectFailed.rawValue, RTMPConnection.Code.connectClosed.rawValue:
print("RTMP Connection was not successful.")
// Retry the connection if "live" is still the desired state
if liveDesired {
reconnectAttempt += 1
DispatchQueue.main.async {
self.startStopButton.setTitle("Reconnect attempt " + String(self.reconnectAttempt) + " (Cancel)" , for: .normal)
}
// Retries the RTMP connection every 5 seconds
DispatchQueue.main.asyncAfter(deadline: .now() + 5) {
self.connectRTMP()
}
}
default:
break
}
}
// Called when there's an RTMP Error
@objc
private func rtmpErrorHandler(_ notification: Notification) {
print("RTMP Error Handler called.")
}
// Called when the device changes rotation
@objc
private func on(_ notification: Notification) {
if #available(iOS 13.0, *) {
if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation {
let videoOrientation = DeviceUtil.videoOrientation(by: orientation)
rtmpStream.orientation = videoOrientation!
// Do not change the output rotation if the stream has already started.
if liveDesired == false {
let profile = presetToProfile(preset: self.preset)
rtmpStream.videoSettings = [
.width: (orientation.isPortrait) ? profile.height : profile.width,
.height: (orientation.isPortrait) ? profile.width : profile.height
]
}
}
} else {
// Fallback on earlier versions
}
}
// Button tapped to return to the configuration screen
@IBAction func closeButton(_ sender: Any) {
self.dismiss(animated: true, completion: nil)
}
// RTMPStreamDelegate callbacks
func rtmpStreamDidClear(_ stream: RTMPStream) {
}
// Statistics callback
func rtmpStream(_ stream: RTMPStream, didStatics connection: RTMPConnection) {
DispatchQueue.main.async {
self.fpsLabel.text = String(stream.currentFPS) + " fps"
self.bitrateLabel.text = String((connection.currentBytesOutPerSecond / 125)) + " kbps"
}
}
// Insufficient bandwidth callback
func rtmpStream(_ stream: RTMPStream, didPublishInsufficientBW connection: RTMPConnection) {
print("ABR: didPublishInsufficientBW")
// If we last changed bandwidth over 10 seconds ago
if (Int(NSDate().timeIntervalSince1970) - lastBwChange) > 5 {
print("ABR: Will try to change bitrate")
// Reduce bitrate by 30% every 10 seconds
let b = Double(stream.videoSettings[.bitrate] as! UInt32) * Double(0.7)
print("ABR: Proposed bandwidth: " + String(b))
stream.videoSettings[.bitrate] = b
lastBwChange = Int(NSDate().timeIntervalSince1970)
DispatchQueue.main.async {
Loaf("Insuffient Bandwidth, changing video bandwidth to: " + String(b), state: Loaf.State.warning, location: .top, sender: self).show(.short)
}
} else {
print("ABR: Still giving grace time for last bandwidth change")
}
}
// Today this example doesn't attempt to increase bandwidth to find a sweet spot.
// An implementation might be to gently increase bandwidth by a few percent, but that's hard without getting into an aggressive cycle.
func rtmpStream(_ stream: RTMPStream, didPublishSufficientBW connection: RTMPConnection) {
}
}
// AVCaptureVideoDataOutputSampleBufferDelegate protocol and related methods
extension BroadcastViewController: AVCaptureVideoDataOutputSampleBufferDelegate{
func setupAVCapture(){
session.sessionPreset = AVCaptureSession.Preset.vga640x480
guard let device = AVCaptureDevice
.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera,
for: .video,
position: AVCaptureDevice.Position.back) else {
return
}
captureDevice = device
beginSession()
}
func beginSession(){
var deviceInput: AVCaptureDeviceInput!
do {
deviceInput = try AVCaptureDeviceInput(device: captureDevice)
guard deviceInput != nil else {
print("error: cant get deviceInput")
return
}
if self.session.canAddInput(deviceInput){
self.session.addInput(deviceInput)
}
videoDataOutput = AVCaptureVideoDataOutput()
videoDataOutput.alwaysDiscardsLateVideoFrames=true
videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
videoDataOutput.setSampleBufferDelegate(self, queue:self.videoDataOutputQueue)
if session.canAddOutput(self.videoDataOutput){
session.addOutput(self.videoDataOutput)
}
videoDataOutput.connection(with: .video)?.isEnabled = true
previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect
// let rootLayer :CALayer = self.previewView.layer
self.videoView.layer.masksToBounds=true
previewLayer.frame = videoView.bounds
videoView.layer.addSublayer(self.previewLayer)
session.startRunning()
} catch let error as NSError {
deviceInput = nil
print("error: \(error.localizedDescription)")
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// do stuff here
if let description = CMSampleBufferGetFormatDescription(sampleBuffer) {
let dimensions = CMVideoFormatDescriptionGetDimensions(description)
rtmpStream.videoSettings = [
.width: dimensions.width,
.height: dimensions.height ,
.profileLevel: kVTProfileLevel_H264_Baseline_AutoLevel
]
}
rtmpStream.appendSampleBuffer(sampleBuffer, withType: .video)
}
// clean up AVCapture
func stopCamera(){
session.stopRunning()
}
}
I have found a way to live stream the camera view with overlays on it by creating two RTMPStream objects: one for attaching the camera, and a second one for attachScreen. The following is the code.
import AVFoundation
import HaishinKit
import Photos
import UIKit
import VideoToolbox
import WebKit
final class ExampleRecorderDelegate: DefaultAVRecorderDelegate {
static let `default` = ExampleRecorderDelegate()
override func didFinishWriting(_ recorder: AVRecorder) {
guard let writer: AVAssetWriter = recorder.writer else {
return
}
PHPhotoLibrary.shared().performChanges({() -> Void in
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: writer.outputURL)
}, completionHandler: { _, error -> Void in
do {
try FileManager.default.removeItem(at: writer.outputURL)
} catch {
print(error)
}
})
}
}
final class LiveViewController: UIViewController {
private static let maxRetryCount: Int = 5
@IBOutlet private weak var lfView: MTHKView!
@IBOutlet private weak var currentFPSLabel: UILabel!
@IBOutlet private weak var publishButton: UIButton!
@IBOutlet private weak var pauseButton: UIButton!
@IBOutlet private weak var videoBitrateLabel: UILabel!
@IBOutlet private weak var videoBitrateSlider: UISlider!
@IBOutlet private weak var audioBitrateLabel: UILabel!
@IBOutlet private weak var zoomSlider: UISlider!
@IBOutlet private weak var audioBitrateSlider: UISlider!
@IBOutlet private weak var fpsControl: UISegmentedControl!
@IBOutlet private weak var effectSegmentControl: UISegmentedControl!
@IBOutlet weak var webview: WKWebView!
private var rtmpConnection = RTMPConnection()
private var rtmpStream: RTMPStream!
private var rtmpStreamLayer: RTMPStream!
private var sharedObject: RTMPSharedObject!
private var currentEffect: VideoEffect?
private var currentPosition: AVCaptureDevice.Position = .back
private var retryCount: Int = 0
override func viewDidLoad() {
super.viewDidLoad()
rtmpStream = RTMPStream(connection: rtmpConnection)
rtmpStreamLayer = RTMPStream(connection: rtmpConnection)
if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
rtmpStream.orientation = orientation
}
rtmpStream.captureSettings = [
.sessionPreset: AVCaptureSession.Preset.hd1280x720,
.continuousAutofocus: true,
.continuousExposure: true
// .preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode.auto
]
rtmpStreamLayer.captureSettings = [
.sessionPreset: AVCaptureSession.Preset.hd1280x720,
.continuousAutofocus: true,
.continuousExposure: true
// .preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode.auto
]
rtmpStream.videoSettings = [
.width: 720,
.height: 1280
]
rtmpStream.mixer.recorder.delegate = ExampleRecorderDelegate.default
rtmpStreamLayer.videoSettings = [
.width: 720,
.height: 1280
]
rtmpStream.mixer.recorder.delegate = ExampleRecorderDelegate.default
videoBitrateSlider?.value = Float(RTMPStream.defaultVideoBitrate) / 1000
audioBitrateSlider?.value = Float(RTMPStream.defaultAudioBitrate) / 1000
NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didEnterBackground(_:)), name: UIApplication.didEnterBackgroundNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didBecomeActive(_:)), name: UIApplication.didBecomeActiveNotification, object: nil)
}
override func viewWillAppear(_ animated: Bool) {
logger.info("viewWillAppear")
super.viewWillAppear(animated)
rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
logger.warn(error.description)
}
rtmpStream.attachScreen(ScreenCaptureSession(viewToCapture: view))
rtmpStream.attachCamera(DeviceUtil.device(withPosition: currentPosition)) { error in
logger.warn(error.description)
}
rtmpStreamLayer.attachScreen(ScreenCaptureSession(viewToCapture: view))
rtmpStreamLayer.receiveAudio = false
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
lfView?.attachStream(rtmpStream)
lfView?.attachStream(rtmpStreamLayer)
}
override func viewWillDisappear(_ animated: Bool) {
logger.info("viewWillDisappear")
super.viewWillDisappear(animated)
rtmpStream.removeObserver(self, forKeyPath: "currentFPS")
rtmpStream.close()
rtmpStream.dispose()
}
@IBAction func rotateCamera(_ sender: UIButton) {
logger.info("rotateCamera")
let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
rtmpStream.captureSettings[.isVideoMirrored] = position == .front
rtmpStream.attachCamera(DeviceUtil.device(withPosition: position)) { error in
logger.warn(error.description)
}
currentPosition = position
}
@IBAction func toggleTorch(_ sender: UIButton) {
rtmpStream.torch.toggle()
}
@IBAction func on(slider: UISlider) {
if slider == audioBitrateSlider {
audioBitrateLabel?.text = "audio \(Int(slider.value))/kbps"
rtmpStream.audioSettings[.bitrate] = slider.value * 1000
}
if slider == videoBitrateSlider {
videoBitrateLabel?.text = "video \(Int(slider.value))/kbps"
rtmpStream.videoSettings[.bitrate] = slider.value * 1000
}
if slider == zoomSlider {
rtmpStream.setZoomFactor(CGFloat(slider.value), ramping: true, withRate: 5.0)
}
}
@IBAction func on(pause: UIButton) {
rtmpStream.paused.toggle()
}
@IBAction func on(close: UIButton) {
self.dismiss(animated: true, completion: nil)
}
@IBAction func on(publish: UIButton) {
if publish.isSelected {
UIApplication.shared.isIdleTimerDisabled = false
rtmpConnection.close()
rtmpConnection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
publish.setTitle("●", for: [])
} else {
UIApplication.shared.isIdleTimerDisabled = true
rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.addEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
rtmpConnection.connect(Preference.defaultInstance.uri!)
publish.setTitle("■", for: [])
}
publish.isSelected.toggle()
}
@objc
private func rtmpStatusHandler(_ notification: Notification) {
let e = Event.from(notification)
guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
return
}
logger.info(code)
switch code {
case RTMPConnection.Code.connectSuccess.rawValue:
retryCount = 0
rtmpStream!.publish("yourstreamkey")
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2)
{
self.rtmpStreamLayer!.publish("yourstreamkey")
}
// sharedObject!.connect(rtmpConnection)
case RTMPConnection.Code.connectFailed.rawValue, RTMPConnection.Code.connectClosed.rawValue:
guard retryCount <= LiveViewController.maxRetryCount else {
return
}
Thread.sleep(forTimeInterval: pow(2.0, Double(retryCount)))
rtmpConnection.connect(Preference.defaultInstance.uri!)
retryCount += 1
default:
break
}
}
@objc
private func rtmpErrorHandler(_ notification: Notification) {
logger.error(notification)
rtmpConnection.connect(Preference.defaultInstance.uri!)
}
func tapScreen(_ gesture: UIGestureRecognizer) {
if let gestureView = gesture.view, gesture.state == .ended {
let touchPoint: CGPoint = gesture.location(in: gestureView)
let pointOfInterest = CGPoint(x: touchPoint.x / gestureView.bounds.size.width, y: touchPoint.y / gestureView.bounds.size.height)
print("pointOfInterest: \(pointOfInterest)")
rtmpStream.setPointOfInterest(pointOfInterest, exposure: pointOfInterest)
}
}
@IBAction private func onFPSValueChanged(_ segment: UISegmentedControl) {
switch segment.selectedSegmentIndex {
case 0:
rtmpStream.captureSettings[.fps] = 15.0
case 1:
rtmpStream.captureSettings[.fps] = 30.0
case 2:
rtmpStream.captureSettings[.fps] = 60.0
default:
break
}
}
@IBAction private func onEffectValueChanged(_ segment: UISegmentedControl) {
if let currentEffect: VideoEffect = currentEffect {
_ = rtmpStream.unregisterVideoEffect(currentEffect)
}
switch segment.selectedSegmentIndex {
case 1:
currentEffect = MonochromeEffect()
_ = rtmpStream.registerVideoEffect(currentEffect!)
case 2:
currentEffect = PronamaEffect()
_ = rtmpStream.registerVideoEffect(currentEffect!)
default:
break
}
}
@objc
private func on(_ notification: Notification) {
guard let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) else {
return
}
rtmpStream.orientation = orientation
}
@objc
private func didEnterBackground(_ notification: Notification) {
// rtmpStream.receiveVideo = false
}
@objc
private func didBecomeActive(_ notification: Notification) {
// rtmpStream.receiveVideo = true
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
if Thread.isMainThread {
currentFPSLabel?.text = "\(rtmpStream.currentFPS)"
}
}
}
extension LiveViewController : UIWebViewDelegate
{
func webViewDidFinishLoad(_ webView: UIWebView) {
webview.scrollView.zoomScale = 10
}
}

How to fix AVCaptureVideoPreviewLayer being smaller than the actual camera view

I'm working on an app that periodically takes pictures as part of a research job, but I'm new to OOP and Swift and am a little confused about what can cause this issue. I think it's because the UIView's size is smaller than the camera view's size, so the image is getting cut off when displayed, and I'm not sure how to make it adapt to the UIView's dimensions. Here's my code:
[Screenshots: video preview and captured image]
import UIKit
import AVFoundation
class SecondViewController: UIViewController {
//Creates session between camera input and data output
let session = AVCaptureSession()
var camera : AVCaptureDevice?
var cameraPreviewLayer : AVCaptureVideoPreviewLayer?
var cameraCaptureOutput : AVCapturePhotoOutput?
//Connects between this code document and Story Board
@IBOutlet weak var Time: UITextField!
@IBOutlet weak var Start: UIButton!
@IBOutlet weak var CameraView: UIView!
//Misc Variables
var alert: UIAlertController!
var sPhoto : UIImage?
var completionHandler : ((UIImage?) -> Void)?
var timerCount:Bool = false
var timer:Timer = Timer()
override func viewDidLoad() {
initializeCaptureSession()
super.viewDidLoad()
//assigns delegates to self
Time.delegate = self
}
//Brings down Time keypad when any area other than keypad is touched
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
Time.resignFirstResponder()
}
func initializeCaptureSession(){
//Sets the session's preset
session.sessionPreset = AVCaptureSession.Preset.photo
//Initialize Camera
camera = AVCaptureDevice.default(for: AVMediaType.video)
do{
if(camera == nil){
print("No Camera Detected")
}
else{
let cameraCaptureInput = try AVCaptureDeviceInput(device: camera!)
//Sets Camera Output
cameraCaptureOutput = AVCapturePhotoOutput()
session.addInput(cameraCaptureInput)
session.addOutput(cameraCaptureOutput!)
}
} catch{
print(error.localizedDescription)
}
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
let rootLayer: CALayer = self.CameraView.layer
rootLayer.masksToBounds=false
cameraPreviewLayer?.frame = rootLayer.bounds
rootLayer.addSublayer(self.cameraPreviewLayer!)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
session.startRunning()
}
//Function that creates alert that dismisses
func notifyUser(message: String) -> Void
{
let alert = UIAlertController(title: "", message: message, preferredStyle: UIAlertController.Style.alert)
present(alert, animated: true, completion: nil)
DispatchQueue.main.asyncAfter(deadline: .now() + 1) { [unowned self] in
self.dismiss(animated: true)
}
}
@IBAction func StartPressed(_ sender: Any) {
if(Time.text!.isEmpty == true){
notifyUser(message: "Please enter an interval")
}
else{
if(timerCount){
timerCount = false
Start.setTitle("Start", for: .normal)
Start.backgroundColor = UIColor.green
timer.invalidate()
}
else{
timerCount = true
Start.setTitle("Stop", for: .normal)
Start.backgroundColor = UIColor.red
timer = Timer.scheduledTimer(withTimeInterval: Double(Time.text!)!, repeats: true) { [weak self] timer in
self?.takePicture()
}
}
}
}
func takePicture() {
notifyUser(message: "Image Captured")
//This is where you declare settings for the camera
let settings = AVCapturePhotoSettings()
settings.flashMode = .auto
//Actually takes the photo
cameraCaptureOutput?.capturePhoto(with: settings, delegate: self)
}
}
//Extensions
extension SecondViewController : UITextFieldDelegate{
func textFieldShouldReturn(_ textField: UITextField) -> Bool {
textField.resignFirstResponder()
return true
}
}
extension SecondViewController : AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
//If photo failed to be captured
guard error == nil else{
print("Failed to capture photo")
print(error?.localizedDescription as Any)
return
}
//If pixel buffer could not be converted to image data
guard let imageData = photo.fileDataRepresentation() else {
print("Fail to convert image data to UIImage")
return
}
//If the UIImage could not be initialized with image data
guard let capturedImage = UIImage.init(data: imageData, scale: 1.0) else{
print("fail to convert image data to UIImage")
return
}
UIImageWriteToSavedPhotosAlbum(capturedImage, nil, nil, nil)
//displayCapturedPhoto(capturedPhoto: imageToSave)
}
}
I've seen on other posts that AVLayerVideoGravity.resizeAspectFill has fixed it for some users, so any explanation as to why that's not working would be extremely helpful. Much thanks in advance!
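One thing that may be worth checking, offered as an assumption based on the code above rather than a confirmed fix: cameraPreviewLayer's frame is set inside initializeCaptureSession(), which runs from viewDidLoad before Auto Layout has given CameraView its final size, so the layer can end up smaller than the view. A minimal sketch, reusing the outlet and property names from the question, would be to re-apply the frame after every layout pass:
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    // Keep the preview layer matched to CameraView's final, laid-out size.
    cameraPreviewLayer?.frame = CameraView.bounds
}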

Metronome iOS Swift beat visuals lag

I'm trying to create a metronome app by implementing the sample code provided by Apple. Everything works fine, but I'm seeing a delay in the beat visuals; they are not properly synchronised with the player time. Here is the sample code provided by Apple:
let secondsPerBeat = 60.0 / tempoBPM
let samplesPerBeat = Float(secondsPerBeat * Float(bufferSampleRate))
let beatSampleTime: AVAudioFramePosition = AVAudioFramePosition(nextBeatSampleTime)
let playerBeatTime: AVAudioTime = AVAudioTime(sampleTime: AVAudioFramePosition(beatSampleTime), atRate: bufferSampleRate)
// This time is relative to the player's start time.
player.scheduleBuffer(soundBuffer[bufferNumber]!, at: playerBeatTime, options: AVAudioPlayerNodeBufferOptions(rawValue: 0), completionHandler: {
self.syncQueue!.sync() {
self.beatsScheduled -= 1
self.bufferNumber ^= 1
self.scheduleBeats()
}
})
beatsScheduled += 1
if (!playerStarted) {
// We defer the starting of the player so that the first beat will play precisely
// at player time 0. Having scheduled the first beat, we need the player to be running
// in order for nodeTimeForPlayerTime to return a non-nil value.
player.play()
playerStarted = true
}
let callbackBeat = beatNumber
beatNumber += 1
// Calculate the beat time for animating the UI based on the player beat time.
let nodeBeatTime: AVAudioTime = player.nodeTime(forPlayerTime: playerBeatTime)!
let output: AVAudioIONode = engine.outputNode
let latencyHostTicks: UInt64 = AVAudioTime.hostTime(forSeconds: output.presentationLatency)
// Calculate the final dispatch time which will update the UI at particular intervals
let dispatchTime = DispatchTime(uptimeNanoseconds: nodeBeatTime.hostTime + latencyHostTicks)
// Visuals.
DispatchQueue.global(qos: .userInitiated).asyncAfter(deadline: dispatchTime) {
if (self.isPlaying) {
// send current call back beat.
self.delegate!.metronomeTicking!(self, bar: (callbackBeat / 4) + 1, beat: (callbackBeat % 4) + 1)
}
}
}
// My view controller class where I'm showing the beat number
class ViewController: UIViewController, UIGestureRecognizerDelegate, Metronomedelegate {
@IBOutlet var rhythmlabel: UILabel!
//view did load method
override func viewDidLoad() {
}
//delegate method for getting the beat value from metronome engine and showing in the UI label.
func metronomeTicking(_ metronome: Metronome, bar: Int, beat: Int) {
DispatchQueue.main.async {
print("Playing Beat \(beat)")
//show beat in label
self.rhythmlabel.text = "\(beat)"
}
}
}
I think you are approaching this in a more complex way than necessary. All you really need is to set a DispatchTime when you start the metronome, fire a function call whenever the DispatchTime is up, update the dispatch time based on the desired frequency, and loop as long as the metronome is enabled.
I prepared a project for you which implements this method so you can play with and use as you see fit: https://github.com/ekscrypto/Swift-Tutorial-Metronome
Good luck!
Metronome.swift
import Foundation
import AVFoundation
class Metronome {
var bpm: Float = 60.0 { didSet {
bpm = min(300.0,max(30.0,bpm))
}}
var enabled: Bool = false { didSet {
if enabled {
start()
} else {
stop()
}
}}
var onTick: ((_ nextTick: DispatchTime) -> Void)?
var nextTick: DispatchTime = DispatchTime.distantFuture
let player: AVAudioPlayer = {
do {
let soundURL = Bundle.main.url(forResource: "metronome", withExtension: "wav")!
let soundFile = try AVAudioFile(forReading: soundURL)
let player = try AVAudioPlayer(contentsOf: soundURL)
return player
} catch {
print("Oops, unable to initialize metronome audio buffer: \(error)")
return AVAudioPlayer()
}
}()
private func start() {
print("Starting metronome, BPM: \(bpm)")
player.prepareToPlay()
nextTick = DispatchTime.now()
tick()
}
private func stop() {
player.stop()
print("Stoping metronome")
}
private func tick() {
guard
enabled,
nextTick <= DispatchTime.now()
else { return }
let interval: TimeInterval = 60.0 / TimeInterval(bpm)
nextTick = nextTick + interval
DispatchQueue.main.asyncAfter(deadline: nextTick) { [weak self] in
self?.tick()
}
player.play(atTime: interval)
onTick?(nextTick)
}
}
ViewController.swift
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var bpmLabel: UILabel!
@IBOutlet weak var tickLabel: UILabel!
let myMetronome = Metronome()
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
myMetronome.onTick = { (nextTick) in
self.animateTick()
}
updateBpm()
}
private func animateTick() {
tickLabel.alpha = 1.0
UIView.animate(withDuration: 0.35) {
self.tickLabel.alpha = 0.0
}
}
@IBAction func startMetronome(_: Any?) {
myMetronome.enabled = true
}
@IBAction func stopMetronome(_: Any?) {
myMetronome.enabled = false
}
@IBAction func increaseBpm(_: Any?) {
myMetronome.bpm += 1.0
updateBpm()
}
@IBAction func decreaseBpm(_: Any?) {
myMetronome.bpm -= 1.0
updateBpm()
}
private func updateBpm() {
let metronomeBpm = Int(myMetronome.bpm)
bpmLabel.text = "\(metronomeBpm)"
}
}
Note: There seems to be a pre-loading issue: prepareToPlay() doesn't fully load the audio file before playing, which causes some timing issues with the first playback of the tick audio file. This issue is left to the reader to figure out. The original question being about synchronization, that is what the code above demonstrates.
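One possible mitigation for that pre-loading hiccup, offered as an assumption rather than part of the original answer, is to prime the player silently when the metronome starts so the audio is already loaded before the first audible tick; the primePlayer name is made up:
private func primePlayer() {
    // Play and immediately stop the tick at zero volume so the sound is resident
    // in memory before the first real tick is scheduled.
    player.volume = 0
    player.play()
    player.stop()
    player.currentTime = 0
    player.volume = 1
    player.prepareToPlay()
}
Calling this at the top of start(), before the first tick(), is one place it could go.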

Swift: Instance member cannot be used on type in ARKitVision example

The Apple ARKitVision example has the following declaration in the ViewController.swift file:
// The view controller that displays the status and "restart experience" UI.
private lazy var statusViewController: StatusViewController = {
return children.lazy.compactMap({ $0 as? StatusViewController }).first!
}()
However, if I copy the same views and source files and incorporate them into another test storyboard/project I get the error message "Instance member 'children' cannot be used on type 'StatusViewController'"
So, why does this work on the ARKitVision example but it does not work if I set it up myself from scratch? What else is the ARKitVision example doing to get this working? Thanks 😊
The complete class definition for StatusViewController is:
/*
See LICENSE folder for this sample’s licensing information.
Abstract:
Utility class for showing messages above the AR view.
*/
import Foundation
import ARKit
/**
Displayed at the top of the main interface of the app that allows users to see
the status of the AR experience, as well as the ability to control restarting
the experience altogether.
- Tag: StatusViewController
*/
class StatusViewController: UIViewController {
// MARK: - Types
enum MessageType {
case trackingStateEscalation
case planeEstimation
case contentPlacement
case focusSquare
static var all: [MessageType] = [
.trackingStateEscalation,
.planeEstimation,
.contentPlacement,
.focusSquare
]
}
// MARK: - IBOutlets
@IBOutlet weak private var messagePanel: UIVisualEffectView!
@IBOutlet weak private var messageLabel: UILabel!
@IBOutlet weak private var restartExperienceButton: UIButton!
// MARK: - Properties
/// Triggered when the "Restart Experience" button is tapped.
var restartExperienceHandler: () -> Void = {}
/// Seconds before the timer message should fade out. Adjust if the app needs longer transient messages.
private let displayDuration: TimeInterval = 6
// Timer for hiding messages.
private var messageHideTimer: Timer?
private var timers: [MessageType: Timer] = [:]
// MARK: - Message Handling
func showMessage(_ text: String, autoHide: Bool = true) {
// Cancel any previous hide timer.
messageHideTimer?.invalidate()
messageLabel.text = text
// Make sure status is showing.
setMessageHidden(false, animated: true)
if autoHide {
messageHideTimer = Timer.scheduledTimer(withTimeInterval: displayDuration, repeats: false, block: { [weak self] _ in
self?.setMessageHidden(true, animated: true)
})
}
}
func scheduleMessage(_ text: String, inSeconds seconds: TimeInterval, messageType: MessageType) {
cancelScheduledMessage(for: messageType)
let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [weak self] timer in
self?.showMessage(text)
timer.invalidate()
})
timers[messageType] = timer
}
func cancelScheduledMessage(`for` messageType: MessageType) {
timers[messageType]?.invalidate()
timers[messageType] = nil
}
func cancelAllScheduledMessages() {
for messageType in MessageType.all {
cancelScheduledMessage(for: messageType)
}
}
// MARK: - ARKit
func showTrackingQualityInfo(for trackingState: ARCamera.TrackingState, autoHide: Bool) {
showMessage(trackingState.presentationString, autoHide: autoHide)
}
func escalateFeedback(for trackingState: ARCamera.TrackingState, inSeconds seconds: TimeInterval) {
cancelScheduledMessage(for: .trackingStateEscalation)
let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [unowned self] _ in
self.cancelScheduledMessage(for: .trackingStateEscalation)
var message = trackingState.presentationString
if let recommendation = trackingState.recommendation {
message.append(": \(recommendation)")
}
self.showMessage(message, autoHide: false)
})
timers[.trackingStateEscalation] = timer
}
// MARK: - IBActions
@IBAction private func restartExperience(_ sender: UIButton) {
restartExperienceHandler()
}
// MARK: - Panel Visibility
private func setMessageHidden(_ hide: Bool, animated: Bool) {
// The panel starts out hidden, so show it before animating opacity.
messagePanel.isHidden = false
guard animated else {
messagePanel.alpha = hide ? 0 : 1
return
}
UIView.animate(withDuration: 0.2, delay: 0, options: [.beginFromCurrentState], animations: {
self.messagePanel.alpha = hide ? 0 : 1
}, completion: nil)
}
}
extension ARCamera.TrackingState {
var presentationString: String {
switch self {
case .notAvailable:
return "TRACKING UNAVAILABLE"
case .normal:
return "TRACKING NORMAL"
case .limited(.excessiveMotion):
return "TRACKING LIMITED\nExcessive motion"
case .limited(.insufficientFeatures):
return "TRACKING LIMITED\nLow detail"
case .limited(.initializing):
return "Initializing"
case .limited(.relocalizing):
return "Recovering from interruption"
}
}
var recommendation: String? {
switch self {
case .limited(.excessiveMotion):
return "Try slowing down your movement, or reset the session."
case .limited(.insufficientFeatures):
return "Try pointing at a flat surface, or reset the session."
case .limited(.relocalizing):
return "Return to the location where you left off or try resetting the session."
default:
return nil
}
}
}
The definition of the ViewController class is:
/*
See LICENSE folder for this sample’s licensing information.
Abstract:
Main view controller for the ARKitVision sample.
*/
import UIKit
import SpriteKit
import ARKit
import Vision
class ViewController: UIViewController, UIGestureRecognizerDelegate, ARSKViewDelegate, ARSessionDelegate {
@IBOutlet weak var sceneView: ARSKView!
// The view controller that displays the status and "restart experience" UI.
private lazy var statusViewController: StatusViewController = {
return children.lazy.compactMap({ $0 as? StatusViewController }).first!
}()
// MARK: - View controller lifecycle
override func viewDidLoad() {
super.viewDidLoad()
// Configure and present the SpriteKit scene that draws overlay content.
let overlayScene = SKScene()
overlayScene.scaleMode = .aspectFill
sceneView.delegate = self
sceneView.presentScene(overlayScene)
sceneView.session.delegate = self
// Hook up status view controller callback.
statusViewController.restartExperienceHandler = { [unowned self] in
self.restartSession()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Create a session configuration
let configuration = ARWorldTrackingConfiguration()
// Run the view's session
sceneView.session.run(configuration)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// Pause the view's session
sceneView.session.pause()
}
// MARK: - ARSessionDelegate
// Pass camera frames received from ARKit to Vision (when not already processing one)
/// - Tag: ConsumeARFrames
func session(_ session: ARSession, didUpdate frame: ARFrame) {
// Do not enqueue other buffers for processing while another Vision task is still running.
// The camera stream has only a finite amount of buffers available; holding too many buffers for analysis would starve the camera.
guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
return
}
// Retain the image buffer for Vision processing.
self.currentBuffer = frame.capturedImage
classifyCurrentImage()
}
// MARK: - Vision classification
// Vision classification request and model
/// - Tag: ClassificationRequest
private lazy var classificationRequest: VNCoreMLRequest = {
do {
// Instantiate the model from its generated Swift class.
let model = try VNCoreMLModel(for: Inceptionv3().model)
let request = VNCoreMLRequest(model: model, completionHandler: { [weak self] request, error in
self?.processClassifications(for: request, error: error)
})
// Crop input images to square area at center, matching the way the ML model was trained.
request.imageCropAndScaleOption = .centerCrop
// Use CPU for Vision processing to ensure that there are adequate GPU resources for rendering.
request.usesCPUOnly = true
return request
} catch {
fatalError("Failed to load Vision ML model: \(error)")
}
}()
// The pixel buffer being held for analysis; used to serialize Vision requests.
private var currentBuffer: CVPixelBuffer?
// Queue for dispatching vision classification requests
private let visionQueue = DispatchQueue(label: "com.example.apple-samplecode.ARKitVision.serialVisionQueue")
// Run the Vision+ML classifier on the current image buffer.
/// - Tag: ClassifyCurrentImage
private func classifyCurrentImage() {
// Most computer vision tasks are not rotation agnostic so it is important to pass in the orientation of the image with respect to device.
let orientation = CGImagePropertyOrientation(UIDevice.current.orientation)
let requestHandler = VNImageRequestHandler(cvPixelBuffer: currentBuffer!, orientation: orientation)
visionQueue.async {
do {
// Release the pixel buffer when done, allowing the next buffer to be processed.
defer { self.currentBuffer = nil }
try requestHandler.perform([self.classificationRequest])
} catch {
print("Error: Vision request failed with error \"\(error)\"")
}
}
}
// Classification results
private var identifierString = ""
private var confidence: VNConfidence = 0.0
// Handle completion of the Vision request and choose results to display.
/// - Tag: ProcessClassifications
func processClassifications(for request: VNRequest, error: Error?) {
guard let results = request.results else {
print("Unable to classify image.\n\(error!.localizedDescription)")
return
}
// The `results` will always be `VNClassificationObservation`s, as specified by the Core ML model in this project.
let classifications = results as! [VNClassificationObservation]
// Show a label for the highest-confidence result (but only above a minimum confidence threshold).
if let bestResult = classifications.first(where: { result in result.confidence > 0.5 }),
let label = bestResult.identifier.split(separator: ",").first {
identifierString = String(label)
confidence = bestResult.confidence
} else {
identifierString = ""
confidence = 0
}
DispatchQueue.main.async { [weak self] in
self?.displayClassifierResults()
}
}
// Show the classification results in the UI.
private func displayClassifierResults() {
guard !self.identifierString.isEmpty else {
return // No object was classified.
}
let message = String(format: "Detected \(self.identifierString) with %.2f", self.confidence * 100) + "% confidence"
statusViewController.showMessage(message)
}
// MARK: - Tap gesture handler & ARSKViewDelegate
// Labels for classified objects by ARAnchor UUID
private var anchorLabels = [UUID: String]()
// When the user taps, add an anchor associated with the current classification result.
/// - Tag: PlaceLabelAtLocation
@IBAction func placeLabelAtLocation(sender: UITapGestureRecognizer) {
let hitLocationInView = sender.location(in: sceneView)
let hitTestResults = sceneView.hitTest(hitLocationInView, types: [.featurePoint, .estimatedHorizontalPlane])
if let result = hitTestResults.first {
// Add a new anchor at the tap location.
let anchor = ARAnchor(transform: result.worldTransform)
sceneView.session.add(anchor: anchor)
// Track anchor ID to associate text with the anchor after ARKit creates a corresponding SKNode.
anchorLabels[anchor.identifier] = identifierString
}
}
// When an anchor is added, provide a SpriteKit node for it and set its text to the classification label.
/// - Tag: UpdateARContent
func view(_ view: ARSKView, didAdd node: SKNode, for anchor: ARAnchor) {
guard let labelText = anchorLabels[anchor.identifier] else {
fatalError("missing expected associated label for anchor")
}
let label = TemplateLabelNode(text: labelText)
node.addChild(label)
}
// MARK: - AR Session Handling
func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
statusViewController.showTrackingQualityInfo(for: camera.trackingState, autoHide: true)
switch camera.trackingState {
case .notAvailable, .limited:
statusViewController.escalateFeedback(for: camera.trackingState, inSeconds: 3.0)
case .normal:
statusViewController.cancelScheduledMessage(for: .trackingStateEscalation)
// Unhide content after successful relocalization.
setOverlaysHidden(false)
}
}
func session(_ session: ARSession, didFailWithError error: Error) {
guard error is ARError else { return }
let errorWithInfo = error as NSError
let messages = [
errorWithInfo.localizedDescription,
errorWithInfo.localizedFailureReason,
errorWithInfo.localizedRecoverySuggestion
]
// Filter out optional error messages.
let errorMessage = messages.compactMap({ $0 }).joined(separator: "\n")
DispatchQueue.main.async {
self.displayErrorMessage(title: "The AR session failed.", message: errorMessage)
}
}
func sessionWasInterrupted(_ session: ARSession) {
setOverlaysHidden(true)
}
func sessionShouldAttemptRelocalization(_ session: ARSession) -> Bool {
/*
Allow the session to attempt to resume after an interruption.
This process may not succeed, so the app must be prepared
to reset the session if the relocalizing status continues
for a long time -- see `escalateFeedback` in `StatusViewController`.
*/
return true
}
private func setOverlaysHidden(_ shouldHide: Bool) {
sceneView.scene!.children.forEach { node in
if shouldHide {
// Hide overlay content immediately during relocalization.
node.alpha = 0
} else {
// Fade overlay content in after relocalization succeeds.
node.run(.fadeIn(withDuration: 0.5))
}
}
}
private func restartSession() {
statusViewController.cancelAllScheduledMessages()
statusViewController.showMessage("RESTARTING SESSION")
anchorLabels = [UUID: String]()
let configuration = ARWorldTrackingConfiguration()
sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
}
// MARK: - Error handling
private func displayErrorMessage(title: String, message: String) {
// Present an alert informing about the error that has occurred.
let alertController = UIAlertController(title: title, message: message, preferredStyle: .alert)
let restartAction = UIAlertAction(title: "Restart Session", style: .default) { _ in
alertController.dismiss(animated: true, completion: nil)
self.restartSession()
}
alertController.addAction(restartAction)
present(alertController, animated: true, completion: nil)
}
}
What this would indicate is that your StatusViewController class doesn't inherit from UIViewController, as the children property has been available on UIViewController subclasses for quite some time.
Are you able to share how you have composed your StatusViewController?
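For reference, here is a minimal sketch of the setup the sample relies on; ParentViewController is a placeholder name. The lazy property has to live in a UIViewController subclass, and StatusViewController has to actually be embedded as a child, typically via a container view in the storyboard, so that children contains an instance at runtime:
import UIKit

class ParentViewController: UIViewController {
    // `children` is an instance property of UIViewController, so this only compiles
    // inside a UIViewController subclass, and it only finds a StatusViewController
    // if one has been embedded as a child view controller (e.g. via a storyboard container view).
    private lazy var statusViewController: StatusViewController = {
        return children.lazy.compactMap({ $0 as? StatusViewController }).first!
    }()
}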

How to take a picture using the proximity sensor?

I am having trouble getting the device to take an image using the rear camera when the proximity sensor is enabled. I don't want the camera preview to show up; I just want the device to take the photo and present it in the imageView. I have the proximity sensor working, and I am using imagePicker.takePicture() to take the image when the proximity sensor is triggered, but that doesn't seem to work. What is the method/function I can use to programmatically take the picture without user input?
This is my code so far:
class ViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate {
@IBOutlet var imageView: UIImageView!
var imagePicker: UIImagePickerController!
//*The function in question*
func proximityChanged(notification: NSNotification) {
let device = notification.object as? UIDevice
if device?.proximityState == true {
print("\(device) detected!")

If you have trouble capturing photos with UIImagePickerController, I suggest using AVFoundation.
Below is a working example. Photo capture is triggered by the proximity sensor.
You can add a preview if you need it.
import UIKit
import AVFoundation
final class CaptureViewController: UIViewController {
@IBOutlet weak var imageView: UIImageView!
private static let captureSessionPreset = AVCaptureSessionPresetPhoto
private var captureSession: AVCaptureSession!
private var photoOutput: AVCaptureStillImageOutput!
private var initialized = false
override func viewDidLoad() {
super.viewDidLoad()
initialized = setupCaptureSession()
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
if initialized {
captureSession.startRunning()
UIDevice.currentDevice().proximityMonitoringEnabled = true
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(proximityStateDidChange), name: UIDeviceProximityStateDidChangeNotification, object: nil)
}
}
override func viewDidDisappear(animated: Bool) {
super.viewDidDisappear(animated)
if initialized {
NSNotificationCenter.defaultCenter().removeObserver(self, name: UIDeviceProximityStateDidChangeNotification, object: nil)
UIDevice.currentDevice().proximityMonitoringEnabled = false
captureSession.stopRunning()
}
}
dynamic func proximityStateDidChange(notification: NSNotification) {
if UIDevice.currentDevice().proximityState {
captureImage()
}
}
// MARK: - Capture Image
private func captureImage() {
if let c = findConnection() {
photoOutput.captureStillImageAsynchronouslyFromConnection(c) { sampleBuffer, error in
if let jpeg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
let image = UIImage(data: jpeg)
{
dispatch_async(dispatch_get_main_queue()) { [weak self] in
self?.imageView.image = image
}
}
}
}
}
private func findConnection() -> AVCaptureConnection? {
for c in photoOutput.connections {
let c = c as? AVCaptureConnection
for p in c?.inputPorts ?? [] {
if p.mediaType == AVMediaTypeVideo {
return c
}
}
}
return nil
}
// MARK: - Setup Capture Session
private func setupCaptureSession() -> Bool {
captureSession = AVCaptureSession()
if captureSession.canSetSessionPreset(CaptureViewController.captureSessionPreset) {
captureSession.sessionPreset = CaptureViewController.captureSessionPreset
if setupCaptureSessionInput() && setupCaptureSessionOutput() {
return true
}
}
return false
}
private func setupCaptureSessionInput() -> Bool {
if let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo),
let captureDeviceInput = try? AVCaptureDeviceInput.init(device: captureDevice)
{
if captureSession.canAddInput(captureDeviceInput) {
captureSession.addInput(captureDeviceInput)
return true
}
}
return false
}
private func setupCaptureSessionOutput() -> Bool {
photoOutput = AVCaptureStillImageOutput()
photoOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
if captureSession.canAddOutput(photoOutput) {
captureSession.addOutput(photoOutput)
return true
}
return false
}
}
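If you do want the on-screen preview mentioned above, a rough sketch in the same Swift 2-era style as the answer could look like the following; the addPreviewLayer name and the idea of calling it after setupCaptureSession() succeeds are assumptions:
private func addPreviewLayer() {
    // Show the camera feed behind the other subviews, using the same captureSession.
    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    previewLayer.frame = view.bounds
    view.layer.insertSublayer(previewLayer, atIndex: 0)
}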
