AVPlayerItem.loadedTimeRanges notifications aren't working with custom AVAssetResourceLoaderDelegate - ios

I use an AVAssetResourceLoaderDelegate for video caching, but the video only starts playing after it has been fully downloaded.
I add a KVO observer to the loadedTimeRanges property:
[self.player.currentItem addObserver:self
                          forKeyPath:@"loadedTimeRanges"
                             options:NSKeyValueObservingOptionNew
                             context:nil];
Check ranges and start playing:
- (void)checkRanges {
    if (self.player.currentItem.asset) {
        CMTimeRange loadedTimeRange =
            [[self.player.currentItem.loadedTimeRanges firstObject] CMTimeRangeValue];
        Float64 videoDuration = CMTimeGetSeconds(self.player.currentItem.asset.duration);
        Float64 buffered = CMTimeGetSeconds(loadedTimeRange.duration);
        Float64 percent = buffered / videoDuration;
        if (percent > 0.2) {
            [self.player play];
        }
    }
}
But the notifications don't behave as expected: for some reason I only get a notification when the video is almost fully downloaded.
The downloading is implemented with an AVAssetResourceLoaderDelegate, which allows the cache to be used while the video is playing.
The download is kicked off in the AVAssetResourceLoaderDelegate method:
func resourceLoader(_ resourceLoader: AVAssetResourceLoader,
                    shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
    if let resourceURL = loadingRequest.request.url, resourceURL.isVCSVideoScheme(),
       let originalURL = resourceURL.vcsRemoteVideoURL() {
        let assetLoader: VideoAssetLoader
        if let loader = assetLoaderForRequest(loadingRequest) {
            assetLoader = loader
        } else {
            assetLoader = VideoAssetLoader(url: originalURL)
            assetLoader.delegate = self
            assetLoaders[keyForAssetLoaderWithURL(resourceURL)] = assetLoader
        }
        assetLoader.addRequest(loadingRequest)
        return true
    }
    return false
}
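For context, the player item is backed by an AVURLAsset with a custom scheme so that the resource loader delegate above gets consulted. A sketch of that wiring (the scheme string, variable names and delegate reference here are illustrative, not the exact project code):
let loaderQueue = DispatchQueue(label: "video.resource.loader")
let asset = AVURLAsset(url: customSchemeURL)                  // e.g. a vcsvideo:// URL
asset.resourceLoader.setDelegate(resourceLoaderDelegate, queue: loaderQueue)

let item = AVPlayerItem(asset: asset)
item.addObserver(self, forKeyPath: "loadedTimeRanges", options: [.new], context: nil)
self.player = AVPlayer(playerItem: item)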
VideoAssetLoader performs a HEAD request to fetch the file information and then fills in the AVAssetResourceLoadingRequest's content information:
private func fillInContentInformation(_ contentInformationRequest: AVAssetResourceLoadingContentInformationRequest?) {
    guard let contentInformationRequest = contentInformationRequest,
          let contentInformation = self.contentInformation else {
        return
    }
    contentInformationRequest.isByteRangeAccessSupported = contentInformation.byteRangeAccessSupported
    contentInformationRequest.contentType = contentInformation.contentType
    contentInformationRequest.contentLength = Int64(contentInformation.contentLength)
}
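The contentInformation value itself comes from the HEAD response. Since VideoAssetLoader isn't shown here, this is only a sketch of how such a value is typically derived (the struct and helper names are illustrative, and UTType(mimeType:) requires iOS 14+):
import Foundation
import UniformTypeIdentifiers

struct VideoContentInformation {
    let contentType: String            // a UTI, e.g. "public.mpeg-4"
    let contentLength: UInt64
    let byteRangeAccessSupported: Bool
}

private func fetchContentInformation(for url: URL,
                                     completion: @escaping (VideoContentInformation?) -> Void) {
    var request = URLRequest(url: url)
    request.httpMethod = "HEAD"
    URLSession.shared.dataTask(with: request) { _, response, _ in
        guard let http = response as? HTTPURLResponse else { return completion(nil) }
        // The content information request expects a UTI, not a MIME type.
        let mime = http.mimeType ?? "video/mp4"
        let uti = UTType(mimeType: mime)?.identifier ?? UTType.mpeg4Movie.identifier
        let acceptsRanges = http.value(forHTTPHeaderField: "Accept-Ranges") == "bytes"
        completion(VideoContentInformation(contentType: uti,
                                           contentLength: UInt64(max(http.expectedContentLength, 0)),
                                           byteRangeAccessSupported: acceptsRanges))
    }.resume()
}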
VideoAssetLoader then downloads the requested byte range and updates the AVAssetResourceLoadingDataRequest:
private func processPendingRequests() {
    for request in pendingRequests {
        fillInContentInformation(request.contentInformationRequest)
        let didRespondCompletely = respondWithDataForRequest(request.dataRequest)
        if didRespondCompletely {
            request.finishLoading()
        }
    }
    pendingRequests = pendingRequests.filter({ !$0.isFinished })
}

private func respondWithDataForRequest(_ dataRequest: AVAssetResourceLoadingDataRequest?) -> Bool {
    guard let dataRequest = dataRequest, let downloadTask = videoDownloadTask else {
        return false
    }
    var startOffset: UInt64 = UInt64(dataRequest.requestedOffset)
    if dataRequest.currentOffset != 0 {
        startOffset = UInt64(dataRequest.currentOffset)
    }
    let numberOfBytesToRespondWith = UInt64(dataRequest.requestedLength)
    var didRespondFully = false
    if let data = downloadTask.readCachedDataWithOffset(startOffset, lenght: numberOfBytesToRespondWith) {
        dataRequest.respond(with: data)
        didRespondFully = data.count >= dataRequest.requestedLength
    }
    return didRespondFully
}
Unfortunately, the video doesn't start playing until it has been downloaded completely (i.e. until AVAssetResourceLoadingRequest.finishLoading() has been called), and the loadedTimeRanges notifications don't arrive in the meantime either.
Does anyone have experience with this to point me toward an area where I may be doing something wrong?
Thanks!

Related

Laggy WCSession sendMessageData

I am polling the Apple Watch for Core Motion data at an interval of 0.01 s (100 Hz). The purpose of the application is to show movement in real time. To capture data as quickly as possible, I use the didReceiveMessageData/sendMessageData functions. On the iPhone, I have a simple function that reads the data:
func session(_ session: WCSession, didReceiveMessageData messageData: Data) {
    let records: [Double] = try! NSKeyedUnarchiver.unarchivedObject(ofClasses: [NSArray.self], from: messageData) as! [Double]
}
And on an Apple Watch Series 6, I have a simple function that sends the data. However, sending suffers from sporadic yet significant delays.
class MyController: WKInterfaceController, WCSessionDelegate {
    private let motion = CMMotionManager()
    private let motionQueue = OperationQueue()
    private let messagingQueue = OperationQueue()
    private let healthStore = HKHealthStore()
    private var workoutSession: HKWorkoutSession?   // keeps motion updates running
    private var stack: QuaternionStack = QuaternionStack()

    override init() {
        super.init()
        if WCSession.isSupported() {
            let session = WCSession.default
            session.delegate = self
            if session.activationState == .notActivated { session.activate() }
        }
        // Serial queue for sample handling and calculations.
        messagingQueue.qualityOfService = .userInteractive
        messagingQueue.maxConcurrentOperationCount = 1
        motionQueue.qualityOfService = .userInteractive
        motionQueue.maxConcurrentOperationCount = 1
        startGettingData()
    }

    func startGettingData() {
        // If we have already started the workout, then do nothing.
        if workoutSession != nil { return }
        if !motion.isDeviceMotionAvailable { return }
        let workoutConfiguration = HKWorkoutConfiguration()
        workoutConfiguration.activityType = .functionalStrengthTraining
        workoutConfiguration.locationType = .indoor
        do {
            workoutSession = try HKWorkoutSession(healthStore: healthStore, configuration: workoutConfiguration)
        } catch { fatalError("Unable to create the workout session!") }
        // Start the workout session and device motion updates.
        workoutSession!.startActivity(with: Date())
        motion.deviceMotionUpdateInterval = 0.01
        motion.startDeviceMotionUpdates(using: .xArbitraryZVertical, to: motionQueue) { [self] (deviceMotion: CMDeviceMotion?, _: Error?) in
            guard let motion = deviceMotion else { return }
            let attitude = motion.attitude.quaternion
            stack.push(Quaternion(x: attitude.x, y: attitude.y, z: attitude.z, w: attitude.w))
            guard let quaternions = stack.pop() else { return }
            messagingQueue.cancelAllOperations()
            let blockOperation = BlockOperation()
            blockOperation.addExecutionBlock({ [unowned blockOperation] in
                if blockOperation.isCancelled { return }
                self.sendDataToPhone(quaternions: quaternions)
            })
            messagingQueue.addOperation(blockOperation)
        }
    }

    private func sendDataToPhone(quaternions: [Quaternion]) {
        if WCSession.default.isReachable {
            var capturedQuaternions: [Double] = []
            for quat in quaternions { capturedQuaternions.append(contentsOf: [quat.x, quat.y, quat.z, quat.w]) }
            WCSession.default.sendMessageData(try! NSKeyedArchiver.archivedData(withRootObject: capturedQuaternions, requiringSecureCoding: false), replyHandler: nil, errorHandler: nil)
        }
    }
}
I've implemented a stack as follows:
struct QuaternionStack {
    private let max = 2
    private var array: [Quaternion] = []

    mutating func push(_ element: Quaternion) {
        array.append(element)
        if array.count > max { array.removeFirst() }
    }

    mutating func pop() -> [Quaternion]? {
        if array.count < max { return nil }
        var results: [Quaternion] = []
        for _ in 0 ..< max { results.append(array.popLast()!) }
        results.reverse()
        array.removeAll()
        return results
    }
}
If I set QuaternionStack.max to a large number, like 10, I see no obvious throttling on the iPhone when receiving data, because I send more data per message but less often. Decreasing the number, however, degrades performance. For example, say I send every 2 incoming samples (QuaternionStack.max = 2): sometimes a few seconds pass between received packets, and when that happens the watch seems to send them in a very quick burst in an effort to catch up. The same issue appears when listening to music on paired AirPods or when receiving an incoming call: sendMessageData from the watch becomes very inconsistent.
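For clarity, the batching described above amounts to something like this (a sketch only; the buffer name and the samplesPerMessage value are illustrative, not taken from my project):
private var outgoing: [Double] = []
private let samplesPerMessage = 10   // larger batches -> fewer, bigger messages

private func enqueue(_ quat: Quaternion) {
    outgoing.append(contentsOf: [quat.x, quat.y, quat.z, quat.w])
    guard outgoing.count >= samplesPerMessage * 4, WCSession.default.isReachable else { return }
    if let payload = try? NSKeyedArchiver.archivedData(withRootObject: outgoing, requiringSecureCoding: false) {
        WCSession.default.sendMessageData(payload, replyHandler: nil, errorHandler: nil)
    }
    outgoing.removeAll()
}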
What must I do to increase the throughput of WCSession sendMessageData? The application I am writing requires very fast (100 Hz) and continuous motion updates.

Real-time AVAssetWriter synchronise audio and video when pausing/resuming

I am trying to record a video with sound using the iPhone's front camera. Since I also need to support pause/resume functionality, I need to use AVAssetWriter. I found an example online, written in Objective-C, which almost achieves the desired functionality (http://www.gdcl.co.uk/2013/02/20/iPhone-Pause.html).
Unfortunately, after converting this example to Swift, I noticed that if I pause/resume, at the end of each "section" there is a small but noticeable period during which the video is just a still frame while the audio keeps playing. So it seems that when isPaused is triggered, the recorded audio track ends up longer than the recorded video track.
Sorry if this seems like a noob question, but I am not a great expert in AVFoundation and some help would be appreciated!
Below is my implementation of didOutput sampleBuffer.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    var isVideo = true
    if videoConntection != connection {
        isVideo = false
    }
    if !isCapturing || isPaused {
        return
    }
    if encoder == nil {
        if isVideo {
            return
        }
        if let fmt = CMSampleBufferGetFormatDescription(sampleBuffer) {
            let desc = CMAudioFormatDescriptionGetStreamBasicDescription(fmt as CMAudioFormatDescription)
            if let chan = desc?.pointee.mChannelsPerFrame, let rate = desc?.pointee.mSampleRate {
                let path = tempPath()!
                encoder = VideoEncoder(path: path, height: Int(cameraSize.height), width: Int(cameraSize.width), channels: chan, rate: rate)
            }
        }
    }
    if discont {
        if isVideo {
            return
        }
        discont = false
        var pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        let last = lastAudio
        if last.flags.contains(CMTimeFlags.valid) {
            if cmOffset.flags.contains(CMTimeFlags.valid) {
                pts = CMTimeSubtract(pts, cmOffset)
            }
            let off = CMTimeSubtract(pts, last)
            print("setting offset from \(isVideo ? "video" : "audio")")
            print("adding \(CMTimeGetSeconds(off)) to \(CMTimeGetSeconds(cmOffset)) (pts \(CMTimeGetSeconds(cmOffset)))")
            if cmOffset.value == 0 {
                cmOffset = off
            } else {
                cmOffset = CMTimeAdd(cmOffset, off)
            }
        }
        lastVideo.flags = []
        lastAudio.flags = []
        return
    }
    var out: CMSampleBuffer?
    if cmOffset.value > 0 {
        var count: CMItemCount = CMSampleBufferGetNumSamples(sampleBuffer)
        let pInfo = UnsafeMutablePointer<CMSampleTimingInfo>.allocate(capacity: count)
        // Free the timing array once the retimed copy has been made.
        defer { pInfo.deallocate() }
        CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: count, arrayToFill: pInfo, entriesNeededOut: &count)
        var i = 0
        while i < count {
            pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, cmOffset)
            pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, cmOffset)
            i += 1
        }
        CMSampleBufferCreateCopyWithNewTiming(allocator: nil, sampleBuffer: sampleBuffer, sampleTimingEntryCount: count, sampleTimingArray: pInfo, sampleBufferOut: &out)
    } else {
        out = sampleBuffer
    }
    var pts = CMSampleBufferGetPresentationTimeStamp(out!)
    let dur = CMSampleBufferGetDuration(out!)
    if dur.value > 0 {
        pts = CMTimeAdd(pts, dur)
    }
    if isVideo {
        lastVideo = pts
    } else {
        lastAudio = pts
    }
    encoder?.encodeFrame(sampleBuffer: out!, isVideo: isVideo)
}
And this is my VideoEncoder class:
final class VideoEncoder {
    var writer: AVAssetWriter
    var videoInput: AVAssetWriterInput
    var audioInput: AVAssetWriterInput
    var path: String

    init(path: String, height: Int, width: Int, channels: UInt32, rate: Float64) {
        self.path = path
        if FileManager.default.fileExists(atPath: path) {
            try? FileManager.default.removeItem(atPath: path)
        }
        let url = URL(fileURLWithPath: path)
        writer = try! AVAssetWriter(outputURL: url, fileType: .mp4)
        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: height,
            AVVideoHeightKey: width
        ])
        videoInput.expectsMediaDataInRealTime = true
        writer.add(videoInput)
        audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey: channels,
            AVSampleRateKey: rate
        ])
        audioInput.expectsMediaDataInRealTime = true
        writer.add(audioInput)
    }

    func finish(with completionHandler: @escaping () -> Void) {
        writer.finishWriting(completionHandler: completionHandler)
    }

    func encodeFrame(sampleBuffer: CMSampleBuffer, isVideo: Bool) -> Bool {
        if CMSampleBufferDataIsReady(sampleBuffer) {
            if writer.status == .unknown {
                writer.startWriting()
                writer.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
            }
            if writer.status == .failed {
                QFLogger.shared.addLog(format: "[ERROR initiating AVAssetWriter]", args: [], error: writer.error)
                return false
            }
            if isVideo {
                if videoInput.isReadyForMoreMediaData {
                    videoInput.append(sampleBuffer)
                    return true
                }
            } else {
                if audioInput.isReadyForMoreMediaData {
                    audioInput.append(sampleBuffer)
                    return true
                }
            }
        }
        return false
    }
}
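When recording stops, the writer is finalized through the finish method above, e.g. (a sketch):
// Finalize the file once the user ends the recording.
encoder?.finish {
    // The .mp4 at `path` is now complete and can be saved or exported here.
}
encoder = nil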
The rest of the code should be pretty obvious, but just to make it complete, here is what I have for pausing:
isPaused = true
discont = true
And here is resume:
isPaused = false
If anyone could help me to understand how to align video and audio tracks during such live recording that would be great!
Ok, it turns out there was no mistake in the code I provided. The issue I experienced was caused by video smoothing being turned ON :) I guess it needs extra frames to smooth the video, which is why the video output freezes at the end for a short period of time.
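If the smoothing in question is the capture connection's video stabilization (an assumption on my part; the exact setting isn't named above), turning it off looks roughly like this, with videoOutput standing for the AVCaptureVideoDataOutput in use:
if let connection = videoOutput.connection(with: .video),
   connection.isVideoStabilizationSupported {
    // Trades smoother footage for not buffering "extra" frames at the end.
    connection.preferredVideoStabilizationMode = .off
}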

Troubles with MPRemoteCommandCenter

I have two problems with MPRemoteCommandCenter:
1) When I change the song, the remote controls remove the previous song's image, show the default image, and only then show the next song's image. I don't want to see the default image, and I have spent quite a while looking for a solution.
2) When AVPlayer is live-streaming sound, the remote controls become inactive and show circular arrows (with the number 15 inside the circle; what does that mean?).
Here is my code for playing sound:
public func playAVSound(trackName: String) -> String {
    let path = _findPath(trackName: trackName)
    if path == "" {
        return "getting url"
    }
    if self.AVPlayerVC.player == nil {
        print("Init remote control events...")
        UIApplication.shared.beginReceivingRemoteControlEvents()
        let commandCenter = MPRemoteCommandCenter.shared()
        commandCenter.nextTrackCommand.isEnabled = true
        commandCenter.nextTrackCommand.addTarget(self, action: #selector(self.next))
        commandCenter.previousTrackCommand.isEnabled = true
        commandCenter.previousTrackCommand.addTarget(self, action: #selector(self.previous))
    }
    if self.AVPlayerVC.player != nil && self.play_info.trackName == trackName {
        if self.play_info.paused! {
            self.AVPlayerVC.player?.play()
            self.updatePlayInfo(number: Global.PlayList.find_by_trackName(trackName: trackName), trackName: trackName, path: path, paused: false)
            return "continue"
        } else {
            self.AVPlayerVC.player?.pause()
            self.play_info.paused = true
            return "pause"
        }
    }
    let nsurl = NSURL(string: path)
    if let url = nsurl {
        print("Play AV from : \(url)")
        let item = AVPlayerItem(url: url as URL)
        if self.AVPlayerVC.player?.currentItem == nil {
            self.AvPlayer = AVPlayer(playerItem: item)
            self.AVPlayerVC.player = self.AvPlayer
            self.AVPlayerVC.player?.automaticallyWaitsToMinimizeStalling = false
        } else {
            self.AvPlayer?.replaceCurrentItem(with: item)
        }
        NotificationCenter.default.addObserver(self, selector: #selector(self.playerDidFinishPlaying), name: .AVPlayerItemDidPlayToEndTime, object: item)
        self.AVPlayerVC.player?.play()
        self.updatePlayInfo(number: Global.PlayList.find_by_trackName(trackName: trackName), trackName: trackName, path: path, paused: false)
        return "playing"
    } else {
        print("Incorrect nsurl")
    }
    return "error"
}

private func _findPath(trackName: String) -> String {
    let n = Global.PlayList.find_by_trackName(trackName: trackName)
    var path = ""
    if n >= 0 {
        let p_item = Global.PlayList.PlaylistItems[n]
        if !p_item.fromData! {
            if p_item.playing_url == nil {
                NetLib.makeTrackUrl(trackName: trackName, closure: self.playAVSound)
            } else {
                path = p_item.playing_url!
            }
        } else {
            path = NetLib.makePath(filename: p_item.filename!)
        }
    } else {
        print("Error find_path: \(trackName) was not found in playlist")
    }
    return path
}
And here is the code that changes the song:
@objc private func previous() -> MPRemoteCommandHandlerStatus {
    var n = self.play_info.number! - 1
    if self.play_info.number == 0 {
        n = Global.PlayList.size() - 1
    }
    NotificationCenter.default.post(name: NSNotification.Name(rawValue: "load"), object: nil)
    _ = self.playAVSound(trackName: Global.PlayList.PlaylistItems[n].trackName!)
    print("Previous song \(Global.PlayList.PlaylistItems[n].trackName!)")
    return .success
}
I resolved the problem with the controls during live streaming: I was driving playback from a background thread, and the remote controls don't work correctly unless playback is controlled from the main thread.
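A minimal sketch of that fix, making sure the playback call happens on the main queue (illustrative):
// Hop back to the main queue before touching AVPlayer / the remote controls.
DispatchQueue.main.async {
    _ = self.playAVSound(trackName: trackName)
}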

How to disable audio in a WebRTC mobile app (iOS) without changing the framework

I am working with WebRTC on mobile (iOS) and I can't disable audio; I have found no flag for it. It can be done easily by changing the framework/library, but my requirement is to disable audio without modifying the framework/library. Can anyone help me?
Please update your question with a code snippet showing how you are creating the media stream or the tracks (audio/video).
Generally, with the default native WebRTC framework:
RTCMediaStream *localStream = [_factory mediaStreamWithStreamId:kARDMediaStreamId];
if (audioRequired) {
    RTCAudioTrack *aTrack = [_lmStream createLocalAudioTrack];
    [localStream addAudioTrack:aTrack];
}
RTCVideoTrack *vTrack = [_lmStream createLocalVideoTrack];
[localStream addVideoTrack:vTrack];
[_peerConnection addStream:localStream];
If you want to mute the audio during the call, use the function below.
- (void)enableAudio:(NSString *)id isAudioEnabled:(BOOL)isAudioEnabled {
    NSLog(@"Audio enabled: %@ %d, streams count: %lu", id, isAudioEnabled, (unsigned long)_peerConnection.localStreams.count);
    if (_peerConnection.localStreams.count > 0) {
        RTCMediaStream *lStream = _peerConnection.localStreams[0];
        if (lStream.audioTracks.count > 0) { // Usually there is only one track; if you have more, traverse them all.
            // isAudioEnabled == 1 -> unmute
            // isAudioEnabled == 0 -> mute
            [lStream.audioTracks[0] setIsEnabled:isAudioEnabled];
        }
    }
}
In my case I didn't use streams and added the audio track directly to the peer connection.
private func createMediaSenders() {
    let streamId = "stream"
    // Audio
    let audioTrack = self.createAudioTrack()
    self.pc.add(audioTrack, streamIds: [streamId])
    // Video
    /* let videoTrack = self.createVideoTrack()
    self.localVideoTrack = videoTrack
    self.peerConnection.add(videoTrack, streamIds: [streamId])
    self.remoteVideoTrack = self.peerConnection.transceivers.first { $0.mediaType == .video }?.receiver.track as? RTCVideoTrack
    // Data
    if let dataChannel = createDataChannel() {
        dataChannel.delegate = self
        self.localDataChannel = dataChannel
    } */
}

private func createAudioTrack() -> RTCAudioTrack {
    let audioConstrains = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: nil)
    let audioSource = sessionFactory.audioSource(with: audioConstrains)
    let audioTrack = sessionFactory.audioTrack(with: audioSource, trackId: "audio0")
    return audioTrack
}
To mute and unmute the microphone I use this function:
public func muteMicrophone(_ mute: Bool) {
    for sender in pc.senders {
        if sender.track?.kind == "audio" {
            // Disabling the track mutes the microphone.
            sender.track?.isEnabled = !mute
        }
    }
}
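And for the original question (not sending audio at all, as opposed to muting it), the same idea as the if(audioRequired) check in the first snippet applies here too; a sketch, with audioRequired as an illustrative flag:
private func createMediaSenders(audioRequired: Bool) {
    let streamId = "stream"
    // Only create and add the audio track when audio is actually wanted.
    if audioRequired {
        let audioTrack = self.createAudioTrack()
        self.pc.add(audioTrack, streamIds: [streamId])
    }
    // Video / data senders would be added here as in the snippet above.
}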

Swift AVFoundation Reading and Analyzing a file in real time

I am having trouble reading a file from disk using AVFoundation and performing rendering and analysis in real time.
I have a code pipeline that I know does its analysis in real time: it works in real time when fed from the camera session. However, that is not the case when I read the file as shown below. Can anyone tell me where I might be going wrong?
protocol VideoStreamTestBenchDelegate {
    func frameBuffer(buffer: CMSampleBuffer)
}

class VideoStreamTestBench {
    let asset: AVAsset
    let assetReader: AVAssetReader
    let playAtActualSpeed: Bool
    let loop: Bool
    var videoEncodingIsFinished = false
    var previousFrameTime = kCMTimeZero
    var previousActualFrameTime = CFAbsoluteTimeGetCurrent()
    var numberOfFramesCaptured = 0
    var totalFrameTimeDuringCapture: Double = 0.0
    var delegate: VideoStreamTestBenchDelegate?

    public convenience init(url: URL, playAtActualSpeed: Bool = false, loop: Bool = false) throws {
        let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)]
        let inputAsset = AVURLAsset(url: url, options: inputOptions)
        try self.init(asset: inputAsset, playAtActualSpeed: playAtActualSpeed, loop: loop)
    }

    public init(asset: AVAsset, playAtActualSpeed: Bool = false, loop: Bool = false) throws {
        self.asset = asset
        self.playAtActualSpeed = playAtActualSpeed
        self.loop = loop
        assetReader = try AVAssetReader(asset: self.asset)
        let outputSettings: [String: AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: Int32(kCVPixelFormatType_32BGRA))]
        let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings: outputSettings)
        readerVideoTrackOutput.alwaysCopiesSampleData = false
        assetReader.add(readerVideoTrackOutput)
        // TODO: Audio here
    }

    public func start() {
        asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: {
            DispatchQueue.global(priority: DispatchQueue.GlobalQueuePriority.default).async(execute: {
                guard self.asset.statusOfValue(forKey: "tracks", error: nil) == .loaded else { return }
                guard self.assetReader.startReading() else {
                    print("Couldn't start reading")
                    return
                }
                var readerVideoTrackOutput: AVAssetReaderOutput? = nil
                for output in self.assetReader.outputs {
                    if output.mediaType == AVMediaTypeVideo {
                        readerVideoTrackOutput = output
                    }
                }
                while self.assetReader.status == .reading {
                    self.readNextVideoFrame(from: readerVideoTrackOutput!)
                }
                if self.assetReader.status == .completed {
                    self.assetReader.cancelReading()
                    if self.loop {
                        // TODO: Restart movie processing
                    } else {
                        self.endProcessing()
                    }
                }
            })
        })
    }

    public func cancel() {
        assetReader.cancelReading()
        self.endProcessing()
    }

    func endProcessing() {
    }

    func readNextVideoFrame(from videoTrackOutput: AVAssetReaderOutput) {
        if assetReader.status == .reading && !videoEncodingIsFinished {
            if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() {
                if playAtActualSpeed {
                    // Do this outside of the video processing queue to not slow that down while waiting
                    let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
                    let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime)
                    let currentActualTime = CFAbsoluteTimeGetCurrent()
                    let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame)
                    let actualTimeDifference = currentActualTime - previousActualFrameTime
                    if frameTimeDifference > actualTimeDifference {
                        usleep(UInt32(round(1000000.0 * (frameTimeDifference - actualTimeDifference))))
                    }
                    previousFrameTime = currentSampleTime
                    previousActualFrameTime = CFAbsoluteTimeGetCurrent()
                }
                DispatchQueue.global().sync {
                    self.delegate?.frameBuffer(buffer: sampleBuffer)
                    CMSampleBufferInvalidate(sampleBuffer)
                }
            } else {
                if !loop {
                    videoEncodingIsFinished = true
                    if videoEncodingIsFinished {
                        self.endProcessing()
                    }
                }
            }
        }
    }
}
// This is the delegate
public func bufferReader(_ reader: BufferReader!, didGetNextVideoSample bufferRef: CMSampleBuffer!) {
    // let posePoints: [Any] = self.visageBackend.posePoints(with: bufferRef)
    // var regions: [Any]? = nil
    //
    // if (posePoints.count > 0) {
    //     regions = (self.luaBackend?.regions(forPosePoints: posePoints))!
    // }
    //
    // // extract
    // if (regions != nil) {
    //     let rois: [Any] = (self.luaBackend?.extractedRegionInfos(for: bufferRef, andRegions: regions))!
    //     print(rois)
    // }
    //
    // self.dLibRenderEngine.render(with: bufferRef, andPoints: posePoints, andRegions: regions)
    self.backgroundRenderQueue.async { [weak self] in
        if self?.previewLayer?.isReadyForMoreMediaData == true {
            self?.previewLayer?.enqueue(bufferRef!)
        }
    }
}
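For completeness, the bench is driven roughly like this (a usage sketch based on the class above; the file URL is a placeholder):
let fileURL = URL(fileURLWithPath: "/path/to/video.mp4")   // placeholder path
do {
    let bench = try VideoStreamTestBench(url: fileURL, playAtActualSpeed: true)
    bench.delegate = self          // frameBuffer(buffer:) receives each CMSampleBuffer
    bench.start()
} catch {
    print("Could not create the test bench: \(error)")
}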
