Error when setting UISlider's Min and Max values - iOS

I am playing videos from the user's library in my app. I use this code in viewDidLoad() to get the video:
fileprivate let imageManager = PHImageManager()
fileprivate var playerLayer: AVPlayerLayer?
fileprivate var player: AVPlayer?
fileprivate var videoView: UIView?

imageManager.requestPlayerItem(forVideo: videoAsset, options: options, resultHandler: { playerItem, info in
    DispatchQueue.main.sync {
        guard self.playerLayer == nil else { return }
        self.player = AVPlayer(playerItem: playerItem)
        self.playerLayer = AVPlayerLayer(player: self.player)
        self.videoView = UIView(frame: self.view.frame)
        self.videoView?.contentMode = .scaleAspectFit
        self.playerLayer?.videoGravity = AVLayerVideoGravity.resizeAspect
        self.playerLayer?.frame = self.videoView!.layer.bounds
        self.videoView!.layer.addSublayer(self.playerLayer!)
        self.photoScrollView.addSubview(self.videoView!)
        self.addObserversForVideo()
    }
})
Inside addObserversForVideo() I add several observers that update a slider controlling the video, and I also set the slider's min and max values:
guard let currentPlayer = player else { return }
guard let currentItem = currentPlayer.currentItem else { return }

NotificationCenter.default.addObserver(self,
                                       selector: #selector(self.playerFinishedPlayingVideo),
                                       name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
                                       object: nil)

let interval = CMTime(seconds: 0.5, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
_ = playerLayer?.player?.addPeriodicTimeObserver(forInterval: interval, queue: DispatchQueue.main, using: { [weak self] (time) in
    self?.videoSlider.maximumValue = Float(currentItem.duration.seconds)
    self?.videoSlider.minimumValue = 0.0
    self?.videoSlider.value = Float(currentItem.currentTime().seconds)
    self?.videoElapsedTimeLabel.text = self?.getStringFromCMTime(time: currentItem.currentTime())

    let remainingTime = currentItem.duration - currentItem.currentTime()
    self?.videoRemainingTimeLabel.text = self?.getStringFromCMTime(time: remainingTime)
})
Now, the problem is that I sometimes get this error which crashes my app:
'NSInternalInconsistencyException', reason: 'Attempting to set a slider's minimumValue (0.000000) to be larger than the maximumValue (nan)'
I don't understand why this happens, since I check that currentItem is non-nil with a guard statement at the beginning, and I also set the minimum value after the maximum value. I would appreciate it if someone could help me out.

Thanks to @TonyNguyen I was able to fix the problem with one line:
guard currentItem.status == .readyToPlay else { return }

You need to guard against two additional things:
currentPlayer.currentItem.status == .readyToPlay
currentPlayer.currentItem.duration >= CMTime.zero
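A minimal sketch of what those checks might look like inside the question's periodic time observer (reusing the currentItem, videoSlider, and interval names from the question; this is just one way to arrange the guards, not the only one):
_ = playerLayer?.player?.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] _ in
    // Skip updates until the item is ready and reports a finite, non-negative duration.
    guard let self = self,
          currentItem.status == .readyToPlay,
          currentItem.duration >= .zero,
          currentItem.duration.seconds.isFinite else { return }

    self.videoSlider.minimumValue = 0.0
    self.videoSlider.maximumValue = Float(currentItem.duration.seconds)
    self.videoSlider.value = Float(currentItem.currentTime().seconds)
}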

In my periodic time observer I convert the time to seconds, then check whether the value is NaN or infinite before updating anything.
_ = player?.addPeriodicTimeObserver(forInterval: interval, queue: DispatchQueue.main, using: { [weak self] (time) in
    let seconds = CMTimeGetSeconds(time)
    guard !(seconds.isNaN || seconds.isInfinite) else {
        return
    }
    // the rest of your code
})

Related

Swift - How to achieve 25 FPS for screen recording multiple AVPlayers at the same time

I have a screen recorder that can record two AVPlayers playing simultaneously, but I want to improve the frame rate to 25 fps.
I use AVAssetImageGenerator to take a still and load it onto a view hidden underneath the corresponding AVPlayer. I then take a screenshot using UIGraphicsGetImageFromCurrentImageContext(), combining everything together, and save the images to the app. This happens around 14 times a second. When the recording stops, I use FFmpeg to concatenate all the images into a video at around 30 fps.
The result looks okay, but I'd like to increase the number of screenshots I take per second so it looks smoother. Any ideas on how I could improve the code to take a few more screenshots per second? I hope this makes sense.
var limit = 2000
var screenshotTaken = 0
var view: UIView?
var screenRecording: Bool = false
var compilingVideo: Bool = false

let leftPlayerUrl: URL?
let leftPlayer: AVPlayer?
let leftPlayerImageView: UIImageView?
let rightPlayerUrl: URL?
let rightPlayer: AVPlayer?
let rightPlayerImageView: UIImageView?

init(view: UIView, leftPlayerUrl: URL, leftPlayer: AVPlayer, leftPlayerImageView: UIImageView, rightPlayerUrl: URL, rightPlayer: AVPlayer, rightPlayerImageView: UIImageView) {
    self.view = view
    self.leftPlayerUrl = leftPlayerUrl
    self.leftPlayer = leftPlayer
    self.leftPlayerImageView = leftPlayerImageView
    self.rightPlayerUrl = rightPlayerUrl
    self.rightPlayer = rightPlayer
    self.rightPlayerImageView = rightPlayerImageView
}

func capture() {
    if screenRecording {
        if limit >= screenshotTaken {
            // The delay should be 0.04 to hit 25 fps, but the max screenshots taken is 16 per second
            delay(0.07) {
                DispatchQueue.main.async {
                    self.complexScreenshot()
                }
                self.capture()
            }
        } else {
            DebugPrint.DBprint("Screenshot limit reached or recording stopped")
            delegate?.screenShotLimitReached()
        }
    }
}

func delay(_ delay: Double, closure: @escaping () -> ()) {
    DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(Int64(delay * Double(NSEC_PER_SEC))) / Double(NSEC_PER_SEC), execute: closure)
}
@objc func complexScreenshot() {
    guard let url = leftPlayerUrl else { return }
    let asset = AVAsset(url: url)
    let imageGenerator = AVAssetImageGenerator(asset: asset)
    imageGenerator.maximumSize = CGSize(width: 640, height: 480)
    imageGenerator.requestedTimeToleranceAfter = CMTime.zero
    imageGenerator.requestedTimeToleranceBefore = CMTime.zero
    if let thumb: CGImage = try? imageGenerator.copyCGImage(at: leftPlayer?.currentTime() ?? CMTime.zero, actualTime: nil) {
        let videoImage = UIImage(cgImage: thumb)
        self.leftPlayerImageView?.image = videoImage
    }

    guard let url2 = rightPlayerUrl else { return }
    let asset2 = AVAsset(url: url2)
    let imageGenerator2 = AVAssetImageGenerator(asset: asset2)
    imageGenerator2.maximumSize = CGSize(width: 640, height: 480)
    imageGenerator2.requestedTimeToleranceAfter = CMTime.zero
    imageGenerator2.requestedTimeToleranceBefore = CMTime.zero
    if let thumb2: CGImage = try? imageGenerator2.copyCGImage(at: rightPlayer?.currentTime() ?? CMTime.zero, actualTime: nil) {
        let videoImage = UIImage(cgImage: thumb2)
        self.rightPlayerImageView?.image = videoImage
    }

    guard let bounds = view?.bounds else { return }
    UIGraphicsBeginImageContextWithOptions(bounds.size, view?.isOpaque ?? true, 0.0)
    self.view?.drawHierarchy(in: bounds, afterScreenUpdates: true)
    let image = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()

    self.leftPlayerImageView?.image = nil
    self.rightPlayerImageView?.image = nil

    if let image = image {
        DispatchQueue.global(qos: .utility).async { [weak self] in
            guard let self = self else { return }
            self.saveScreenshot(image: image, number: self.screenshotTaken)
        }
    }
    screenshotTaken = screenshotTaken + 1
}
func saveScreenshot(image: UIImage, number: Int) {
    let number = String(format: "%04d", number)
    let filePath = URL(fileURLWithPath: self.mainPath).appendingPathComponent("Temp/image_\(number).jpg")
    autoreleasepool {
        if let data = image.jpegData(compressionQuality: 0.4),
           !self.fileManager.fileExists(atPath: filePath.path) {
            do {
                try data.write(to: filePath)
            } catch {
                print("Error saving file: ", error)
            }
        }
    }
}

How do you modify an AVFoundation video copy (multiple splits and segment deletions)?

I've been looking through Apple's sample code Building a Feature-Rich App for Sports Analysis and its associated WWDC video to learn to reason about AVFoundation and VNDetectTrajectoriesRequest. My goal is to allow the user to import videos (this part I have working: the user sees a UIDocumentBrowserViewController, picks a video file, and then a copy is made), but I only want segments of the original video copied where trajectories from a moving ball are detected.
I've tried as best I can to grasp the two parts, at the very least finding where the video copy is made and where the trajectory request is made.
The full video copy happens in CameraViewController.swift (I'm starting with just imported video for now and not reading live from the device's video camera), line 160:
func startReadingAsset(_ asset: AVAsset) {
    videoRenderView = VideoRenderView(frame: view.bounds)
    setupVideoOutputView(videoRenderView)

    // Setup display link
    let displayLink = CADisplayLink(target: self, selector: #selector(handleDisplayLink(_:)))
    displayLink.preferredFramesPerSecond = 0 // Use display's rate
    displayLink.isPaused = true
    displayLink.add(to: RunLoop.current, forMode: .default)

    guard let track = asset.tracks(withMediaType: .video).first else {
        AppError.display(AppError.videoReadingError(reason: "No video tracks found in AVAsset."), inViewController: self)
        return
    }

    let playerItem = AVPlayerItem(asset: asset)
    let player = AVPlayer(playerItem: playerItem)
    let settings = [
        String(kCVPixelBufferPixelFormatTypeKey): kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
    ]
    let output = AVPlayerItemVideoOutput(pixelBufferAttributes: settings)
    playerItem.add(output)
    player.actionAtItemEnd = .pause
    player.play()

    self.displayLink = displayLink
    self.playerItemOutput = output
    self.videoRenderView.player = player

    let affineTransform = track.preferredTransform.inverted()
    let angleInDegrees = atan2(affineTransform.b, affineTransform.a) * CGFloat(180) / CGFloat.pi
    var orientation: UInt32 = 1
    switch angleInDegrees {
    case 0:
        orientation = 1 // Recording button is on the right
    case 180, -180:
        orientation = 3 // abs(180) degree rotation recording button is on the right
    case 90:
        orientation = 8 // 90 degree CW rotation recording button is on the top
    case -90:
        orientation = 6 // 90 degree CCW rotation recording button is on the bottom
    default:
        orientation = 1
    }
    videoFileBufferOrientation = CGImagePropertyOrientation(rawValue: orientation)!
    videoFileFrameDuration = track.minFrameDuration
    displayLink.isPaused = false
}
@objc
private func handleDisplayLink(_ displayLink: CADisplayLink) {
    guard let output = playerItemOutput else {
        return
    }
    videoFileReadingQueue.async {
        let nextTimeStamp = displayLink.timestamp + displayLink.duration
        let itemTime = output.itemTime(forHostTime: nextTimeStamp)
        guard output.hasNewPixelBuffer(forItemTime: itemTime) else {
            return
        }
        guard let pixelBuffer = output.copyPixelBuffer(forItemTime: itemTime, itemTimeForDisplay: nil) else {
            return
        }
        // Create sample buffer from pixel buffer
        var sampleBuffer: CMSampleBuffer?
        var formatDescription: CMVideoFormatDescription?
        CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: pixelBuffer, formatDescriptionOut: &formatDescription)
        let duration = self.videoFileFrameDuration
        var timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: itemTime, decodeTimeStamp: itemTime)
        CMSampleBufferCreateForImageBuffer(allocator: nil,
                                           imageBuffer: pixelBuffer,
                                           dataReady: true,
                                           makeDataReadyCallback: nil,
                                           refcon: nil,
                                           formatDescription: formatDescription!,
                                           sampleTiming: &timingInfo,
                                           sampleBufferOut: &sampleBuffer)
        if let sampleBuffer = sampleBuffer {
            self.outputDelegate?.cameraViewController(self, didReceiveBuffer: sampleBuffer, orientation: self.videoFileBufferOrientation)
            DispatchQueue.main.async {
                let stateMachine = self.gameManager.stateMachine
                if stateMachine.currentState is GameManager.SetupCameraState {
                    // Once we received first buffer we are ready to proceed to the next state
                    stateMachine.enter(GameManager.DetectingBoardState.self)
                }
            }
        }
    }
}
Line 139 self.outputDelegate?.cameraViewController(self, didReceiveBuffer: sampleBuffer, orientation: self.videoFileBufferOrientation) is where the video sample buffer is passed to the Vision framework subsystem for analyzing trajectories, the second part. This delegate callback is implemented in GameViewController.swift on line 335:
// Perform the trajectory request in a separate dispatch queue.
trajectoryQueue.async {
    do {
        try visionHandler.perform([self.detectTrajectoryRequest])
        if let results = self.detectTrajectoryRequest.results {
            DispatchQueue.main.async {
                self.processTrajectoryObservations(controller, results)
            }
        }
    } catch {
        AppError.display(error, inViewController: self)
    }
}
Trajectories found are drawn over the video in self.processTrajectoryObservations(controller, results).
Where I'm stuck now is modifying this so that, instead of drawing the trajectories, the new video contains only the parts of the original where trajectories were detected in the frame.
If you know the number of seconds into the current video, and the duration, you can transcode/export parts of the original video using AVAssetExportSession.
Here's a piece of code I used a few years back to do something similar. It's Swift 3, so the syntax may differ slightly from current Swift.
let asset = AVURLAsset(url: originalFileURL)
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)!
exportSession.outputURL = someOutputURLYouWant
exportSession.outputFileType = AVFileTypeMPEG4 // can choose other types than mp4 if you want

let start = CMTimeMakeWithSeconds(secondsIntoVideoFloat, 600)
let duration = CMTimeMakeWithSeconds(numberOfSecondsFloat, 600)
exportSession.timeRange = CMTimeRange(start: start, duration: duration)

exportSession.exportAsynchronously {
    switch exportSession.status {
    // handle completed, failed, cancelled states.
    }
}
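For reference (not from the original answer), the same export in current Swift looks roughly like this; originalFileURL, someOutputURL, secondsIntoVideo, and numberOfSeconds stand in for the same placeholders used above:
// Sketch of the same trim/export with modern API names (.mp4, CMTime(seconds:preferredTimescale:)).
let asset = AVURLAsset(url: originalFileURL)
guard let exportSession = AVAssetExportSession(asset: asset,
                                               presetName: AVAssetExportPresetHighestQuality) else { return }
exportSession.outputURL = someOutputURL
exportSession.outputFileType = .mp4

let start = CMTime(seconds: secondsIntoVideo, preferredTimescale: 600)
let duration = CMTime(seconds: numberOfSeconds, preferredTimescale: 600)
exportSession.timeRange = CMTimeRange(start: start, duration: duration)

exportSession.exportAsynchronously {
    switch exportSession.status {
    case .completed:
        print("Export finished")
    case .failed, .cancelled:
        print("Export failed: \(String(describing: exportSession.error))")
    default:
        break
    }
}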

Why is AVPlayer periodic observer showing current time before the start time on device?

With AVPlayer, after seeking to a specific time (let's say 0.5 s) and calling play(), the periodic observer fires a few callbacks with the player's currentTime() before the seek position.
Here's the code:
(test wave file: test_loop.wav, loaded from the bundle below)
import AVFoundation

class Player {
    init() {
        let session = AVAudioSession.sharedInstance()
        do {
            try session.setCategory(.playAndRecord, mode: .videoRecording)
            try session.setActive(true)
            session.requestRecordPermission { allowed in
                if !allowed {
                    fatalError("you must allow mic access")
                }
            }
        } catch {
            fatalError(error.localizedDescription)
        }

        addPeriodicTimeObserver(to: player)

        if let url = Bundle.main.url(forResource: "test_loop", withExtension: "wav") {
            let item = AVPlayerItem(asset: AVAsset(url: url))
            player.replaceCurrentItem(with: item)
        }

        DispatchQueue.main.asyncAfter(deadline: .now() + 3) { [weak self] in
            self?.seek() {
                DispatchQueue.main.asyncAfter(deadline: .now() + 3) {
                    self?.play()
                }
            }
        }
    }

    func addPeriodicTimeObserver(to player: AVPlayer) {
        let time = CMTime(seconds: 0.04, preferredTimescale: timeScale)
        _ = player.addPeriodicTimeObserver(forInterval: time, queue: .main) { [unowned self] _ in
            if
                self.playCalled == true &&
                self.player.currentItem?.status == .readyToPlay &&
                self.player.status == .readyToPlay &&
                self.player.timeControlStatus == .playing {
                print("currentTime \(self.player.currentTime().seconds)")
            }
        }
    }

    func seek(completion: @escaping () -> Void) {
        let time = CMTime(seconds: 0.5, preferredTimescale: timeScale)
        player.seek(to: time, toleranceBefore: .zero, toleranceAfter: .zero) { [unowned self] in
            if $0 {
                print("seek current time: \(self.player.currentTime().seconds)")
                completion()
            }
        }
    }

    func play() {
        if let currentItem = player.currentItem, currentItem.status == .readyToPlay {
            player.play()
            playCalled = true
        }
    }

    let player = AVPlayer()
    let timeScale = CMTimeScale(NSEC_PER_SEC)
    var playCalled: Bool = false
}
It produces:
seek current time: 0.5
currentTime 0.475462793
currentTime 0.475983585
currentTime 0.48074996
currentTime 0.521489168
currentTime 0.561431835
currentTime 0.601529793
currentTime 0.641514043
[...]
IMPORTANT NOTE: This happens only when you run it on a device; the Simulator behaves as expected.
As you can see, I tried to guard as much as I could against printing any current time before it is relevant, but I still get confusing results. Any ideas on what the problem might be and how to fix it? I would love to see the current time starting at the moment I seeked to :)
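One workaround, not from this thread and only a sketch under the assumption that the early callbacks carry stale pre-seek timestamps, is to remember the seek target and ignore periodic callbacks that report an earlier time. The seekTarget property here is hypothetical:
var seekTarget: CMTime = .zero  // hypothetical: updated right before calling player.seek(to:)

func addFilteredTimeObserver(to player: AVPlayer) {
    let interval = CMTime(seconds: 0.04, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
    _ = player.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] time in
        guard let self = self else { return }
        // Drop callbacks whose timestamps fall before the most recent seek target.
        guard time >= self.seekTarget else { return }
        print("currentTime \(time.seconds)")
    }
}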

How to show a progress bar for AVAudioPlayer using Swift?

I'm playing a song using AVAudioPlayer. I need a progress bar to show the progress of the song.
My issue is that the progress bar isn't updating properly: it reaches the end within 2-3 seconds.
func playMusic() {
    do {
        player = try AVAudioPlayer(contentsOf: (currentSong?.mediaURL)!)
        guard let player = player else { return }
        player.prepareToPlay()
        player.play()
        updater = CADisplayLink(target: self, selector: #selector(self.musicProgress))
        updater.frameInterval = 1
        updater.add(to: RunLoop.current, forMode: RunLoop.Mode.common)
        playButton.setImage(UIImage.init(named: "pause"), for: .normal)
    } catch let error as NSError {
        print(error.description)
    }
}

@objc func musicProgress() {
    let normalizedTime = Float(self.player?.currentTime as! Double * 100.0 / (self.player?.duration as! Double))
    self.progressMusic.progress = normalizedTime
}
The issue is here:
let normalizedTime = Float(self.player?.currentTime as! Double * 100.0 / (self.player?.duration as! Double) )
With this you will get a value between 0.0 and 100.0, but according to the UIProgressView documentation, progress must be between 0.0 and 1.0. Try:
let normalizedTime = Float(self.player?.currentTime as! Double / (self.player?.duration as! Double) )
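As an aside (not part of the original answer), the force casts can be dropped entirely; a minimal sketch using the player and progressMusic names from the question:
@objc func musicProgress() {
    // currentTime and duration are TimeInterval (Double), so no casting is needed.
    guard let player = player, player.duration > 0 else { return }
    progressMusic.progress = Float(player.currentTime / player.duration)  // 0.0 ... 1.0
}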

AVPlayer returns nan duration for videos

I am trying to play videos with AVPlayer using the following code, but I get nothing as a result because the duration shows as nan.
func setUpPlayer(fileURL: URL) {
    let playerItem: AVPlayerItem = AVPlayerItem(url: fileURL)
    player = AVPlayer(playerItem: playerItem)
    let playerLayer = AVPlayerLayer(player: player!)
    playerLayer.frame = CGRect(x: self.videoContainer.frame.origin.x,
                               y: self.videoContainer.frame.origin.y + 20,
                               width: self.videoContainer.frame.size.width,
                               height: self.videoContainer.frame.size.height - 40)
    player?.addObserver(self, forKeyPath: "currentItem", options: .initial, context: nil)
    self.view.layer.addSublayer(playerLayer)

    rangeSlider.setVideoURL(videoURL: fileURL)
    rangeSlider.delegate = self
    self.endTime = CMTimeGetSeconds((player?.currentItem?.duration)!)

    let timeInterval: CMTime = CMTimeMakeWithSeconds(0.01, 100)

    // let asset: AVURLAsset = AVURLAsset.init(url: videoURL)
    // let videoDuration: CMTime = asset.duration
    // let timeInterval: CMTime = CMTimeMakeWithSeconds(videoDuration, 100)
    // CMTimeGetSeconds(videoDuration)

    timeObserver = player?.addPeriodicTimeObserver(forInterval: timeInterval,
                                                   queue: DispatchQueue.main) { (elapsedTime: CMTime) -> Void in
        self.observeTime(elapsedTime: elapsedTime)
    } as AnyObject!
}
I am doing this for the first time. Kindly suggest a solution to this problem. Thanks in advance!
You missed calling player.play() in your code.
let timeRange = self.avPlayer.currentItem.loadedTimeRanges[0].CMTimeRangeValue
let duration = CMTimeGetSeconds(timeRange.duration)
Try this, it will definitely be helpful.
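For what it's worth (not from the original answer), the same idea in current Swift reads roughly like this, assuming avPlayer is a non-nil AVPlayer whose item has loaded at least one time range:
// Sketch: read the duration of the first loaded time range once it is available.
if let timeRange = avPlayer.currentItem?.loadedTimeRanges.first?.timeRangeValue {
    let loadedSeconds = CMTimeGetSeconds(timeRange.duration)
    print("Loaded duration: \(loadedSeconds) seconds")
}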
