I am trying to play videos with AVPlayer using the following code, but I get nothing as a result; the duration shows as NaN.
func setUpPlayer(fileURL: URL) {
    let playerItem = AVPlayerItem(url: fileURL)
    player = AVPlayer(playerItem: playerItem)
    let playerLayer = AVPlayerLayer(player: player!)
    playerLayer.frame = CGRect(x: self.videoContainer.frame.origin.x,
                               y: self.videoContainer.frame.origin.y + 20,
                               width: self.videoContainer.frame.size.width,
                               height: self.videoContainer.frame.size.height - 40)
    player?.addObserver(self, forKeyPath: "currentItem", options: .initial, context: nil)
    self.view.layer.addSublayer(playerLayer)
    rangeSlider.setVideoURL(videoURL: fileURL)
    rangeSlider.delegate = self
    self.endTime = CMTimeGetSeconds((player?.currentItem?.duration)!)
    let timeInterval: CMTime = CMTimeMakeWithSeconds(0.01, 100)
    // I also tried reading the duration from the asset directly:
    // let asset = AVURLAsset(url: videoURL)
    // let videoDuration: CMTime = asset.duration
    timeObserver = player?.addPeriodicTimeObserver(forInterval: timeInterval,
                                                   queue: DispatchQueue.main) { (elapsedTime: CMTime) in
        self.observeTime(elapsedTime: elapsedTime)
    } as AnyObject
}
This is my first time doing this. Kindly suggest a solution to this problem. Thanks in advance!
You forgot to call player.play() in your code.
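For example (a minimal sketch; player is the optional AVPlayer property from the question):

self.view.layer.addSublayer(playerLayer)
player?.play() // without this the item is loaded but playback never starts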
let timeRange = self.avPlayer.currentItem!.loadedTimeRanges[0].timeRangeValue
let duration = CMTimeGetSeconds(timeRange.duration)
Try this; it will definitely help.
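Note that loadedTimeRanges is typically empty and the item's duration is indefinite until the item is ready. A minimal sketch of waiting for that state with block-based KVO before reading the duration (assuming the same player property as in the question):

var statusObservation: NSKeyValueObservation?

func observeReadyToPlay() {
    statusObservation = player?.currentItem?.observe(\.status, options: [.initial, .new]) { item, _ in
        guard item.status == .readyToPlay else { return }
        // The duration is only meaningful once the item is ready to play
        print("Duration: \(CMTimeGetSeconds(item.duration))")
    }
}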
I have code that applies Core Image filters to a video and plays it once they are applied, but for some reason there is a noticeable lag compared to playing the video without a filter.
func addComposition(editItem: EditItem, addCrop: Bool = true) {
    pause()
    let renderSize = editItem.getVideoRenderSize(track: track, editItem: editItem, addCrop: addCrop)
    let videoComposition = AVMutableVideoComposition(asset: asset) { [weak self] request in
        guard let self = self else { return }
        let input = request.sourceImage.clampedToExtent()
        let frameCounter = Float(CMTimeGetSeconds(request.compositionTime)) * self.fps
        if !editItem.defersPreview {
            let outputImage = editItem.executeVideoFilterPipeline(on: input,
                                                                  excludedTypes: addCrop ? [] : [.crop],
                                                                  frameIndex: frameCounter,
                                                                  request: request,
                                                                  renderSize: renderSize)
            if let output = outputImage {
                request.finish(with: output, context: self.context)
            }
        }
    }
    videoComposition.renderSize = renderSize
    videoComposition.sourceTrackIDForFrameTiming = kCMPersistentTrackID_Invalid
    let frameRateTooHighForPhone = CMTime(value: 1, timescale: 1000)
    videoComposition.frameDuration = frameRateTooHighForPhone
    player.currentItem?.videoComposition = videoComposition
    composition = videoComposition
    play()
}
I already tried updating the frameDuration, hoping it would do the trick.
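One thing that may be worth checking (an assumption based on the code above, not a confirmed fix): a frameDuration of CMTime(value: 1, timescale: 1000) asks the compositor for 1000 frames per second, so deriving it from the source track's actual frame rate may reduce the per-frame filtering load. A sketch:

// Hypothetical: match the composition's frame rate to the source track
if let videoTrack = asset.tracks(withMediaType: .video).first {
    let fps = max(videoTrack.nominalFrameRate, 1) // e.g. 30 for typical phone footage
    videoComposition.frameDuration = CMTime(value: 1, timescale: CMTimeScale(fps.rounded()))
}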
I am playing videos from the user's library in my app. I use this method in viewDidLoad() to get the video:
fileprivate let imageManager = PHImageManager()
fileprivate var playerLayer: AVPlayerLayer?
fileprivate var player: AVPlayer?
fileprivate var videoView: UIView?

imageManager.requestPlayerItem(forVideo: videoAsset, options: options, resultHandler: { playerItem, info in
    DispatchQueue.main.sync {
        guard self.playerLayer == nil else { return }
        self.player = AVPlayer(playerItem: playerItem)
        self.playerLayer = AVPlayerLayer(player: self.player)
        self.videoView = UIView(frame: self.view.frame)
        self.videoView?.contentMode = .scaleAspectFit
        self.playerLayer?.videoGravity = AVLayerVideoGravity.resizeAspect
        self.playerLayer?.frame = self.videoView!.layer.bounds
        self.videoView!.layer.addSublayer(self.playerLayer!)
        self.photoScrollView.addSubview(self.videoView!)
        self.addObserversForVideo()
    }
})
Inside the addObserversForVideo() I set different observers to update a slider that controls the video and also set its min and max values:
guard let currentPlayer = player else { return }
guard let currentItem = currentPlayer.currentItem else { return }

NotificationCenter.default.addObserver(self,
                                       selector: #selector(self.playerFinishedPlayingVideo),
                                       name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
                                       object: nil)

let interval = CMTime(seconds: 0.5, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
_ = playerLayer?.player?.addPeriodicTimeObserver(forInterval: interval, queue: DispatchQueue.main, using: { [weak self] time in
    self?.videoSlider.maximumValue = Float(currentItem.duration.seconds)
    self?.videoSlider.minimumValue = 0.0
    self?.videoSlider.value = Float(currentItem.currentTime().seconds)
    self?.videoElapsedTimeLabel.text = self?.getStringFromCMTime(time: currentItem.currentTime())
    let remainingTime = currentItem.duration - currentItem.currentTime()
    self?.videoRemainingTimeLabel.text = self?.getStringFromCMTime(time: remainingTime)
})
Now, the problem is that I sometimes get this error which crashes my app:
'NSInternalInconsistencyException', reason: 'Attempting to set a
slider's minimumValue (0.000000) to be larger than the maximumValue
(nan)'
I don't understand why this happens, since I check that currentItem is non-nil with a guard statement at the beginning, and I also set the minimum value after the maximum value. I would appreciate it if someone could help me out.
Thanks to @TonyNguyen I could fix the problem in one line:
guard currentItem.status == .readyToPlay else { return }
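This works because an AVPlayerItem reports an indefinite duration until its status becomes .readyToPlay, and converting an indefinite CMTime to seconds yields NaN, which is what ended up as the slider's maximumValue.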
You need to guard against two additional things:
currentPlayer.currentItem.status == .readyToPlay
currentPlayer.currentItem.duration >= CMTime.zero
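Combined inside the time observer from the question, that would look something like this sketch:

_ = playerLayer?.player?.addPeriodicTimeObserver(forInterval: interval, queue: DispatchQueue.main, using: { [weak self] time in
    // Skip UI updates until the item is ready and reports a valid duration
    guard currentItem.status == .readyToPlay,
          currentItem.duration >= CMTime.zero else { return }
    self?.videoSlider.maximumValue = Float(currentItem.duration.seconds)
    self?.videoSlider.minimumValue = 0.0
    self?.videoSlider.value = Float(currentItem.currentTime().seconds)
})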
In my periodic time observer I convert the time to seconds, then check whether the value is NaN or infinite before updating anything.
_ = player?.addPeriodicTimeObserver(forInterval: interval, queue: DispatchQueue.main, using: { [weak self] time in
    let seconds = CMTimeGetSeconds(time)
    guard !(seconds.isNaN || seconds.isInfinite) else {
        return
    }
    // the rest of your code
})
I am wondering what the best solution to looping audio for a defined duration on iOS is.
I am currently playing around with
AVAudioPlayer (where I can define a repeat count but can't define an end-time)
AVPlayer (where I can define a forwardPlaybackEndTime but not a loop count)
AVPlayerLooper (that I don't yet fully understand)
So what I need is to define a duration for which a certain sound file is repeated. For example, I have an 8-second MP3 and want to play it for, say, one minute.
What would also be super great is if I could cross-fade when it starts over again.
You were on the right track with AVPlayerLooper.
This is how you set up AVPlayerLooper:
var playerLooper: AVPlayerLooper!
var player: AVQueuePlayer!

func play(_ url: URL) {
    let asset = AVAsset(url: url)
    let playerItem = AVPlayerItem(asset: asset)
    player = AVQueuePlayer(playerItem: playerItem)
    playerLooper = AVPlayerLooper(player: player, templateItem: playerItem)
    player.play()
}
To stop the loop after a set amount of time you can use addBoundaryTimeObserver(forTimes:queue:using:)
For example:
let assetDuration = CMTimeGetSeconds(asset.duration)
let maxDuration = 60.0 // Define max duration
let maxLoops = floor(maxDuration / assetDuration)
let lastLoopDuration = maxDuration - (assetDuration * maxLoops)
let boundaryTime = CMTimeMakeWithSeconds(lastLoopDuration, preferredTimescale: 1)
let boundaryTimeValue = NSValue(time: boundaryTime)
player.addBoundaryTimeObserver(forTimes: [boundaryTimeValue], queue: DispatchQueue.main) { [weak self] in
    if self?.playerLooper.loopCount == Int(maxLoops) {
        self?.player.pause()
    }
}
For fading in and out you have to set the audioMix property on your AVPlayerItem instance before using it.
let introRange = CMTimeRangeMake(start: CMTimeMakeWithSeconds(0, preferredTimescale: 1), duration: CMTimeMakeWithSeconds(1, preferredTimescale: 1))
let endingSecond = CMTimeRangeMake(start: CMTimeMakeWithSeconds(assetDuration - 1, preferredTimescale: 1), duration: CMTimeMakeWithSeconds(1, preferredTimescale: 1))
let inputParams = AVMutableAudioMixInputParameters(track: asset.tracks.first! as AVAssetTrack)
inputParams.setVolumeRamp(fromStartVolume: 0, toEndVolume: 1, timeRange: introRange)
inputParams.setVolumeRamp(fromStartVolume: 1, toEndVolume: 0, timeRange: endingSecond)
let audioMix = AVMutableAudioMix()
audioMix.inputParameters = [inputParams]
playerItem.audioMix = audioMix
Complete function:
func play(_ url: URL) {
    let asset = AVAsset(url: url)
    let playerItem = AVPlayerItem(asset: asset)
    let assetDuration = CMTimeGetSeconds(asset.duration)

    // Fade in over the first second and fade out over the last second of each loop
    let introRange = CMTimeRangeMake(start: CMTimeMakeWithSeconds(0, preferredTimescale: 1),
                                     duration: CMTimeMakeWithSeconds(1, preferredTimescale: 1))
    let endingSecond = CMTimeRangeMake(start: CMTimeMakeWithSeconds(assetDuration - 1, preferredTimescale: 1),
                                       duration: CMTimeMakeWithSeconds(1, preferredTimescale: 1))
    let inputParams = AVMutableAudioMixInputParameters(track: asset.tracks.first!)
    inputParams.setVolumeRamp(fromStartVolume: 0, toEndVolume: 1, timeRange: introRange)
    inputParams.setVolumeRamp(fromStartVolume: 1, toEndVolume: 0, timeRange: endingSecond)

    let audioMix = AVMutableAudioMix()
    audioMix.inputParameters = [inputParams]
    playerItem.audioMix = audioMix

    player = AVQueuePlayer(playerItem: playerItem)
    playerLooper = AVPlayerLooper(player: player, templateItem: playerItem)
    player.play()

    // Pause once the final (possibly partial) loop reaches the leftover duration
    let maxDuration = 60.0 // define max duration
    let maxLoops = floor(maxDuration / assetDuration)
    let lastLoopDuration = maxDuration - (assetDuration * maxLoops)
    let boundaryTime = CMTimeMakeWithSeconds(lastLoopDuration, preferredTimescale: 1)
    let boundaryTimeValue = NSValue(time: boundaryTime)
    player.addBoundaryTimeObserver(forTimes: [boundaryTimeValue], queue: DispatchQueue.main) { [weak self] in
        if self?.playerLooper.loopCount == Int(maxLoops) {
            self?.player.pause()
        }
    }
}
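One note on the boundary time: a preferredTimescale of 1 rounds lastLoopDuration to whole seconds, so if the leftover segment needs sub-second accuracy, build boundaryTime with a larger timescale such as 600.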
In my application I use AVPlayer to play videos. If we play a video and go back to the previous page, the video keeps playing in the background, which is fine; but if we play the same video again it should continue from where it left off rather than starting over. I can set the slider position according to the current playback time, but the screen does not show the video content even though the sound is playing.
The first time any video is played, I call the method below:
func setup() {
    UserDefaults.standard.set(videoString as NSString, forKey: CurrentURL)
    UserDefaults.standard.synchronize()

    let targetTime: CMTime = CMTimeMake(0, 1)
    mediaPlayer.seek(to: targetTime)
    mediaPlayer.pause()

    print("Video URL \(videoString)")
    let url = NSURL(string: videoString)
    let playerItem = AVPlayerItem(url: url! as URL)
    mediaPlayer = AVPlayer(playerItem: playerItem)

    let playerLayer = AVPlayerLayer(player: mediaPlayer)
    playerLayer.frame = self.view.bounds
    playerLayer.backgroundColor = UIColor.black.cgColor
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspect
    self.view.layer.addSublayer(playerLayer)

    AppDelegate.shared().showLoading()
    videoView.isHidden = false
    mediaPlayer.play()

    let duration: CMTime = mediaPlayer.currentItem!.asset.duration
    let seconds: Float64 = CMTimeGetSeconds(duration)
    videoSlider.maximumValue = Float(seconds)
    videoPlayButton.setImage(UIImage(named: "Pause"), for: .normal)
    videoPlaying = true

    // Adding default periodic observer on the player
    mediaPlayer.addPeriodicTimeObserver(forInterval: CMTimeMakeWithSeconds(1, 1), queue: DispatchQueue.main) { _ in
        if mediaPlayer.currentItem?.status == .readyToPlay {
            AppDelegate.shared().removeLoading()
            self.videoSlider.value = Float(CMTimeGetSeconds(mediaPlayer.currentTime()))
            let currentTime: Int = Int(CMTimeGetSeconds(mediaPlayer.currentTime()))
            let minutes = currentTime / 60
            let seconds = currentTime - minutes * 60
            self.durationLabel.text = NSString(format: "%02d:%02d", minutes, seconds) as String
        }
    }
}
If the user plays the same video a second time, I check the current URL stored in UserDefaults; if it is the same video, I do the following:
let runningString = UserDefaults.standard.object(forKey: CurrentURL) as! NSString
if runningString.isEqual(to: videoString) {
    //self.view.layer.sublayers?.forEach { $0.removeFromSuperlayer() }
    videoPlayButton.setImage(UIImage(named: "Pause"), for: .normal)

    let duration: CMTime = mediaPlayer.currentItem!.asset.duration
    let seconds: Float64 = CMTimeGetSeconds(duration)
    self.videoSlider.value = Float(CMTimeGetSeconds(mediaPlayer.currentTime()))
    videoSlider.minimumValue = 0
    videoSlider.maximumValue = Float(seconds)

    let playerLayer = AVPlayerLayer(player: mediaPlayer)
    playerLayer.frame = self.view.frame
    playerLayer.backgroundColor = UIColor.black.cgColor
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspect
    self.view.layer.addSublayer(playerLayer)

    mediaPlayer.addPeriodicTimeObserver(forInterval: CMTimeMakeWithSeconds(1, 1), queue: DispatchQueue.main) { _ in
        if mediaPlayer.currentItem?.status == .readyToPlay {
            self.videoSlider.value = Float(CMTimeGetSeconds(mediaPlayer.currentTime()))
            let currentTime: Int = Int(CMTimeGetSeconds(mediaPlayer.currentTime()))
            let minutes = currentTime / 60
            let seconds = currentTime - minutes * 60
            self.durationLabel.text = NSString(format: "%02d:%02d", minutes, seconds) as String
        }
    }
}
But the layer is not showing the video content when I play the same video again. If the user plays a video that is already running, it should continue from the current position instead of restarting.
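For anyone with the same problem, one approach worth trying (an assumption, not a verified fix) is to keep a single AVPlayerLayer property alive alongside the player and re-attach it when the screen reappears, instead of creating a second layer for the same AVPlayer:

// Hypothetical sketch: reuse one stored playerLayer property
if playerLayer == nil {
    playerLayer = AVPlayerLayer(player: mediaPlayer)
    playerLayer?.videoGravity = AVLayerVideoGravityResizeAspect
}
playerLayer?.removeFromSuperlayer()       // detach from wherever it was shown before
playerLayer?.frame = self.view.bounds
self.view.layer.addSublayer(playerLayer!) // playback simply continues from the current time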
If I have an MPMoviePlayerController in Swift:
let mp = MPMoviePlayerController(contentURL: url)
Is there a way I can get the number of frames within the video located at url? If not, is there some other way to determine the frame count?
I don't think MPMoviePlayerController can help you.
Use an AVAssetReader and count the number of CMSampleBuffers it returns to you. You can configure it to not even decode the frames, effectively parsing the file, so it should be fast and memory efficient.
Something like
var asset = AVURLAsset(URL: url, options: nil)
var reader = AVAssetReader(asset: asset, error: nil)
var videoTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
var readerOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil) // NB: nil should give you raw frames
reader.addOutput(readerOutput)
reader.startReading()

var nFrames = 0
while true {
    var sampleBuffer = readerOutput.copyNextSampleBuffer()
    if sampleBuffer == nil {
        break
    }
    nFrames++
}
println("Num frames: \(nFrames)")
Sorry if that's not idiomatic; I don't know Swift.
Swift 5
func getNumberOfFrames(url: URL) -> Int {
    let asset = AVURLAsset(url: url, options: nil)
    do {
        let reader = try AVAssetReader(asset: asset)
        let videoTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
        let readerOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil) // NB: nil should give you raw frames
        reader.add(readerOutput)
        reader.startReading()

        var nFrames = 0
        while true {
            let sampleBuffer = readerOutput.copyNextSampleBuffer()
            if sampleBuffer == nil {
                break
            }
            nFrames += 1
        }
        print("Num frames: \(nFrames)")
        return nFrames
    } catch {
        print("Error: \(error)")
    }
    return 0
}
You could also use frames per second to calculate total frames.
var player: AVPlayer?
var playerController = AVPlayerViewController()
var videoFPS: Int = 0
var totalFrames: Int?

guard let videoURL = URL(string: "") else { return } // supply your video URL here
player = AVPlayer(url: videoURL)
playerController.player = player

guard let asset = player?.currentItem?.asset else {
    return
}
let tracks = asset.tracks(withMediaType: .video)
guard let fps = tracks.first?.nominalFrameRate else { return }
// Use the asset's duration; the item's duration may still be indefinite at this point
let durationSeconds = CMTimeGetSeconds(asset.duration)
self.videoFPS = lround(Double(fps))
self.totalFrames = lround(Double(self.videoFPS) * durationSeconds)
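Keep in mind that nominalFrameRate is an average, so for variable-frame-rate footage this fps × duration calculation is only an estimate; counting sample buffers with AVAssetReader, as above, gives the exact number.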