AVPlayerViewController controls on iPhone X - ios

I am not able to find a solution for displaying the AVPlayerViewController controls on iPhone X. Setting the video gravity to .resizeAspectFill shows the controls, but the video then fills the whole screen and clips the remaining portions. Any ideas? Here is my code:
func loadVideoURL() {
    let fileURL = URL(string: (exerciseDetailsObj?.video_file)!)
    avPlayer = AVPlayer(url: fileURL!)
    let playerController = AVPlayerViewController()
    playerController.player = avPlayer
    playerController.view.frame = CGRect(x: 0, y: 0, width: viewPlayerContainer.bounds.size.width, height: viewPlayerContainer.bounds.size.height)
    playerController.showsPlaybackControls = false
    avPlayerController = playerController
    self.addChild(avPlayerController)
    viewPlayerContainer.addSubview(avPlayerController.view)
    viewPlayerContainer.sendSubviewToBack(avPlayerController.view)
    avPlayerController.didMove(toParent: self)
    avPlayerController.addObserver(self, forKeyPath: "videoBounds", options: NSKeyValueObservingOptions.new, context: nil)
}
func enterFullscreen(playerViewController: AVPlayerViewController) {
    playerViewController.showsPlaybackControls = true // not displayed on iPhone X
    // Note: the selectors below are private API; using them risks App Store rejection.
    let selectorName: String = {
        if #available(iOS 11.3, *) {
            return "_transitionToFullScreenAnimated:interactive:completionHandler:"
        } else if #available(iOS 11, *) {
            return "_transitionToFullScreenAnimated:completionHandler:"
        } else {
            return "_transitionToFullScreenViewControllerAnimated:completionHandler:"
        }
    }()
    let selectorToForceFullScreenMode = NSSelectorFromString(selectorName)
    if playerViewController.responds(to: selectorToForceFullScreenMode) {
        playerViewController.perform(selectorToForceFullScreenMode, with: true, with: nil)
    }
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
    if keyPath == "videoBounds" {
        if let rect = change?[.newKey] as? NSValue {
            let newRect = rect.cgRectValue
            if newRect.size.height <= 214 {
                avPlayerController.showsPlaybackControls = false
            }
        }
    }
}

Check this out. Refer to the code below and adapt it to your own.
@IBOutlet weak var videoPlayerView: UIView! // the UIView on the storyboard on which you want to display the video
var player: AVPlayer?
var playerController = AVPlayerLayer()
override func viewDidLoad() {
    super.viewDidLoad()
    self.videoPlayerView.layer.insertSublayer(playerController, at: 0)
    player = AVPlayer(url: videoUrl!)
    playerController.player = player
    self.player?.play()
}
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    playerController.videoGravity = .resizeAspectFill
    playerController.frame = self.videoPlayerView.bounds
}


How to record an audio stream and save it to a file / swift 4.2

I am creating a radio application for iPhone (coded in Swift 4.2) and I want to add a function that records the sound produced by my radio (played from an AVPlayer) and saves it to a file when I push a button. Which code should I use?
The code is in Swift 4.2, with Xcode 10.1.
I searched the web for "How to record an audio stream swift 4.2" and "How to record audio from AVPlayer swift 4.2", but I can't find an answer.
My code:
import UIKit
import AVFoundation
import MediaPlayer
class ViewControllerPlayer: UIViewController {
var URl = "http://link_of_audio_stream"
var player:AVPlayer?
var playerItem:AVPlayerItem?
var playerLayer:AVPlayerLayer?
override func viewDidLoad() {
super.viewDidLoad()
let url = URL(string: URl)
let playerItem1:AVPlayerItem = AVPlayerItem(url: url!)
player = AVPlayer(playerItem: playerItem1)
}
@IBAction func Play(_ sender: Any) {
player?.play()
}
@IBAction func Pause(_ sender: Any) {
player?.pause()
}
private var audioRecorder: AVAudioRecorder!
func startRecording() throws {
guard let newFileURL = createURLForNewRecord() else {
throw RecordingServiceError.canNotCreatePath
}
do {
var urlString = URL(string: URl)
urlString = newFileURL
audioRecorder = try AVAudioRecorder(url: newFileURL,
settings: [AVFormatIDKey:Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 8000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue])
audioRecorder.delegate = self as? AVAudioRecorderDelegate
audioRecorder.prepareToRecord()
audioRecorder.record(forDuration: TimeConstants.recordDuration)
//error: Use of unresolved identifier 'TimeConstants'
} catch let error {
print(error)
}
}
func STOPREC1() throws {
audioRecorder.stop()
audioRecorder = nil
print("Recording finished successfully.")
}
enum RecordingServiceError: String, Error {
case canNotCreatePath = "Can not create path for new recording"
}
private func createURLForNewRecord() -> URL? {
guard let appGroupFolderUrl = FileManager.getAppFolderURL() else {
return nil
}
let date = String(describing: Date())
let fullFileName = "Enregistrement radio " + date + ".m4a"
let newRecordFileName = appGroupFolderUrl.appendingPathComponent(fullFileName)
return newRecordFileName
}
}
extension FileManager {
class func getAppFolderURL() -> URL? {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
}
After multiple internet searches, I found the solution.
I found this Swift class somewhere on the internet, named « CachingPlayerItem.swift »; it allows you to record an online audio stream.
import Foundation
import AVFoundation
fileprivate extension URL {
func withScheme(_ scheme: String) -> URL? {
var components = URLComponents(url: self, resolvingAgainstBaseURL: false)
components?.scheme = scheme
return components?.url
}
}
@objc protocol CachingPlayerItemDelegate {
/// Is called when the media file is fully downloaded.
@objc optional func playerItem(_ playerItem: CachingPlayerItem, didFinishDownloadingData data: Data)
/// Is called every time a new portion of data is received.
@objc optional func playerItem(_ playerItem: CachingPlayerItem, didDownloadBytesSoFar bytesDownloaded: Int, outOf bytesExpected: Int)
/// Is called after initial prebuffering is finished, meaning we are ready to play.
@objc optional func playerItemReadyToPlay(_ playerItem: CachingPlayerItem)
/// Is called when the data being downloaded did not arrive in time to continue playback.
@objc optional func playerItemPlaybackStalled(_ playerItem: CachingPlayerItem)
/// Is called on downloading error.
@objc optional func playerItem(_ playerItem: CachingPlayerItem, downloadingFailedWith error: Error)
}
open class CachingPlayerItem: AVPlayerItem {
class ResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate, URLSessionDataDelegate, URLSessionTaskDelegate {
var playingFromData = false
var mimeType: String? // is required when playing from Data
var session: URLSession?
var mediaData: Data?
var response: URLResponse?
var pendingRequests = Set<AVAssetResourceLoadingRequest>()
weak var owner: CachingPlayerItem?
var fileURL: URL!
var outputStream: OutputStream?
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
if playingFromData {
// Nothing to load.
} else if session == nil {
// If we're playing from a url, we need to download the file.
// We start loading the file on first request only.
guard let initialUrl = owner?.url else {
fatalError("internal inconsistency")
}
startDataRequest(with: initialUrl)
}
pendingRequests.insert(loadingRequest)
processPendingRequests()
return true
}
func startDataRequest(with url: URL) {
var recordingName = "record.mp3"
if let recording = owner?.recordingName{
recordingName = recording
}
fileURL = try! FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false)
.appendingPathComponent(recordingName)
let configuration = URLSessionConfiguration.default
configuration.requestCachePolicy = .reloadIgnoringLocalAndRemoteCacheData
session = URLSession(configuration: configuration, delegate: self, delegateQueue: nil)
session?.dataTask(with: url).resume()
outputStream = OutputStream(url: fileURL, append: true)
outputStream?.schedule(in: RunLoop.current, forMode: RunLoop.Mode.default)
outputStream?.open()
}
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, didCancel loadingRequest: AVAssetResourceLoadingRequest) {
pendingRequests.remove(loadingRequest)
}
// MARK: URLSession delegate
func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
let bytesWritten = data.withUnsafeBytes{outputStream?.write($0, maxLength: data.count)}
}
func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive response: URLResponse, completionHandler: @escaping (URLSession.ResponseDisposition) -> Void) {
completionHandler(Foundation.URLSession.ResponseDisposition.allow)
mediaData = Data()
self.response = response
processPendingRequests()
}
func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
if let errorUnwrapped = error {
owner?.delegate?.playerItem?(owner!, downloadingFailedWith: errorUnwrapped)
return
}
processPendingRequests()
owner?.delegate?.playerItem?(owner!, didFinishDownloadingData: mediaData!)
}
// MARK: -
func processPendingRequests() {
// get all fulfilled requests
let requestsFulfilled = Set<AVAssetResourceLoadingRequest>(pendingRequests.compactMap {
self.fillInContentInformationRequest($0.contentInformationRequest)
if self.haveEnoughDataToFulfillRequest($0.dataRequest!) {
$0.finishLoading()
return $0
}
return nil
})
// remove fulfilled requests from pending requests
_ = requestsFulfilled.map { self.pendingRequests.remove($0) }
}
func fillInContentInformationRequest(_ contentInformationRequest: AVAssetResourceLoadingContentInformationRequest?) {
if playingFromData {
contentInformationRequest?.contentType = self.mimeType
contentInformationRequest?.contentLength = Int64(mediaData!.count)
contentInformationRequest?.isByteRangeAccessSupported = true
return
}
guard let responseUnwrapped = response else {
// have no response from the server yet
return
}
contentInformationRequest?.contentType = responseUnwrapped.mimeType
contentInformationRequest?.contentLength = responseUnwrapped.expectedContentLength
contentInformationRequest?.isByteRangeAccessSupported = true
}
func haveEnoughDataToFulfillRequest(_ dataRequest: AVAssetResourceLoadingDataRequest) -> Bool {
let requestedOffset = Int(dataRequest.requestedOffset)
let requestedLength = dataRequest.requestedLength
let currentOffset = Int(dataRequest.currentOffset)
guard let songDataUnwrapped = mediaData,
songDataUnwrapped.count > currentOffset else {
return false
}
let bytesToRespond = min(songDataUnwrapped.count - currentOffset, requestedLength)
let dataToRespond = songDataUnwrapped.subdata(in: Range(uncheckedBounds: (currentOffset, currentOffset + bytesToRespond)))
dataRequest.respond(with: dataToRespond)
return songDataUnwrapped.count >= requestedLength + requestedOffset
}
deinit {
session?.invalidateAndCancel()
}
}
fileprivate let resourceLoaderDelegate = ResourceLoaderDelegate()
fileprivate let url: URL
fileprivate let initialScheme: String?
fileprivate var customFileExtension: String?
weak var delegate: CachingPlayerItemDelegate?
func stopDownloading(){
resourceLoaderDelegate.session?.invalidateAndCancel()
}
open func download() {
if resourceLoaderDelegate.session == nil {
resourceLoaderDelegate.startDataRequest(with: url)
}
}
private let cachingPlayerItemScheme = "cachingPlayerItemScheme"
var recordingName = "record.mp3"
/// Is used for playing remote files.
convenience init(url: URL, recordingName: String) {
self.init(url: url, customFileExtension: nil, recordingName: recordingName)
}
/// Override/append custom file extension to URL path.
/// This is required for the player to work correctly with the intended file type.
init(url: URL, customFileExtension: String?, recordingName: String) {
guard let components = URLComponents(url: url, resolvingAgainstBaseURL: false),
let scheme = components.scheme,
var urlWithCustomScheme = url.withScheme(cachingPlayerItemScheme) else {
fatalError("Urls without a scheme are not supported")
}
self.recordingName = recordingName
self.url = url
self.initialScheme = scheme
if let ext = customFileExtension {
urlWithCustomScheme.deletePathExtension()
urlWithCustomScheme.appendPathExtension(ext)
self.customFileExtension = ext
}
let asset = AVURLAsset(url: urlWithCustomScheme)
asset.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
super.init(asset: asset, automaticallyLoadedAssetKeys: nil)
resourceLoaderDelegate.owner = self
addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.new, context: nil)
NotificationCenter.default.addObserver(self, selector: #selector(playbackStalledHandler), name:NSNotification.Name.AVPlayerItemPlaybackStalled, object: self)
}
/// Is used for playing from Data.
init(data: Data, mimeType: String, fileExtension: String) {
guard let fakeUrl = URL(string: cachingPlayerItemScheme + "://whatever/file.\(fileExtension)") else {
fatalError("internal inconsistency")
}
self.url = fakeUrl
self.initialScheme = nil
resourceLoaderDelegate.mediaData = data
resourceLoaderDelegate.playingFromData = true
resourceLoaderDelegate.mimeType = mimeType
let asset = AVURLAsset(url: fakeUrl)
asset.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
super.init(asset: asset, automaticallyLoadedAssetKeys: nil)
resourceLoaderDelegate.owner = self
addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.new, context: nil)
NotificationCenter.default.addObserver(self, selector: #selector(playbackStalledHandler), name:NSNotification.Name.AVPlayerItemPlaybackStalled, object: self)
}
// MARK: KVO
override open func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
delegate?.playerItemReadyToPlay?(self)
}
// MARK: Notification handlers
@objc func playbackStalledHandler() {
delegate?.playerItemPlaybackStalled?(self)
}
// MARK: -
override init(asset: AVAsset, automaticallyLoadedAssetKeys: [String]?) {
fatalError("not implemented")
}
deinit {
NotificationCenter.default.removeObserver(self)
removeObserver(self, forKeyPath: "status")
resourceLoaderDelegate.session?.invalidateAndCancel()
}
}
Then, in your main Swift file, put this code to record:
var recordingName: String? = "my_rec_name.mp3" // optional, so it can be cleared when recording stops
var playerItem: CachingPlayerItem!
let url_stream = URL(string: "http://my_url_stream_link")
playerItem = CachingPlayerItem(url: url_stream!, recordingName: recordingName ?? "record.mp3")
var player1 = AVPlayer(playerItem: playerItem)
player1.automaticallyWaitsToMinimizeStalling = false
And to stop the recording, use this code:
playerItem.stopDownloading()
recordingName = nil
playerItem = nil
Recordings will be saved in your app's Documents directory.
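To be notified when the stream has been fully written, or to follow progress, you can implement the delegate that the class declares. A minimal sketch, assuming your view controller keeps the playerItem created above and you set playerItem.delegate = self after creating it:

extension ViewControllerPlayer: CachingPlayerItemDelegate {
    func playerItem(_ playerItem: CachingPlayerItem, didDownloadBytesSoFar bytesDownloaded: Int, outOf bytesExpected: Int) {
        // Progress while the stream is being appended to the output file.
        print("Recorded \(bytesDownloaded) of \(bytesExpected) bytes")
    }
    func playerItem(_ playerItem: CachingPlayerItem, downloadingFailedWith error: Error) {
        print("Recording failed: \(error)")
    }
}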
I had a really hard time with this one so I am posting an answer.
Remember to add these lines to your info.plist:
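(The plist listing did not survive here; presumably the usage-description keys that speech recognition and microphone access require, NSSpeechRecognitionUsageDescription and NSMicrophoneUsageDescription.)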
Here is my controller that records the voice input and returns it to a previous controller:
import Foundation
import UIKit
import Speech
class SpeechToTextViewController: UIViewController {
@IBOutlet weak var animationView: UIView!
@IBOutlet weak var circleView: UIView!
@IBOutlet weak var micImage: UIImageView!
@IBOutlet weak var listeningLabel: UILabel!
@IBOutlet weak var buttonStartView: UIView!
@IBOutlet weak var cancelRecordingButton: UIButton!
@IBOutlet weak var stopRecordingButton: UIButton!
@IBOutlet weak var startRecordingButton: UIButton!
private let audioEngine = AVAudioEngine()
private let speechRecognizer = SFSpeechRecognizer(locale: Locale.init(identifier:"en-US"))
private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest!
private var recognitionTask: SFSpeechRecognitionTask?
private var isRecording: Bool = false
var delegate: SpeechToTextViewDelegate?
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor(white: 1.0, alpha: 0.25)
self.stopRecordingButton.isHidden = true
self.listeningLabel.isHidden = true
}
@IBAction func startStopRecording(_ sender: Any) {
isRecording = !isRecording
if isRecording && !audioEngine.isRunning {
self.cancelRecordingButton.isHidden = true
self.startRecordingButton.isHidden = true
self.stopRecordingButton.isHidden = false
self.listeningLabel.isHidden = false
UIView.animate(withDuration: 1, animations: {}) { _ in
UIView.animate(withDuration: 1, delay: 0.25, options: [.autoreverse, .repeat], animations: {
self.circleView.transform = CGAffineTransform(scaleX: 1.5, y: 1.5)
})
}
do {
try recordSpeech()
} catch {
print(error)
}
} else {
self.listeningLabel.isHidden = true
stopRecording()
}
}
func recordSpeech() throws {
recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
let node = audioEngine.inputNode
let recordingFormat = node.outputFormat(forBus: 0)
node.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) {buffer, _ in
self.recognitionRequest.append(buffer)
}
audioEngine.prepare()
try audioEngine.start()
guard let myRecognizer = SFSpeechRecognizer() else {
print("myRecognizer is unable to be created")
return
}
if !myRecognizer.isAvailable
{
print("myRecognizer is not available")
return
}
recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest, resultHandler: { result, error in
var isFinal = false
if let result = result
{
isFinal = result.isFinal
self.delegate?.appendMessage(result.bestTranscription.formattedString)
}
if error != nil || isFinal {
if error != nil {
print("error trying to capture speech to text")
print(error!)
}
self.stopRecording()
}
})
}
func stopRecording() {
if audioEngine.isRunning {
self.audioEngine.stop()
self.recognitionRequest.endAudio()
// Cancel the previous task if it's running
if let recognitionTask = recognitionTask {
recognitionTask.cancel()
self.recognitionTask = nil
}
}
delegate?.doneTalking()
self.dismiss(animated: true, completion: nil)
}
@IBAction func cancelRecording(_ sender: Any) {
delegate?.doneTalking()
self.dismiss(animated: true, completion: nil)
}
}
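The controller above assumes permission has already been granted. A minimal sketch of requesting speech-recognition and microphone authorization before starting (without the Info.plist keys mentioned above, the requests will crash the app):

import Speech
import AVFoundation

func requestPermissions(completion: @escaping (Bool) -> Void) {
    SFSpeechRecognizer.requestAuthorization { status in
        guard status == .authorized else {
            DispatchQueue.main.async { completion(false) }
            return
        }
        // Microphone permission for the AVAudioEngine input node.
        AVAudioSession.sharedInstance().requestRecordPermission { granted in
            DispatchQueue.main.async { completion(granted) }
        }
    }
}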
Use AVAudioRecorder for recording:
private var audioRecorder: AVAudioRecorder!
After you have declared an audio recorder, you can write a recording method:
func startRecording() throws {
guard let newFileURL = createURLForNewRecord() else {
throw RecordingServiceError.canNotCreatePath
}
do {
currentFileURL = newFileURL
audioRecorder = try AVAudioRecorder(url: newFileURL,
settings: [AVFormatIDKey:Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 8000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue])
audioRecorder.delegate = self
audioRecorder.prepareToRecord()
audioRecorder.record(forDuration: TimeConstants.recordDuration) // TimeConstants is this answer's own constant; substitute your desired duration
} catch let error {
print(error)
}
}
And use some helper methods and structs:
enum RecordingServiceError: String, Error {
case canNotCreatePath = "Can not create path for new recording"
}
private func createURLForNewRecord() -> URL? {
guard let appGroupFolderUrl = FileManager.getAppFolderURL() else {
return nil
}
let fileNamePrefix = DateFormatter.stringFromDate(Date())
let fullFileName = "Record_" + fileNamePrefix + ".m4a"
let newRecordFileName = appGroupFolderUrl.appendingPathComponent(fullFileName)
return newRecordFileName
}
extension FileManager {
class func getAppFolderURL() -> URL? {
return FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: "your.app.group.identifier")
}
}
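Before AVAudioRecorder can capture anything, the audio session must be configured for recording. A minimal sketch, assuming you call it once before startRecording():

import AVFoundation

func configureAudioSession() throws {
    let session = AVAudioSession.sharedInstance()
    // .playAndRecord keeps the radio audible while the microphone records.
    try session.setCategory(.playAndRecord, mode: .default)
    try session.setActive(true)
}

Note that AVAudioRecorder captures from the microphone input, not from the AVPlayer's output, so this approach records ambient sound rather than the stream itself; that is why the CachingPlayerItem approach above records the stream data directly.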

Need to show the image of a video in MPMoviePlayerController - swift

I have already asked this question on my co-worker's profile, but no solution has come up yet. Please help me solve this problem; I have been stuck for more than 4 hours.
So far, this is what I did:
I choose a video from the gallery and display it on screen using MPMoviePlayerController, picking 2 videos one by one. After I select and display the first video, when I move on to load the second one, the first video's screen goes black. But I need to keep showing the video's image in that player. How do I do that? Please help me out.
My code:
import UIKit
import AVFoundation
import MobileCoreServices
import AssetsLibrary
import MediaPlayer
import CoreMedia
class ViewController: UIViewController,UIGestureRecognizerDelegate {
var Asset1: AVAsset?
var Asset2: AVAsset?
var Asset3: AVAsset?
var audioAsset: AVAsset?
var loadingAssetOne = false
// set a duplicate image as the thumbnail image for audio
@IBOutlet weak var musicImg: UIImageView!
var videoPlayer = MPMoviePlayerController()
var mediaUI = UIImagePickerController()
var videoURL = NSURL()
override func viewDidLoad() {
super.viewDidLoad()
musicImg.hidden = true
let gestureRecognizer: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: "handleTap:") // selector must match handleTap(gestureRecognizer:)
gestureRecognizer.delegate = self
videoPlayer.view.addGestureRecognizer(gestureRecognizer)
}
func handleTap(gestureRecognizer: UIGestureRecognizer) {
//location = gestureRecognizer .locationInView(videoPlayer.view)
print("tapped")
}
// MARK: - gesture delegate
// this allows you to dispatch touches
func gestureRecognizer(gestureRecognizer: UIGestureRecognizer, shouldReceiveTouch touch: UITouch) -> Bool {
return true
}
func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer,
shouldRecognizeSimultaneouslyWithGestureRecognizer otherGestureRecognizer: UIGestureRecognizer) -> Bool {
return true
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func startMediaBrowserFromViewController(viewController: UIViewController!, usingDelegate delegate : protocol<UINavigationControllerDelegate, UIImagePickerControllerDelegate>!) -> Bool {
if UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum) == false {
return false
}
let mediaUI = UIImagePickerController()
mediaUI.sourceType = .SavedPhotosAlbum
mediaUI.mediaTypes = [kUTTypeMovie as String]
mediaUI.allowsEditing = true
mediaUI.delegate = delegate
presentViewController(mediaUI, animated: true, completion: nil)
return true
}
// after merge all video and audio. the final video will be saved in gallery and also will display like preview
func exportDidFinish(session: AVAssetExportSession) {
if session.status == AVAssetExportSessionStatus.Completed {
let outputURL = session.outputURL
let library = ALAssetsLibrary()
if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in
if error != nil {
print("some files went wrong")
} else {
// get the output url to display the final video in screen
self.videoURL = outputURL!
self.mediaUI.dismissViewControllerAnimated(true, completion: nil)
self.videoPlayer = MPMoviePlayerController()
self.videoPlayer.contentURL = self.videoURL
self.videoPlayer.controlStyle = .Embedded
self.videoPlayer.scalingMode = .AspectFill
self.videoPlayer.shouldAutoplay = true
self.videoPlayer.backgroundView.backgroundColor = UIColor.clearColor()
self.videoPlayer.fullscreen = true
self.videoPlayer.view.frame = CGRectMake(38, 442, 220, 106)
self.view.addSubview(self.videoPlayer.view)
self.videoPlayer.play()
self.videoPlayer.prepareToPlay()
}
})
}
}
Asset1 = nil
Asset2 = nil
Asset3 = nil
audioAsset = nil
}
// click first video
@IBAction func FirstVideo(sender: AnyObject) {
loadingAssetOne = true
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click second video
@IBAction func SecondVideo(sender: AnyObject) {
loadingAssetOne = false
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click audio
@IBAction func Audio(sender: AnyObject) {
let mediaPickerController = MPMediaPickerController(mediaTypes: .Any)
mediaPickerController.delegate = self
mediaPickerController.prompt = "Select Audio"
presentViewController(mediaPickerController, animated: true, completion: nil)
}
@IBAction func playPreview(sender: AnyObject) {
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// orientation for the video
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
var assetOrientation = UIImageOrientation.Up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .Right
isPortrait = true
} else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .Left
isPortrait = true
} else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .Up
} else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .Down
}
return (assetOrientation, isPortrait)
}
func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
let transform = assetTrack.preferredTransform
let assetInfo = orientationFromTransform(transform)
var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width
if assetInfo.isPortrait {
scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
atTime: kCMTimeZero)
} else {
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
if assetInfo.orientation == .Down {
let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
let windowBounds = UIScreen.mainScreen().bounds
let yFix = assetTrack.naturalSize.height + windowBounds.height
let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
}
instruction.setTransform(concat, atTime: kCMTimeZero)
}
return instruction
}
// merge all file
@IBAction func MergeAll(sender: AnyObject) {
if let firstAsset = Asset1, secondAsset = Asset2 {
let mixComposition = AVMutableComposition()
//load first video
let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
do {
try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0] ,
atTime: kCMTimeZero)
} catch _ {
}
// load second video
let secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
do {
try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration),
ofTrack: secondAsset.tracksWithMediaType(AVMediaTypeVideo)[0] ,
atTime: firstAsset.duration)
} catch _ {
}
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration))
let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)
mainInstruction.layerInstructions = [firstInstruction, secondInstruction]
let mainComposition = AVMutableVideoComposition()
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(1, 30)
mainComposition.renderSize = CGSize(width: UIScreen.mainScreen().bounds.width, height: UIScreen.mainScreen().bounds.height)
//load audio
if let loadedAudioAsset = audioAsset {
let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: 0)
do {
try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)),
ofTrack: loadedAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0] ,
atTime: kCMTimeZero)
} catch _ {
}
}
// save the final video to gallery
let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let dateFormatter = NSDateFormatter()
dateFormatter.dateStyle = .LongStyle
dateFormatter.timeStyle = .ShortStyle
let date = dateFormatter.stringFromDate(NSDate())
// let savePath = documentDirectory.URLByAppendingPathComponent("mergeVideo-\(date).mov")
let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("final-\(date).mov")
let url = NSURL(fileURLWithPath: savePath)
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter!.outputURL = url
exporter!.outputFileType = AVFileTypeQuickTimeMovie
exporter!.shouldOptimizeForNetworkUse = true
exporter!.videoComposition = mainComposition
exporter!.exportAsynchronouslyWithCompletionHandler() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
self.exportDidFinish(exporter!)
})
}
}
}
}
extension ViewController: UIImagePickerControllerDelegate {
// display the first & second video after it picked from gallery
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
let mediaType = info[UIImagePickerControllerMediaType] as! NSString
dismissViewControllerAnimated(true, completion: nil)
if mediaType == kUTTypeMovie {
let avAsset = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
if loadingAssetOne {
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
self.videoPlayer = MPMoviePlayerController()
self.videoPlayer.contentURL = videoURL
self.videoPlayer.view.frame = CGRectMake(38, 57, 220, 106)
self.view.addSubview(self.videoPlayer.view)
self.videoPlayer.controlStyle = .Embedded
self.videoPlayer.scalingMode = .AspectFill
self.videoPlayer.shouldAutoplay = true
self.videoPlayer.prepareToPlay()
self.videoPlayer.play()
Asset1 = avAsset
} else {
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
self.videoPlayer = MPMoviePlayerController()
self.videoPlayer.contentURL = videoURL
self.videoPlayer.view.frame = CGRectMake(38, 206, 220, 106)
self.view.addSubview(self.videoPlayer.view)
self.videoPlayer.play()
self.videoPlayer.controlStyle = .Embedded
self.videoPlayer.scalingMode = .AspectFill
self.videoPlayer.shouldAutoplay = true
self.videoPlayer.prepareToPlay()
Asset2 = avAsset
}
}
}
}
extension ViewController: UINavigationControllerDelegate {
}
extension ViewController: MPMediaPickerControllerDelegate {
func mediaPicker(mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection) {
let selectedSongs = mediaItemCollection.items
if selectedSongs.count > 0 {
let song = selectedSongs[0]
if let vURL = song.valueForProperty(MPMediaItemPropertyAssetURL) as? NSURL {
audioAsset = AVAsset(URL: vURL)
dismissViewControllerAnimated(true, completion: nil)
mediaUI.dismissViewControllerAnimated(true, completion: nil)
musicImg.hidden = false
let alert = UIAlertController(title: "yes", message: "Audio Loaded", preferredStyle: .Alert)
alert.addAction(UIAlertAction(title: "OK", style: .Cancel, handler:nil))
presentViewController(alert, animated: true, completion: nil)
} else {
dismissViewControllerAnimated(true, completion: nil)
let alert = UIAlertController(title: "No audio", message: "Audio Not Loaded", preferredStyle: .Alert)
alert.addAction(UIAlertAction(title: "OK", style: .Cancel, handler:nil))
presentViewController(alert, animated: true, completion: nil)
}
} else {
dismissViewControllerAnimated(true, completion: nil)
}
}
func mediaPickerDidCancel(mediaPicker: MPMediaPickerController) {
dismissViewControllerAnimated(true, completion: nil)
}
}
I need to show the image of the selected video on screen. Please help me resolve this, thanks.
I followed the tutorial linked here.
If you want to play more than one video in the same view, you can use AVPlayer from AVFoundation. I have read more about this on Stack Overflow, in the Apple documentation, and on other forums. They all say it is not possible with MPMoviePlayerViewController, but it is possible with AVPlayer.
import UIKit
import AVFoundation
import MobileCoreServices
import AssetsLibrary
import MediaPlayer
import CoreMedia
class ViewController: UIViewController,UIGestureRecognizerDelegate {
var optionalInteger: Int?
var Asset1: AVAsset?
var Asset2: AVAsset?
var Asset3: AVAsset?
var audioAsset: AVAsset?
var loadingAssetOne = false
@IBOutlet weak var musicImg: UIImageView!
@IBOutlet var videoView: UIView!
@IBOutlet var videoViewTwo: UIView!
var player : AVPlayer? = nil
var playerLayer : AVPlayerLayer? = nil
var asset : AVAsset? = nil
var playerItem: AVPlayerItem? = nil
override func viewDidLoad()
{
super.viewDidLoad()
musicImg.hidden = true
let gestureRecognizer: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: "handleTapGesture")
gestureRecognizer.delegate = self
videoView.addGestureRecognizer(gestureRecognizer) // this class has no videoPlayer; attach the gesture to the video container view
imageViewVideoOne.hidden = true // thumbnail image views (outlets not shown in this excerpt)
imageViewVideoTwo.hidden = true
}
func startMediaBrowserFromViewController(viewController: UIViewController!, usingDelegate delegate : protocol<UINavigationControllerDelegate, UIImagePickerControllerDelegate>!) -> Bool
{
if UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum) == false {
return false
}
let mediaUI = UIImagePickerController()
mediaUI.sourceType = .SavedPhotosAlbum
mediaUI.mediaTypes = [kUTTypeMovie as String]
mediaUI.allowsEditing = true
mediaUI.delegate = delegate
presentViewController(mediaUI, animated: true, completion: nil)
return true
}
// click first video
@IBAction func FirstVideo(sender: AnyObject) {
loadingAssetOne = true
optionalInteger = 0
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click second video
@IBAction func SecondVideo(sender: AnyObject) {
loadingAssetOne = false
optionalInteger = 1
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click audio
@IBAction func Audio(sender: AnyObject) {
let mediaPickerController = MPMediaPickerController(mediaTypes: .Any)
mediaPickerController.delegate = self
mediaPickerController.prompt = "Select Audio"
presentViewController(mediaPickerController, animated: true, completion: nil)
}
@IBAction func playPreview(sender: AnyObject)
{
startMediaBrowserFromViewController(self, usingDelegate: self)
}
} // close the class before the extensions below
extension ViewController: UIImagePickerControllerDelegate
{
// display the first & second video after it picked from gallery
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject])
{
let mediaType = info[UIImagePickerControllerMediaType] as! NSString
dismissViewControllerAnimated(true, completion: nil)
if mediaType == kUTTypeMovie
{
if loadingAssetOne
{
let avAsset = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
print(avAsset)
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
let videoURLWithPath = info[UIImagePickerControllerMediaURL] as? NSURL
self.videoURL = videoURLWithPath!
asset = AVAsset(URL: videoURL) as AVAsset
playerItem = AVPlayerItem(asset: asset!)
player = AVPlayer (playerItem: self.playerItem!)
playerLayer = AVPlayerLayer(player: self.player)
videoView.frame = CGRectMake(38, 57, 220, 106)
playerLayer?.frame = videoView.frame
videoView.layer.addSublayer(self.playerLayer!)
player!.play()
}
else
{
let avAssetTwo = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
print(avAssetTwo)
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
let videoURLWithPath = info[UIImagePickerControllerMediaURL] as? NSURL
self.videoURL = videoURLWithPath!
asset = AVAsset(URL: videoURL) as AVAsset
playerItem = AVPlayerItem(asset: asset!)
player = AVPlayer (playerItem: self.playerItem!)
playerLayer = AVPlayerLayer(player: self.player)
videoView.frame = CGRectMake(38, 206, 220, 106)
playerLayer?.frame = videoView.frame
videoView.layer.addSublayer(self.playerLayer!)
player!.play()
}
}
}
}
Sources for your question
Multiple Video same screen
Multiple Video Playback on iOS
Apple MPMoviePlayerViewController Document
Playing Multiple Video Simultaneously
Apple AVPlayer Document
Playing Multiple Videos
Playback
Playing Video in iOS
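Since the question was also about showing the selected video's image while its player is inactive, here is a minimal sketch of generating a thumbnail with AVAssetImageGenerator (written in current Swift syntax; videoURL is assumed to be the picked file's URL):

import AVFoundation
import UIKit

func thumbnail(for videoURL: URL) -> UIImage? {
    let asset = AVAsset(url: videoURL)
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true // respect the video's orientation
    let time = CMTime(seconds: 1, preferredTimescale: 600)
    guard let cgImage = try? generator.copyCGImage(at: time, actualTime: nil) else {
        return nil
    }
    return UIImage(cgImage: cgImage)
}

Setting the resulting image on an image view behind the idle player slot keeps the frame visible instead of going black.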

Issue with "func observeValueForKeyPath(keyPath : NSString, object : AnyObject, change : NSDictionary, context : Void)" in Swift

I have added an observer for an AVPlayer like this
self.audioPlayer.addObserver(self, forKeyPath: "currentItem.status", options: [NSKeyValueObservingOptions.New, NSKeyValueObservingOptions.Initial], context: nil)
And implemented the callback like this:
func observeValueForKeyPath(keyPath : NSString, object : AnyObject, change : NSDictionary, context : Void) {
if object as! AVPlayer == self.audioPlayer && keyPath.isEqualToString("currentItem.status")
{
let playerStatus = self.audioPlayer.currentItem!.status.rawValue as Int
if playerStatus == AVPlayerStatus.Failed.rawValue
{
self.replaceCurrentItem(self.fileName)
}
else if playerStatus == AVPlayerStatus.ReadyToPlay.rawValue
{
self.playAudio()
self.updateDurationPeriodcally()
self.updateInfo()
}
else if playerStatus == AVPlayerStatus.Unknown.rawValue
{
print("\(self.audioPlayer.currentItem!.error)")
}
}
}
The problem is that observeValueForKeyPath is never called.
Whole code:
func loadPlayer (urlString : NSString)
{
let url : NSURL = NSURL(string: urlString as String)!
let length = (url.absoluteString).characters.count as Int
if length == 0
{
return
}
let asset = AVURLAsset(URL: url, options: nil)
let anItem = AVPlayerItem(asset: asset)
self.audioPlayer = AVPlayer(playerItem: anItem)
self.audioPlayer.volume = 1.0
self.audioPlayer.play()
addObserver(self,
forKeyPath: ObservatingKeyPath,
options: NSKeyValueObservingOptions.New,
context: PlayerStatusObservingContext)
self.audioPlayer.addObserver(self, forKeyPath: "currentItem.status", options: [NSKeyValueObservingOptions.New, NSKeyValueObservingOptions.Initial], context: nil)
NSNotificationCenter.defaultCenter().addObserver(self, selector: "itemDidFinishPlaying:", name:AVPlayerItemDidPlayToEndTimeNotification, object: nil)
}
How do you observe the player item? You might be observing it incorrectly. Here is a simple illustration of how to correctly observe the status of an AVPlayer:
class TestViewController: UIViewController
{
private var player: AVPlayer!
private var playingIndex = 0
private var songs = ["UpTown.mp3", "TurnMeOn.mp3"]
private let ObservatingKeyPath = "currentItem.status"
private let PlayerStatusObservingContext = UnsafeMutablePointer<Int>(bitPattern: 1)
override func viewDidLoad()
{
super.viewDidLoad()
createPlayer()
setupGestures()
playNext()
}
func createPlayer()
{
player = AVPlayer()
player.addObserver(self,
forKeyPath: ObservatingKeyPath,
options: NSKeyValueObservingOptions.New,
context: PlayerStatusObservingContext)
}
func setupGestures()
{
let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: "playNext")
view.addGestureRecognizer(tapGestureRecognizer)
}
func playNext()
{
let url = NSBundle.mainBundle().URLForResource(songs[playingIndex], withExtension: nil)
let playerItem = AVPlayerItem(URL: url!)
player.replaceCurrentItemWithPlayerItem(playerItem)
player.play()
playingIndex = (++playingIndex % 2)
}
override func observeValueForKeyPath(keyPath: String?,
ofObject object: AnyObject?,
change: [NSObject : AnyObject]?,
context: UnsafeMutablePointer<Void>)
{
if context == PlayerStatusObservingContext
{
if let change = change as? [String: Int]
{
let newChange = change[NSKeyValueChangeNewKey]!
switch newChange
{
case AVPlayerItemStatus.ReadyToPlay.rawValue:
print("Ready to play")
case AVPlayerItemStatus.Unknown.rawValue:
print("Unknown")
default:
print("Other status")
}
}
return
}
super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
}
}
It prints out the following when starting or changing the song:
Unknown
Ready to play
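On Swift 4 and later you can skip the string key path and context juggling entirely with block-based KVO. A minimal sketch, assuming a player property:

import AVFoundation

var statusObservation: NSKeyValueObservation?

func observeStatus(of player: AVPlayer) {
    // \.currentItem?.status is a type-safe key path; the observation ends
    // automatically when the returned token is deallocated.
    statusObservation = player.observe(\.currentItem?.status, options: [.new, .initial]) { player, _ in
        switch player.currentItem?.status {
        case .readyToPlay?:
            print("Ready to play")
        case .failed?:
            print("Failed: \(String(describing: player.currentItem?.error))")
        default:
            print("Unknown")
        }
    }
}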

How to play video with AVPlayerViewController (AVKit) in Swift

How do you play a video with AV Kit Player View Controller in Swift?
override func viewDidLoad() {
super.viewDidLoad()
let videoURLWithPath = "http://****/5.m3u8"
let videoURL = NSURL(string: videoURLWithPath)
playerViewController = AVPlayerViewController()
dispatch_async(dispatch_get_main_queue()) {
self.playerViewController?.player = AVPlayer.playerWithURL(videoURL) as AVPlayer
}
}
Swift 3.x - 5.x
Necessary: import AVKit, import AVFoundation
The AVFoundation framework is needed even if you only use AVPlayer.
If you want to use AVPlayerViewController:
let videoURL = URL(string: "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4")
let player = AVPlayer(url: videoURL!)
let playerViewController = AVPlayerViewController()
playerViewController.player = player
self.present(playerViewController, animated: true) {
playerViewController.player!.play()
}
or just AVPlayer:
let videoURL = URL(string: "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4")
let player = AVPlayer(url: videoURL!)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.view.bounds
self.view.layer.addSublayer(playerLayer)
player.play()
It's better to put this code into override func viewDidAppear(_ animated: Bool) or somewhere after it.
Objective-C
AVPlayerViewController:
NSURL *videoURL = [NSURL URLWithString:@"https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"];
AVPlayer *player = [AVPlayer playerWithURL:videoURL];
AVPlayerViewController *playerViewController = [AVPlayerViewController new];
playerViewController.player = player;
[self presentViewController:playerViewController animated:YES completion:^{
[playerViewController.player play];
}];
or just AVPlayer:
NSURL *videoURL = [NSURL URLWithString:@"https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"];
AVPlayer *player = [AVPlayer playerWithURL:videoURL];
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
playerLayer.frame = self.view.bounds;
[self.view.layer addSublayer:playerLayer];
[player play];
Try this; it definitely works in Swift 2.0:
let player = AVPlayer(URL: url)
let playerController = AVPlayerViewController()
playerController.player = player
self.addChildViewController(playerController)
self.view.addSubview(playerController.view)
playerController.view.frame = self.view.frame
player.play()
Swift 5+
First of all, you have to define two variables globally, as properties of your view controller.
var player: AVPlayer!
var playerViewController: AVPlayerViewController!
Here I'm adding the player to a desired view.
@IBOutlet weak var playerView: UIView!
Then add the following code to the viewDidLoad method.
let videoURL = URL(string: "videoUrl")
self.player = AVPlayer(url: videoURL!)
self.playerViewController = AVPlayerViewController()
playerViewController.player = self.player
playerViewController.view.frame = self.playerView.frame
playerViewController.player?.pause()
self.playerView.addSubview(playerViewController.view)
If you do not define player and playerViewController as properties, they are deallocated when the setup method returns, and you won't be able to embed the player.
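Because the player is kept alive as a property, you can drive it later from anywhere in the controller. A small usage sketch, assuming the setup above (the button action is hypothetical):

@IBAction func playTapped(_ sender: UIButton) {
    // player was created in viewDidLoad and kept as a property, so it is still alive here.
    if player.timeControlStatus == .playing {
        player.pause()
    } else {
        player.play()
    }
}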
Try This
var player: AVPlayer!
var streamingURL: NSURL = NSURL(string: playerURL)!
player = AVPlayer(URL: streamingURL)
var avPlayerLayer: AVPlayerLayer = AVPlayerLayer(player: player) // create the layer after the player exists, or it stays empty
avPlayerLayer.frame = CGRectMake(your frame)
self.view.layer.addSublayer(avPlayerLayer)
player.play()
Swift 3.0 Full source code:
import UIKit
import AVKit
import AVFoundation
class ViewController: UIViewController,AVPlayerViewControllerDelegate
{
var playerController = AVPlayerViewController()
@IBAction func Play(_ sender: Any)
{
let path = Bundle.main.path(forResource: "video", ofType: "mp4")
let url = NSURL(fileURLWithPath: path!)
let player = AVPlayer(url:url as URL)
playerController = AVPlayerViewController()
NotificationCenter.default.addObserver(self, selector: #selector(ViewController.didfinishplaying(note:)),name:NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: player.currentItem)
playerController.player = player
playerController.allowsPictureInPicturePlayback = true
playerController.delegate = self
playerController.player?.play()
self.present(playerController,animated:true,completion:nil)
}
func didfinishplaying(note : NSNotification)
{
playerController.dismiss(animated: true,completion: nil)
let alertview = UIAlertController(title:"finished",message:"video finished",preferredStyle: .alert)
alertview.addAction(UIAlertAction(title:"Ok",style: .default, handler: nil))
self.present(alertview,animated:true,completion: nil)
}
func playerViewController(_ playerViewController: AVPlayerViewController, restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void) {
let currentviewController = navigationController?.visibleViewController
if currentviewController != playerViewController
{
currentviewController?.present(playerViewController,animated: true,completion:nil)
}
}
}
Objective-C
This only works in Xcode 7.
Go to the .h file and import AVKit/AVKit.h and AVFoundation/AVFoundation.h. Then go to the .m file and add this code:
NSURL *url=[[NSBundle mainBundle]URLForResource:@"arreg" withExtension:@"mp4"];
AVPlayer *video=[AVPlayer playerWithURL:url];
AVPlayerViewController *controller=[[AVPlayerViewController alloc]init];
controller.player=video;
[self.view addSubview:controller.view];
controller.view.frame=self.view.frame;
[self addChildViewController:controller];
[video play];
Using MPMoviePlayerController (deprecated since iOS 9):
import UIKit
import MediaPlayer
class ViewController: UIViewController {
var streamPlayer : MPMoviePlayerController = MPMoviePlayerController(contentURL: NSURL(string:"video url here"))
override func viewDidLoad() {
super.viewDidLoad()
streamPlayer.view.frame = self.view.bounds
self.view.addSubview(streamPlayer.view)
streamPlayer.fullscreen = true
// Play the movie!
streamPlayer.play()
}
}
Using AVPlayer:
import AVFoundation
var playerItem:AVPlayerItem?
var player:AVPlayer?
override func viewDidLoad() {
super.viewDidLoad()
let url = NSURL(string: "url of the audio or video")
playerItem = AVPlayerItem(URL: url!)
player=AVPlayer(playerItem: playerItem!)
let playerLayer=AVPlayerLayer(player: player!)
playerLayer.frame=CGRectMake(0, 0, 300, 50)
self.view.layer.addSublayer(playerLayer)
}
I have a play button to handle the button tap.
playButton.addTarget(self, action: "playButtonTapped:", forControlEvents: .TouchUpInside)
func playButtonTapped(sender: AnyObject) {
if player?.rate == 0
{
player!.play()
playButton.setImage(UIImage(named: "player_control_pause_50px.png"), forState: UIControlState.Normal)
} else {
player!.pause()
playButton.setImage(UIImage(named: "player_control_play_50px.png"), forState: UIControlState.Normal)
}
}
I have added an observer listening for AVPlayerItemDidPlayToEndTimeNotification.
override func viewWillAppear(animated: Bool) {
NSNotificationCenter.defaultCenter().addObserver(self, selector: "finishedPlaying:", name: AVPlayerItemDidPlayToEndTimeNotification, object: playerItem)
}
override func viewWillDisappear(animated: Bool) {
NSNotificationCenter.defaultCenter().removeObserver(self)
}
When the video/audio finishes playing, reset the button image and rewind the player item:
func finishedPlaying(myNotification:NSNotification) {
playButton.setImage(UIImage(named: "player_control_play_50px.png"), forState: UIControlState.Normal)
let stoppedPlayerItem: AVPlayerItem = myNotification.object as! AVPlayerItem
stoppedPlayerItem.seekToTime(kCMTimeZero)
}
A bug (?!) in iOS 10 / Swift 3 / Xcode 8?
if let url = URL(string: "http://devstreaming.apple.com/videos/wwdc/2016/102w0bsn0ge83qfv7za/102/hls_vod_mvp.m3u8"){
let playerItem = AVPlayerItem(url: url)
let player = AVPlayer(playerItem: playerItem)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame=CGRect(x: 10, y: 10, width: 300, height: 300)
self.view.layer.addSublayer(playerLayer)
}
does not work (empty rect...)
this works:
if let url = URL(string: "http://devstreaming.apple.com/videos/wwdc/2016/102w0bsn0ge83qfv7za/102/hls_vod_mvp.m3u8"){
let player = AVPlayer(url: url)
let controller=AVPlayerViewController()
controller.player=player
controller.view.frame = self.view.frame
self.view.addSubview(controller.view)
self.addChildViewController(controller)
player.play()
}
Same URL...
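Worth noting, as a possible explanation rather than a confirmed diagnosis: the failing snippet never calls player.play(), while the working one does. A corrected version of the first approach, assuming that was the only issue:

if let url = URL(string: "http://devstreaming.apple.com/videos/wwdc/2016/102w0bsn0ge83qfv7za/102/hls_vod_mvp.m3u8") {
    let playerItem = AVPlayerItem(url: url)
    let player = AVPlayer(playerItem: playerItem)
    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.frame = CGRect(x: 10, y: 10, width: 300, height: 300)
    self.view.layer.addSublayer(playerLayer)
    player.play() // missing in the original snippet
}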
Swift 3:
import UIKit
import AVKit
import AVFoundation
class ViewController: UIViewController {
@IBOutlet weak var viewPlay: UIView!
var player : AVPlayer?
override func viewDidLoad() {
super.viewDidLoad()
let url : URL = URL(string: "http://static.videokart.ir/clip/100/480.mp4")!
player = AVPlayer(url: url)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.viewPlay.bounds
self.viewPlay.layer.addSublayer(playerLayer)
}
@IBAction func play(_ sender: Any) {
player?.play()
}
@IBAction func stop(_ sender: Any) {
player?.pause()
}
}
This worked for me in Swift 5.
I just added a sample video to the project from Sample Videos.
Then I added action buttons for playing videos from the web and from the local bundle, with the following Swift code example:
import UIKit
import AVKit
class ViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
// TODO: Make sure to add the "SampleVideo.mp4" file to the project before playing
}
@IBAction func playWebVideo(_ sender: Any) {
guard let url = URL(string: "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4") else {
return
}
// Create an AVPlayer, passing it the HTTP Live Streaming URL.
let player = AVPlayer(url: url)
let controller = AVPlayerViewController()
controller.player = player
present(controller, animated: true) {
player.play()
}
}
@IBAction func playLocalVideo(_ sender: Any) {
guard let path = Bundle.main.path(forResource: "SampleVideo", ofType: "mp4") else {
return
}
let videoURL = NSURL(fileURLWithPath: path)
// Create an AVPlayer, passing it the local video url path
let player = AVPlayer(url: videoURL as URL)
let controller = AVPlayerViewController()
controller.player = player
present(controller, animated: true) {
player.play()
}
}
}
let videoUrl = yourVideoURL // URL: your video URL
//Create player first using your URL
let yourplayer = AVPlayer(url: videoUrl)
//Create player controller and set its player
let playerController = AVPlayerViewController()
playerController.player = yourplayer
//Final step: present the controller with the player in your view controller
present(playerController, animated: true, completion: {
playerController.player!.play()
})
Swift 5.0
Improved from @ingconti's answer. This worked for me.
if let url = URL(string: "urUrlString"){
let player = AVPlayer(url: url)
let avController = AVPlayerViewController()
avController.player = player
// your desired frame
avController.view.frame = self.view.frame
self.view.addSubview(avController.view)
self.addChild(avController)
player.play()
}
A custom video player using the ASVideoPlayer library from GitHub: https://github.com/Asbat/ASVideoPlayer
// --------------------------------------------------------
// MARK:- variables
// --------------------------------------------------------
var videoPlayer = ASVideoPlayerController()
var videoData : [VideoModel] = []
var allVideoData : [AllVideoModel] = []
var cellHeights = [IndexPath: CGFloat]()
let loadingCellTableViewCellCellIdentifier = "LoadingCellTableViewCell"
var pauseIndexPath : Int = 0
var pageNumber = 1
var index = 0
var id = ""
var titleVideo = ""
var isUpdate = false
var myVideo : [MyVideo] = []
var imgs = [UIImage]()
var activityViewController : UIActivityViewController!
private var activityIndicator = NVActivityIndicatorView(frame: CGRect(x: 5, y: 5, width: 5, height: 5), type: .circleStrokeSpin, color: .systemBlue, padding: 5)
private let refreshControl = UIRefreshControl()
// --------------------------------------------------------
// MARK:- Outlets
// --------------------------------------------------------
@IBOutlet private var tableVideo: UITableView!
@IBOutlet private var _btnBack: UIButton!
@IBOutlet var _btnide: UIButton!
// ---------------------------------------------------------
// MARK:- Lifecycle
// ---------------------------------------------------------
override func viewDidLoad() {
super.viewDidLoad()
self._btnide.isHidden = true
tableVideo.rowHeight = UITableView.automaticDimension
tableVideo.separatorStyle = .none
tableVideo.delegate = self
tableVideo.dataSource = self
tableVideo.register(UINib(nibName: "VideoPlayerTableCell", bundle: nil), forCellReuseIdentifier: "VideoPlayerTableCell")
let cellNib = UINib(nibName:loadingCellTableViewCellCellIdentifier, bundle: nil)
tableVideo.register(cellNib, forCellReuseIdentifier: loadingCellTableViewCellCellIdentifier)
navigationController?.navigationBar.isHidden = true
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
ASVideoPlayerController.sharedVideoPlayer.pausePlayeVideosFor(tableView: tableVideo)
tableVideo.scrollToRow(at: IndexPath(row: index, section: 0), at: .none, animated: true)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
tableVideo.scrollToRow(at: IndexPath(row: pauseIndexPath, section: 0), at: .none, animated: true)
puasVideoWhenPushVC(index: pauseIndexPath)
NotificationCenter.default.removeObserver(self)
if tableVideo.isHidden == true {
}
}
@IBAction func _onTapBackBtnAction(_ sender: UIButton) {
tableVideo.scrollToRow(at: IndexPath(row: pauseIndexPath, section: 0), at: .none, animated: true)
self.puasVideoWhenPushVC(index: pauseIndexPath)
navigationController?.popViewController(animated: true)
navigationController?.navigationBar.isHidden = false
}
// ---------------------------------------------------------------------
// MARK:- TableView Delegate & DataSource
// ---------------------------------------------------------------------
extension VideoPlayerVC :
UITableViewDelegate,UITableViewDataSource,UIScrollViewDelegate {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
if isUpdate{
return videoData.count
}else{
return allVideoData.count
}
}
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
if tableView == tableVideo {
return view.bounds.height
}else {
return UITableView.automaticDimension
}
}
func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if tableView == tableVideo {
if let videoCell = cell as? ASAutoPlayVideoLayerContainer, let _ = videoCell.videoURL {
ASVideoPlayerController.sharedVideoPlayer.removeLayerFor(cell: videoCell)
}
}
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableVideo.dequeueReusableCell(withIdentifier: "VideoPlayerTableCell", for: indexPath) as! VideoPlayerTableCell
if isUpdate{
self.id = videoData[indexPath.row].id ?? ""
cell.configureCell(videoUrl: videoData[indexPath.row].videoLink )
}else{
self.id = allVideoData[indexPath.row].id ?? ""
cell.configureCell(videoUrl: allVideoData[indexPath.row].videoLink)
}
cell.btnPlayPause.isSelected = false
cell.btnPlayPause.tag = indexPath.row
cell.btnPlayPause.addTarget(self, action: #selector(didTapPlayPauseButton(_:)), for: .touchUpInside)
cell.btnPlayPause.setImage(UIImage(named: ""), for: .normal)
cell.btnPlayPause.setImage(UIImage(named: "btn_play_video"), for: .selected)
cell.btnUseNow.tag = indexPath.row
cell.btnUseNow.addTarget(self, action: #selector(btnUseNowTapped(sender:)), for: .touchUpInside)
cell.btnShare.tag = indexPath.row
cell.btnShare.addTarget(self, action: #selector(btnShareTapped(sender:)), for: .touchUpInside)
cell.btnSave.tag = indexPath.row
cell.btnSave.addTarget(self, action: #selector(btnSaveTapped(sender:)), for: .touchUpInside)
pauseIndexPath = indexPath.row
return cell
}
@objc func btnUseNowTapped(sender: UIButton){
self._btnide.isHidden = false
self.pausePlayeVideos()
let editVC = EditVideoVC()
var fileName : String = kEmptyString
if self.isUpdate{
editVC.videoString = self.videoData[sender.tag].videoLink ?? kEmptyString
editVC.id = self.videoData[sender.tag].id ?? kEmptyString
editVC.titleVideo = self.videoData[sender.tag].title ?? kEmptyString
fileName = self.videoData[sender.tag].videoZip ?? kEmptyString
guard !FileManager.isExist(id: self.videoData[sender.tag].id ?? kEmptyString) else{
print("File Downloaded")
self.puasVideoWhenPushVC(index: sender.tag)
self.navigationController?.pushViewController(editVC, animated: true)
return }
FileManager.download(id: self.videoData[sender.tag].id ?? kEmptyString, url: fileName) { (url) in
guard url != nil else {
print("not download")
return
}
self.puasVideoWhenPushVC(index: sender.tag)
self.navigationController?.pushViewController(editVC, animated: true)
}
}
else{
editVC.videoString = self.allVideoData[sender.tag].videoLink ?? kEmptyString
editVC.id = self.allVideoData[sender.tag].id ?? kEmptyString
editVC.titleVideo = self.allVideoData[sender.tag].title ?? kEmptyString
fileName = self.allVideoData[sender.tag].videoZip ?? kEmptyString
guard !FileManager.isExist(id: self.allVideoData[sender.tag].id ?? kEmptyString) else{
print("File Downloaded")
self.puasVideoWhenPushVC(index: sender.tag)
self.navigationController?.pushViewController(editVC, animated: true)
return }
FileManager.download(id: self.allVideoData[sender.tag].id ?? kEmptyString, url: fileName) { (url) in
guard url != nil else {
print("not download")
return
}
self.puasVideoWhenPushVC(index: sender.tag)
self.navigationController?.pushViewController(editVC, animated: true)
}
}
}
#objc func btnShareTapped(sender: UIButton){
if self.isUpdate{
let video = ["\(String(describing: self.videoData[sender.tag].videoLink))"]
self.activityViewController = UIActivityViewController(activityItems: video, applicationActivities: nil)
self.activityViewController.setValue("Video Share", forKey: "subject")
self.activityViewController.popoverPresentationController?.sourceView = self.view
self.activityViewController.excludedActivityTypes = [ UIActivity.ActivityType.airDrop, UIActivity.ActivityType.postToTwitter, UIActivity.ActivityType.addToReadingList, UIActivity.ActivityType.assignToContact,UIActivity.ActivityType.copyToPasteboard,UIActivity.ActivityType.mail,UIActivity.ActivityType.markupAsPDF,UIActivity.ActivityType.message,UIActivity.ActivityType.postToFacebook,UIActivity.ActivityType.postToFlickr,UIActivity.ActivityType.postToTencentWeibo,UIActivity.ActivityType.postToVimeo,UIActivity.ActivityType.postToWeibo,UIActivity.ActivityType.saveToCameraRoll]
self.present(self.activityViewController, animated: true, completion: nil)
}
else{
let categoryVideo = ["\(String(describing: self.allVideoData[sender.tag].videoLink))"]
self.activityViewController = UIActivityViewController(activityItems: categoryVideo, applicationActivities: nil)
self.activityViewController.setValue("Video Share", forKey: "subject")
self.activityViewController.popoverPresentationController?.sourceView = self.view
self.activityViewController.excludedActivityTypes = [ UIActivity.ActivityType.airDrop, UIActivity.ActivityType.postToTwitter]
self.present(self.activityViewController, animated: true, completion: nil)
}
}
#objc func btnSaveTapped(sender: UIButton){
if self.isUpdate{
self.downloadVideos(video: self.videoData[sender.tag].videoLink ?? kEmptyString)
}else{
self.downloadVideos(video: self.allVideoData[sender.tag].videoLink ?? kEmptyString)
}
}
private func downloadVideos(video : String){
Alamofire.request(video).downloadProgress(closure : { (progress) in
}).responseData{ (response) in
///# Create folder in document directory #///
if let data = response.result.value {
let path = (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString).appendingPathComponent("Videos")
let fileURL = URL(fileURLWithPath: path).appendingPathComponent("\(self.id)/\(self.titleVideo)/output.mp4")
do {
// Create the full intermediate path ("Videos/<id>/<title>") first,
// otherwise data.write(to:) fails when those subfolders do not exist yet.
try FileManager.default.createDirectory(at: fileURL.deletingLastPathComponent(), withIntermediateDirectories: true, attributes: nil)
try data.write(to: fileURL, options: .atomic)
print(fileURL)
} catch {
print("could not save video: \(error)")
}
}
}
}
// ----------------------------------------------------------------------
// MARK:- Scrollview Method
// ----------------------------------------------------------------------
func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
if scrollView == tableVideo {
pauseIndexPath = Int(scrollView.contentOffset.y / scrollView.frame.size.height)
pausePlayeVideos()
}
}
func scrollViewDidEndDragging(_ scrollView: UIScrollView, willDecelerate decelerate: Bool) {
if scrollView == tableVideo {
if !decelerate {
pausePlayeVideos()
}
}
}
// ----------------------------------------------------------------------
// MARK:- Function Pause & Play Button
// ----------------------------------------------------------------------
func puasVideoWhenPushVC (index : NSInteger) {
if isUpdate{
guard let cell = tableVideo.cellForRow(at: IndexPath(row: index, section: 0)) as? VideoPlayerTableCell else { return }
ASVideoPlayerController.sharedVideoPlayer.pauseVideo(forLayer: cell.videoLayer, url: videoData[index].videoLink ?? "")
}
else{
guard let cell = tableVideo.cellForRow(at: IndexPath(row: index, section: 0)) as? VideoPlayerTableCell else { return }
ASVideoPlayerController.sharedVideoPlayer.pauseVideo(forLayer: cell.videoLayer, url: allVideoData[index].videoLink ?? "")
}
}
#objc func pausePlayeVideos(){
ASVideoPlayerController.sharedVideoPlayer.pausePlayeVideosFor(tableView: tableVideo)
}
#objc func appEnteredFromBackground() {
ASVideoPlayerController.sharedVideoPlayer.pausePlayeVideosFor(tableView: tableVideo, appEnteredFromBackground: true)
}
#objc func didTapPlayPauseButton(_ sender: UIButton) {
guard let cell = tableVideo.cellForRow(at: IndexPath(row: sender.tag, section: 0)) as? VideoPlayerTableCell else { return }
if sender.isSelected {
if isUpdate{
ASVideoPlayerController.sharedVideoPlayer.playVideo(withLayer: cell.videoLayer, url: videoData[sender.tag].videoLink ?? "")
}else{
ASVideoPlayerController.sharedVideoPlayer.playVideo(withLayer: cell.videoLayer, url: allVideoData[sender.tag].videoLink ?? "")
}
} else {
if isUpdate{
ASVideoPlayerController.sharedVideoPlayer.pauseVideo(forLayer: cell.videoLayer, url: videoData[sender.tag].videoLink ?? "")
}else{
ASVideoPlayerController.sharedVideoPlayer.pauseVideo(forLayer: cell.videoLayer, url: allVideoData[sender.tag].videoLink ?? "")
}
}
sender.isSelected = !sender.isSelected
}
Swift 5
#IBAction func buttonPressed(_ sender: Any) {
let videoURL = course.introductionVideoURL
let player = AVPlayer(url: videoURL)
let playerViewController = AVPlayerViewController()
playerViewController.player = player
present(playerViewController, animated: true, completion: {
playerViewController.player!.play()
})
// Here `course` is a model object; the video URL is stored inside the model, so I read it from there.
// `introductionVideoURL` is declared inside the model as:
var introductionVideoURL: URL
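For context, a minimal sketch of the kind of model this assumes (the Course type and property name come from the snippet above; the initializer and URL are hypothetical):
import Foundation
struct Course {
    // Hypothetical model: the answer only says the URL lives in a model file.
    var introductionVideoURL: URL
}
let course = Course(introductionVideoURL: URL(string: "https://example.com/intro.mp4")!)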
Alternatively, you can skip the model and build the URL inline. Replace this line:
let videoURL = course.introductionVideoURL
with:
guard let videoURL = URL(string: "https://something.mp4") else {
    return
}
How to play a local video with Swift?
I have a short mp4 video file that I've added to my current Xcode6 Beta project.
I want to play the video in my app.
After hours of searching, I can't find anything remotely helpful. Is there a way to accomplish this with Swift, or do you have to use Objective-C?
Can I get pointed in the right direction? I can't be the only one wondering this.
Sure you can use Swift!
1. Adding the video file
Add the video (let's call it video.m4v) to your Xcode project
2. Checking your video is in the bundle
Open the Project Navigator (cmd + 1)
Then select your project root > your Target > Build Phases > Copy Bundle Resources.
Your video MUST be there. If it's not, add it using the plus button.
3. Code
Open your View Controller and write this code.
import UIKit
import AVKit
import AVFoundation
class ViewController: UIViewController {
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
playVideo()
}
private func playVideo() {
guard let path = Bundle.main.path(forResource: "video", ofType:"m4v") else {
debugPrint("video.m4v not found")
return
}
let player = AVPlayer(url: URL(fileURLWithPath: path))
let playerController = AVPlayerViewController()
playerController.player = player
present(playerController, animated: true) {
player.play()
}
}
}
Another Swift 3 example, since the solution above did not work for me.
private func playVideo(from file:String) {
let file = file.components(separatedBy: ".")
guard let path = Bundle.main.path(forResource: file[0], ofType:file[1]) else {
debugPrint( "\(file.joined(separator: ".")) not found")
return
}
let player = AVPlayer(url: URL(fileURLWithPath: path))
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.view.bounds
self.view.layer.addSublayer(playerLayer)
player.play()
}
Usage:
playVideo(from: "video.extension")
Note:
Check Copy Bundle Resources under Build Phases to ensure that the video is available to the Project.
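If you want to verify at runtime that the file actually made it into the bundle, a minimal check (a sketch; the resource name matches the examples above):
if Bundle.main.url(forResource: "video", withExtension: "m4v") == nil {
    // Not in the app bundle - revisit Copy Bundle Resources
    debugPrint("video.m4v is missing from the bundle")
}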
You can also set up AVPlayer in another way, which opens up full customization of your video player screen.
Swift 2.3
Create a UIView subclass for playing video (you can use any UIView object; the only real requirement is an AVPlayerLayer. I set it up this way because it is much clearer to me).
import AVFoundation
import UIKit
class PlayerView: UIView {
override class func layerClass() -> AnyClass {
return AVPlayerLayer.self
}
var player:AVPlayer? {
set {
if let layer = layer as? AVPlayerLayer {
// Assign newValue; `player` here would just re-read the getter
layer.player = newValue
}
}
get {
if let layer = layer as? AVPlayerLayer {
return layer.player
} else {
return nil
}
}
}
}
Setup your player
import AVFoundation
import Foundation
protocol VideoPlayerDelegate {
func downloadedProgress(progress:Double)
func readyToPlay()
func didUpdateProgress(progress:Double)
func didFinishPlayItem()
func didFailPlayToEnd()
}
let videoContext:UnsafeMutablePointer<Void> = nil
class VideoPlayer : NSObject {
private var assetPlayer:AVPlayer?
private var playerItem:AVPlayerItem?
private var urlAsset:AVURLAsset?
private var videoOutput:AVPlayerItemVideoOutput?
private var assetDuration:Double = 0
private var playerView:PlayerView?
private var autoRepeatPlay:Bool = true
private var autoPlay:Bool = true
var delegate:VideoPlayerDelegate?
var playerRate:Float = 1 {
didSet {
if let player = assetPlayer {
player.rate = playerRate > 0 ? playerRate : 0.0
}
}
}
var volume:Float = 1.0 {
didSet {
if let player = assetPlayer {
player.volume = volume > 0 ? volume : 0.0
}
}
}
// MARK: - Init
convenience init(urlAsset:NSURL, view:PlayerView, startAutoPlay:Bool = true, repeatAfterEnd:Bool = true) {
self.init()
playerView = view
autoPlay = startAutoPlay
autoRepeatPlay = repeatAfterEnd
if let playView = playerView, let playerLayer = playView.layer as? AVPlayerLayer {
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
}
initialSetupWithURL(urlAsset)
prepareToPlay()
}
override init() {
super.init()
}
// MARK: - Public
func isPlaying() -> Bool {
if let player = assetPlayer {
return player.rate > 0
} else {
return false
}
}
func seekToPosition(seconds:Float64) {
if let player = assetPlayer {
pause()
if let timeScale = player.currentItem?.asset.duration.timescale {
player.seekToTime(CMTimeMakeWithSeconds(seconds, timeScale), completionHandler: { (complete) in
self.play()
})
}
}
}
func pause() {
if let player = assetPlayer {
player.pause()
}
}
func play() {
if let player = assetPlayer {
if (player.currentItem?.status == .ReadyToPlay) {
player.play()
player.rate = playerRate
}
}
}
func cleanUp() {
if let item = playerItem {
item.removeObserver(self, forKeyPath: "status")
item.removeObserver(self, forKeyPath: "loadedTimeRanges")
}
NSNotificationCenter.defaultCenter().removeObserver(self)
assetPlayer = nil
playerItem = nil
urlAsset = nil
}
// MARK: - Private
private func prepareToPlay() {
let keys = ["tracks"]
if let asset = urlAsset {
asset.loadValuesAsynchronouslyForKeys(keys, completionHandler: {
dispatch_async(dispatch_get_main_queue(), {
self.startLoading()
})
})
}
}
private func startLoading(){
var error:NSError?
guard let asset = urlAsset else {return}
let status:AVKeyValueStatus = asset.statusOfValueForKey("tracks", error: &error)
if status == AVKeyValueStatus.Loaded {
assetDuration = CMTimeGetSeconds(asset.duration)
let videoOutputOptions = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: videoOutputOptions)
playerItem = AVPlayerItem(asset: asset)
if let item = playerItem {
item.addObserver(self, forKeyPath: "status", options: .Initial, context: videoContext)
item.addObserver(self, forKeyPath: "loadedTimeRanges", options: [.New, .Old], context: videoContext)
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(playerItemDidReachEnd), name: AVPlayerItemDidPlayToEndTimeNotification, object: nil)
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(didFailedToPlayToEnd), name: AVPlayerItemFailedToPlayToEndTimeNotification, object: nil)
if let output = videoOutput {
item.addOutput(output)
item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed
assetPlayer = AVPlayer(playerItem: item)
if let player = assetPlayer {
player.rate = playerRate
}
addPeriodicalObserver()
if let playView = playerView, let layer = playView.layer as? AVPlayerLayer {
layer.player = assetPlayer
print("player created")
}
}
}
}
}
private func addPeriodicalObserver() {
let timeInterval = CMTimeMake(1, 1)
if let player = assetPlayer {
player.addPeriodicTimeObserverForInterval(timeInterval, queue: dispatch_get_main_queue(), usingBlock: { (time) in
self.playerDidChangeTime(time)
})
}
}
private func playerDidChangeTime(time:CMTime) {
if let player = assetPlayer {
let timeNow = CMTimeGetSeconds(player.currentTime())
let progress = timeNow / assetDuration
delegate?.didUpdateProgress(progress)
}
}
#objc private func playerItemDidReachEnd() {
delegate?.didFinishPlayItem()
if let player = assetPlayer {
player.seekToTime(kCMTimeZero)
if autoRepeatPlay == true {
play()
}
}
}
#objc private func didFailedToPlayToEnd() {
delegate?.didFailPlayToEnd()
}
private func playerDidChangeStatus(status:AVPlayerStatus) {
if status == .Failed {
print("Failed to load video")
} else if status == .ReadyToPlay, let player = assetPlayer {
volume = player.volume
delegate?.readyToPlay()
if autoPlay == true && player.rate == 0.0 {
play()
}
}
}
private func moviewPlayerLoadedTimeRangeDidUpdated(ranges:Array<NSValue>) {
var maximum:NSTimeInterval = 0
for value in ranges {
let range:CMTimeRange = value.CMTimeRangeValue
let currentLoadedTimeRange = CMTimeGetSeconds(range.start) + CMTimeGetSeconds(range.duration)
if currentLoadedTimeRange > maximum {
maximum = currentLoadedTimeRange
}
}
let progress:Double = assetDuration == 0 ? 0.0 : Double(maximum) / assetDuration
delegate?.downloadedProgress(progress)
}
deinit {
cleanUp()
}
private func initialSetupWithURL(url:NSURL) {
let options = [AVURLAssetPreferPreciseDurationAndTimingKey : true]
urlAsset = AVURLAsset(URL: url, options: options)
}
// MARK: - Observations
override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
if context == videoContext {
if let key = keyPath {
if key == "status", let player = assetPlayer {
playerDidChangeStatus(player.status)
} else if key == "loadedTimeRanges", let item = playerItem {
moviewPlayerLoadedTimeRangeDidUpdated(item.loadedTimeRanges)
}
}
}
}
}
Usage:
Assume you have a view:
#IBOutlet private weak var playerView: PlayerView!
private var videoPlayer:VideoPlayer?
and in viewDidLoad()
private func preparePlayer() {
if let filePath = NSBundle.mainBundle().pathForResource("intro", ofType: "m4v") {
let fileURL = NSURL(fileURLWithPath: filePath)
videoPlayer = VideoPlayer(urlAsset: fileURL, view: playerView)
if let player = videoPlayer {
player.playerRate = 0.67
}
}
}
Objective-C
PlayerView.h
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
/*!
@class PlayerView
@discussion Represents a view for playing video. Layer - AVPlayerLayer
@availability iOS 7 and up
*/
@interface PlayerView : UIView
/*!
@var player
@discussion Player object
*/
@property (strong, nonatomic) AVPlayer *player;
@end
PlayerView.m
#import "PlayerView.h"
@implementation PlayerView
#pragma mark - LifeCycle
+ (Class)layerClass
{
return [AVPlayerLayer class];
}
#pragma mark - Setter/Getter
- (AVPlayer*)player
{
return [(AVPlayerLayer *)[self layer] player];
}
- (void)setPlayer:(AVPlayer *)player
{
[(AVPlayerLayer *)[self layer] setPlayer:player];
}
@end
VideoPlayer.h
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
#import "PlayerView.h"
/*!
@protocol VideoPlayerDelegate
@discussion Events from VideoPlayer
*/
@protocol VideoPlayerDelegate <NSObject>
@optional
/*!
@brief Called whenever the progress of the played item changes
@param progress
Playing progress
*/
- (void)progressDidUpdate:(CGFloat)progress;
/*!
@brief Called whenever the downloaded item progress changes
@param progress
Download progress
*/
- (void)downloadingProgress:(CGFloat)progress;
/*!
@brief Called when the playing time changes
@param time
Current playing time
*/
- (void)progressTimeChanged:(CMTime)time;
/*!
@brief Called when the player finishes playing the item
*/
- (void)playerDidPlayItem;
/*!
@brief Called when the player is ready to play the item
*/
- (void)isReadyToPlay;
@end
/*!
@class VideoPlayer
@discussion Video Player
@code
self.videoPlayer = [[VideoPlayer alloc] initVideoPlayerWithURL:someURL playerView:self.playerView];
[self.videoPlayer prepareToPlay];
self.videoPlayer.delegate = self; //optional
//later, when you want to play the item
[self.videoPlayer play];
@endcode
*/
@interface VideoPlayer : NSObject
/*!
@var delegate
@abstract Delegate for VideoPlayer
@discussion Set an object on this property to receive responses and notifications from this class
*/
@property (weak, nonatomic) id <VideoPlayerDelegate> delegate;
/*!
@var volume
@discussion Volume of the played asset
*/
@property (assign, nonatomic) CGFloat volume;
/*!
@var autoRepeat
@discussion Indicates whether the player should repeat the content when it finishes playing
*/
@property (assign, nonatomic) BOOL autoRepeat;
/*!
@brief Create a player with an asset URL
@param urlAsset
Source URL
@result
Instance of VideoPlayer
*/
- (instancetype)initVideoPlayerWithURL:(NSURL *)urlAsset;
/*!
@brief Create a player with an asset URL and configure the selected view for showing the result
@param urlAsset
Source URL
@param view
View on which the result will be shown
@result
Instance of VideoPlayer
*/
- (instancetype)initVideoPlayerWithURL:(NSURL *)urlAsset playerView:(PlayerView *)view;
/*!
@brief Call this method after creating the player to prepare it to play
*/
- (void)prepareToPlay;
/*!
@brief Play item
*/
- (void)play;
/*!
@brief Pause item
*/
- (void)pause;
/*!
@brief Stop item
*/
- (void)stop;
/*!
@brief Seek to the required position in the item and play if required
@param progressValue
Position to seek to, as a percentage
@param isPlaying
YES if the player should start playing the item implicitly
*/
- (void)seekPositionAtProgress:(CGFloat)progressValue withPlayingStatus:(BOOL)isPlaying;
/*!
@brief Player state
@result
YES if playing, NO if not playing
*/
- (BOOL)isPlaying;
/*!
@brief Indicates whether the player can provide a CVPixelBufferRef frame from the item
@result
YES / NO
*/
- (BOOL)canProvideFrame;
/*!
@brief CVPixelBufferRef frame from the item
@result
CVPixelBufferRef frame
*/
- (CVPixelBufferRef)getCurrentFramePicture;
@end
VideoPlayer.m
#import "VideoPlayer.h"
typedef NS_ENUM(NSUInteger, InternalStatus) {
InternalStatusPreparation,
InternalStatusReadyToPlay,
};
static const NSString *ItemStatusContext;
@interface VideoPlayer()
@property (strong, nonatomic) AVPlayer *assetPlayer;
@property (strong, nonatomic) AVPlayerItem *playerItem;
@property (strong, nonatomic) AVURLAsset *urlAsset;
@property (strong, atomic) AVPlayerItemVideoOutput *videoOutput;
@property (assign, nonatomic) CGFloat assetDuration;
@property (strong, nonatomic) PlayerView *playerView;
@property (assign, nonatomic) InternalStatus status;
@end
@implementation VideoPlayer
#pragma mark - LifeCycle
- (instancetype)initVideoPlayerWithURL:(NSURL *)urlAsset
{
if (self = [super init]) {
[self initialSetupWithURL:urlAsset];
}
return self;
}
- (instancetype)initVideoPlayerWithURL:(NSURL *)urlAsset playerView:(PlayerView *)view
{
if (self = [super init]) {
((AVPlayerLayer *)view.layer).videoGravity = AVLayerVideoGravityResizeAspectFill;
[self initialSetupWithURL:urlAsset playerView:view];
}
return self;
}
#pragma mark - Public
- (void)play
{
if ((self.assetPlayer.currentItem) && (self.assetPlayer.currentItem.status == AVPlayerItemStatusReadyToPlay)) {
[self.assetPlayer play];
}
}
- (void)pause
{
[self.assetPlayer pause];
}
- (void)seekPositionAtProgress:(CGFloat)progressValue withPlayingStatus:(BOOL)isPlaying
{
[self.assetPlayer pause];
int32_t timeScale = self.assetPlayer.currentItem.asset.duration.timescale;
__weak typeof(self) weakSelf = self;
[self.assetPlayer seekToTime:CMTimeMakeWithSeconds(progressValue, timeScale) completionHandler:^(BOOL finished) {
DLog(#"SEEK To time %f - success", progressValue);
if (isPlaying && finished) {
[weakSelf.assetPlayer play];
}
}];
}
- (void)setPlayerVolume:(CGFloat)volume
{
self.assetPlayer.volume = volume > .0 ? MAX(volume, 0.7) : 0.0f;
[self.assetPlayer play];
}
- (void)setPlayerRate:(CGFloat)rate
{
self.assetPlayer.rate = rate > .0 ? rate : 0.0f;
}
- (void)stop
{
[self.assetPlayer seekToTime:kCMTimeZero];
self.assetPlayer.rate =.0f;
}
- (BOOL)isPlaying
{
return self.assetPlayer.rate > 0 ? YES : NO;
}
#pragma mark - Private
- (void)initialSetupWithURL:(NSURL *)url
{
self.status = InternalStatusPreparation;
[self setupPlayerWithURL:url];
}
- (void)initialSetupWithURL:(NSURL *)url playerView:(PlayerView *)view
{
[self setupPlayerWithURL:url];
self.playerView = view;
}
- (void)setupPlayerWithURL:(NSURL *)url
{
NSDictionary *assetOptions = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
self.urlAsset = [AVURLAsset URLAssetWithURL:url options:assetOptions];
}
- (void)prepareToPlay
{
NSArray *keys = @[@"tracks"];
__weak VideoPlayer *weakSelf = self;
[weakSelf.urlAsset loadValuesAsynchronouslyForKeys:keys completionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[weakSelf startLoading];
});
}];
}
- (void)startLoading
{
NSError *error;
AVKeyValueStatus status = [self.urlAsset statusOfValueForKey:@"tracks" error:&error];
if (status == AVKeyValueStatusLoaded) {
self.assetDuration = CMTimeGetSeconds(self.urlAsset.duration);
NSDictionary* videoOutputOptions = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};
self.videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:videoOutputOptions];
self.playerItem = [AVPlayerItem playerItemWithAsset: self.urlAsset];
[self.playerItem addObserver:self
forKeyPath:@"status"
options:NSKeyValueObservingOptionInitial
context:&ItemStatusContext];
[self.playerItem addObserver:self
forKeyPath:@"loadedTimeRanges"
options:NSKeyValueObservingOptionNew|NSKeyValueObservingOptionOld
context:&ItemStatusContext];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:self.playerItem];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(didFailedToPlayToEnd)
name:AVPlayerItemFailedToPlayToEndTimeNotification
object:nil];
[self.playerItem addOutput:self.videoOutput];
self.assetPlayer = [AVPlayer playerWithPlayerItem:self.playerItem];
[self addPeriodicalObserver];
[((AVPlayerLayer *)self.playerView.layer) setPlayer:self.assetPlayer];
DLog(@"Player created");
} else {
DLog(@"The asset's tracks were not loaded:\n%@", error.localizedDescription);
}
}
#pragma mark - Observation
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
BOOL isOldKey = [change[NSKeyValueChangeNewKey] isEqual:change[NSKeyValueChangeOldKey]];
if (!isOldKey) {
if (context == &ItemStatusContext) {
if ([keyPath isEqualToString:@"status"] && !self.status) {
if (self.assetPlayer.status == AVPlayerItemStatusReadyToPlay) {
self.status = InternalStatusReadyToPlay;
}
[self moviePlayerDidChangeStatus:self.assetPlayer.status];
} else if ([keyPath isEqualToString:@"loadedTimeRanges"]) {
[self moviewPlayerLoadedTimeRangeDidUpdated:self.playerItem.loadedTimeRanges];
}
}
}
}
- (void)moviePlayerDidChangeStatus:(AVPlayerStatus)status
{
if (status == AVPlayerStatusFailed) {
DLog(@"Failed to load video");
} else if (status == AVPlayerItemStatusReadyToPlay) {
DLog(@"Player ready to play");
self.volume = self.assetPlayer.volume;
if (self.delegate && [self.delegate respondsToSelector:@selector(isReadyToPlay)]) {
[self.delegate isReadyToPlay];
}
}
}
- (void)moviewPlayerLoadedTimeRangeDidUpdated:(NSArray *)ranges
{
NSTimeInterval maximum = 0;
for (NSValue *value in ranges) {
CMTimeRange range;
[value getValue:&range];
NSTimeInterval currenLoadedRangeTime = CMTimeGetSeconds(range.start) + CMTimeGetSeconds(range.duration);
if (currenLoadedRangeTime > maximum) {
maximum = currenLoadedRangeTime;
}
}
CGFloat progress = (self.assetDuration == 0) ? 0 : maximum / self.assetDuration;
if (self.delegate && [self.delegate respondsToSelector:@selector(downloadingProgress:)]) {
[self.delegate downloadingProgress:progress];
}
}
- (void)playerItemDidReachEnd:(NSNotification *)notification
{
if (self.delegate && [self.delegate respondsToSelector:@selector(playerDidPlayItem)]){
[self.delegate playerDidPlayItem];
}
[self.assetPlayer seekToTime:kCMTimeZero];
if (self.autoRepeat) {
[self.assetPlayer play];
}
}
- (void)didFailedToPlayToEnd
{
DLog(#"Failed play video to the end");
}
- (void)addPeriodicalObserver
{
CMTime interval = CMTimeMake(1, 1);
__weak typeof(self) weakSelf = self;
[self.assetPlayer addPeriodicTimeObserverForInterval:interval queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
[weakSelf playerTimeDidChange:time];
}];
}
- (void)playerTimeDidChange:(CMTime)time
{
double timeNow = CMTimeGetSeconds(self.assetPlayer.currentTime);
if (self.delegate && [self.delegate respondsToSelector:@selector(progressDidUpdate:)]) {
[self.delegate progressDidUpdate:(CGFloat) (timeNow / self.assetDuration)];
}
}
#pragma mark - Notification
- (void)setupAppNotification
{
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didEnterBackground) name:UIApplicationDidEnterBackgroundNotification object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterForeground) name:UIApplicationWillEnterForegroundNotification object:nil];
}
- (void)didEnterBackground
{
[self.assetPlayer pause];
}
- (void)willEnterForeground
{
[self.assetPlayer pause];
}
#pragma mark - GetImagesFromVideoPlayer
- (BOOL)canProvideFrame
{
return self.assetPlayer.status == AVPlayerItemStatusReadyToPlay;
}
- (CVPixelBufferRef)getCurrentFramePicture
{
CMTime currentTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()];
if (self.delegate && [self.delegate respondsToSelector:@selector(progressTimeChanged:)]) {
[self.delegate progressTimeChanged:currentTime];
}
if (![self.videoOutput hasNewPixelBufferForItemTime:currentTime]) {
return 0;
}
CVPixelBufferRef buffer = [self.videoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:NULL];
return buffer;
}
#pragma mark - CleanUp
- (void)removeObserversFromPlayer
{
@try {
[self.playerItem removeObserver:self forKeyPath:@"status"];
[self.playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
[[NSNotificationCenter defaultCenter] removeObserver:self];
[[NSNotificationCenter defaultCenter] removeObserver:self.assetPlayer];
}
@catch (NSException *ex) {
DLog(@"Can't remove observer in Player - %@", ex.description);
}
}
- (void)cleanUp
{
[self removeObserversFromPlayer];
self.assetPlayer.rate = 0;
self.assetPlayer = nil;
self.playerItem = nil;
self.urlAsset = nil;
}
- (void)dealloc
{
[self cleanUp];
}
@end
Of course, the resource (video file) must have its target membership set to your project.
Additionally, see the corresponding Apple Developer guide.
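The frame-grabbing part of this answer (canProvideFrame / getCurrentFramePicture) translates directly to Swift via AVPlayerItemVideoOutput. A minimal sketch, assuming the output has already been added to the playing AVPlayerItem:
import AVFoundation
import QuartzCore
// Pull the most recent frame, mirroring getCurrentFramePicture above.
func currentPixelBuffer(from videoOutput: AVPlayerItemVideoOutput) -> CVPixelBuffer? {
    let itemTime = videoOutput.itemTime(forHostTime: CACurrentMediaTime())
    guard videoOutput.hasNewPixelBuffer(forItemTime: itemTime) else { return nil }
    return videoOutput.copyPixelBuffer(forItemTime: itemTime, itemTimeForDisplay: nil)
}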
Here is a solution for Swift 5.2
PlayerView.swift:
import AVFoundation
import UIKit
class PlayerView: UIView {
var player: AVPlayer? {
get {
return playerLayer.player
}
set {
playerLayer.player = newValue
}
}
var playerLayer: AVPlayerLayer {
return layer as! AVPlayerLayer
}
// Override UIView property
override static var layerClass: AnyClass {
return AVPlayerLayer.self
}
}
VideoPlayer.swift
import AVFoundation
import Foundation
protocol VideoPlayerDelegate {
func downloadedProgress(progress:Double)
func readyToPlay()
func didUpdateProgress(progress:Double)
func didFinishPlayItem()
func didFailPlayToEnd()
}
let videoContext: UnsafeMutableRawPointer? = nil
class VideoPlayer : NSObject {
private var assetPlayer:AVPlayer?
private var playerItem:AVPlayerItem?
private var urlAsset:AVURLAsset?
private var videoOutput:AVPlayerItemVideoOutput?
private var assetDuration:Double = 0
private var playerView:PlayerView?
private var autoRepeatPlay:Bool = true
private var autoPlay:Bool = true
var delegate:VideoPlayerDelegate?
var playerRate:Float = 1 {
didSet {
if let player = assetPlayer {
player.rate = playerRate > 0 ? playerRate : 0.0
}
}
}
var volume:Float = 1.0 {
didSet {
if let player = assetPlayer {
player.volume = volume > 0 ? volume : 0.0
}
}
}
// MARK: - Init
convenience init(urlAsset:NSURL, view:PlayerView, startAutoPlay:Bool = true, repeatAfterEnd:Bool = true) {
self.init()
playerView = view
autoPlay = startAutoPlay
autoRepeatPlay = repeatAfterEnd
if let playView = playerView, let playerLayer = playView.layer as? AVPlayerLayer {
playerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
}
initialSetupWithURL(url: urlAsset)
prepareToPlay()
}
override init() {
super.init()
}
// MARK: - Public
func isPlaying() -> Bool {
if let player = assetPlayer {
return player.rate > 0
} else {
return false
}
}
func seekToPosition(seconds:Float64) {
if let player = assetPlayer {
pause()
if let timeScale = player.currentItem?.asset.duration.timescale {
player.seek(to: CMTimeMakeWithSeconds(seconds, preferredTimescale: timeScale), completionHandler: { (complete) in
self.play()
})
}
}
}
func pause() {
if let player = assetPlayer {
player.pause()
}
}
func play() {
if let player = assetPlayer {
if (player.currentItem?.status == .readyToPlay) {
player.play()
player.rate = playerRate
}
}
}
func cleanUp() {
if let item = playerItem {
item.removeObserver(self, forKeyPath: "status")
item.removeObserver(self, forKeyPath: "loadedTimeRanges")
}
NotificationCenter.default.removeObserver(self)
assetPlayer = nil
playerItem = nil
urlAsset = nil
}
// MARK: - Private
private func prepareToPlay() {
let keys = ["tracks"]
if let asset = urlAsset {
asset.loadValuesAsynchronously(forKeys: keys, completionHandler: {
DispatchQueue.main.async {
self.startLoading()
}
})
}
}
private func startLoading(){
var error:NSError?
guard let asset = urlAsset else {return}
let status:AVKeyValueStatus = asset.statusOfValue(forKey: "tracks", error: &error)
if status == AVKeyValueStatus.loaded {
assetDuration = CMTimeGetSeconds(asset.duration)
let videoOutputOptions = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: videoOutputOptions)
playerItem = AVPlayerItem(asset: asset)
if let item = playerItem {
item.addObserver(self, forKeyPath: "status", options: .initial, context: videoContext)
item.addObserver(self, forKeyPath: "loadedTimeRanges", options: [.new, .old], context: videoContext)
NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidReachEnd), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didFailedToPlayToEnd), name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, object: nil)
if let output = videoOutput {
item.add(output)
item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithm.varispeed
assetPlayer = AVPlayer(playerItem: item)
if let player = assetPlayer {
player.rate = playerRate
}
addPeriodicalObserver()
if let playView = playerView, let layer = playView.layer as? AVPlayerLayer {
layer.player = assetPlayer
print("player created")
}
}
}
}
}
private func addPeriodicalObserver() {
let timeInterval = CMTimeMake(value: 1, timescale: 1)
if let player = assetPlayer {
player.addPeriodicTimeObserver(forInterval: timeInterval, queue: DispatchQueue.main, using: { (time) in
self.playerDidChangeTime(time: time)
})
}
}
private func playerDidChangeTime(time:CMTime) {
if let player = assetPlayer {
let timeNow = CMTimeGetSeconds(player.currentTime())
let progress = timeNow / assetDuration
delegate?.didUpdateProgress(progress: progress)
}
}
#objc private func playerItemDidReachEnd() {
delegate?.didFinishPlayItem()
if let player = assetPlayer {
player.seek(to: CMTime.zero)
if autoRepeatPlay == true {
play()
}
}
}
#objc private func didFailedToPlayToEnd() {
delegate?.didFailPlayToEnd()
}
private func playerDidChangeStatus(status:AVPlayer.Status) {
if status == .failed {
print("Failed to load video")
} else if status == .readyToPlay, let player = assetPlayer {
volume = player.volume
delegate?.readyToPlay()
if autoPlay == true && player.rate == 0.0 {
play()
}
}
}
private func moviewPlayerLoadedTimeRangeDidUpdated(ranges:Array<NSValue>) {
var maximum:TimeInterval = 0
for value in ranges {
let range:CMTimeRange = value.timeRangeValue
let currentLoadedTimeRange = CMTimeGetSeconds(range.start) + CMTimeGetSeconds(range.duration)
if currentLoadedTimeRange > maximum {
maximum = currentLoadedTimeRange
}
}
let progress:Double = assetDuration == 0 ? 0.0 : Double(maximum) / assetDuration
delegate?.downloadedProgress(progress: progress)
}
deinit {
cleanUp()
}
private func initialSetupWithURL(url:NSURL) {
let options = [AVURLAssetPreferPreciseDurationAndTimingKey : true]
urlAsset = AVURLAsset(url: url as URL, options: options)
}
// MARK: - Observations
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if context == videoContext {
if let key = keyPath {
if key == "status", let player = assetPlayer {
playerDidChangeStatus(status: player.status)
} else if key == "loadedTimeRanges", let item = playerItem {
moviewPlayerLoadedTimeRangeDidUpdated(ranges: item.loadedTimeRanges)
}
}
}
}
}
Usage:
private var playerView: PlayerView = PlayerView()
private var videoPlayer:VideoPlayer?
and inside viewDidLoad():
view.addSubview(playerView)
preparePlayer()
// set Constraints (if you do it purely in code)
playerView.translatesAutoresizingMaskIntoConstraints = false
playerView.topAnchor.constraint(equalTo: view.topAnchor, constant: 10.0).isActive = true
playerView.leadingAnchor.constraint(equalTo: view.leadingAnchor, constant: 10.0).isActive = true
playerView.trailingAnchor.constraint(equalTo: view.trailingAnchor, constant: -10.0).isActive = true
playerView.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -10.0).isActive = true // negative constant for a bottom inset
private func preparePlayer() {
if let filePath = Bundle.main.path(forResource: "my video", ofType: "mp4") {
let fileURL = NSURL(fileURLWithPath: filePath)
videoPlayer = VideoPlayer(urlAsset: fileURL, view: playerView)
if let player = videoPlayer {
player.playerRate = 0.67
}
}
}
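If you want the progress callbacks, also adopt VideoPlayerDelegate from above (a sketch; MyViewController is a placeholder for your own controller):
extension MyViewController: VideoPlayerDelegate {
    func downloadedProgress(progress: Double) { /* update a buffering indicator */ }
    func readyToPlay() { /* hide a loading spinner */ }
    func didUpdateProgress(progress: Double) { /* move a playback slider */ }
    func didFinishPlayItem() { /* show replay UI */ }
    func didFailPlayToEnd() { /* surface an error */ }
}
// and after creating the player:
videoPlayer?.delegate = self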
Swift 3
if let filePath = Bundle.main.path(forResource: "small", ofType: ".mp4") {
let filePathURL = NSURL.fileURL(withPath: filePath)
let player = AVPlayer(url: filePathURL)
let playerController = AVPlayerViewController()
playerController.player = player
self.present(playerController, animated: true) {
player.play()
}
}
None of the above worked for me in Swift 5 for a local video player.
After reading the Apple documentation, I was able to create a simple example for playing video from local resources.
Here is the code snippet:
import UIKit
import AVKit
class ViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
// TODO: Make sure "SampleVideo.mp4" has been added and copied into the project before playing
}
#IBAction func playLocalVideo(_ sender: Any) {
guard let path = Bundle.main.path(forResource: "SampleVideo", ofType: "mp4") else {
return
}
let videoURL = NSURL(fileURLWithPath: path)
// Create an AVPlayer, passing it the local video url path
let player = AVPlayer(url: videoURL as URL)
let controller = AVPlayerViewController()
controller.player = player
present(controller, animated: true) {
player.play()
}
}
}
PS: Make sure you don't forget to add and copy a video named "SampleVideo.mp4" into the project.
This is gbk's answer converted to Swift 4.
1. In your main view controller:
if let filePath = Bundle.main.path(forResource: "clip", ofType: "mp4") {
let fileURL = NSURL(fileURLWithPath: filePath)
videoPlayer = VideoPlayer(urlAsset: fileURL, view: playerView)
if let player = videoPlayer {
player.playerRate = 1.00
}
}
You need the VideoPlayer class:
import AVFoundation
import Foundation
protocol VideoPlayerDelegate {
func downloadedProgress(progress:Double)
func readyToPlay()
func didUpdateProgress(progress:Double)
func didFinishPlayItem()
func didFailPlayToEnd()
}
let videoContext: UnsafeMutableRawPointer? = nil // UnsafeMutablePointer<Void> no longer exists in Swift 4; use a raw pointer
class VideoPlayer : NSObject {
private var assetPlayer:AVPlayer?
private var playerItem:AVPlayerItem?
private var urlAsset:AVURLAsset?
private var videoOutput:AVPlayerItemVideoOutput?
private var assetDuration:Double = 0
private var playerView:PlayerView?
private var autoRepeatPlay:Bool = true
private var autoPlay:Bool = true
var delegate:VideoPlayerDelegate?
var playerRate:Float = 1 {
didSet {
if let player = assetPlayer {
player.rate = playerRate > 0 ? playerRate : 0.0
}
}
}
var volume:Float = 1.0 {
didSet {
if let player = assetPlayer {
// AVPlayer volume is in the 0.0...1.0 range; use the property value instead of a hard-coded number
player.volume = volume > 0 ? volume : 0.0
}
}
}
// MARK: - Init
convenience init(urlAsset:NSURL, view:PlayerView, startAutoPlay:Bool = true, repeatAfterEnd:Bool = true) {
self.init()
playerView = view
autoPlay = startAutoPlay
autoRepeatPlay = repeatAfterEnd
if let playView = playerView{
if let playerLayer = playView.layer as? AVPlayerLayer {
playerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
}
}
initialSetupWithURL(url: urlAsset)
prepareToPlay()
}
override init() {
super.init()
}
// MARK: - Public
func isPlaying() -> Bool {
if let player = assetPlayer {
return player.rate > 0
} else {
return false
}
}
func seekToPosition(seconds:Float64) {
if let player = assetPlayer {
pause()
if let timeScale = player.currentItem?.asset.duration.timescale {
player.seek(to: CMTimeMakeWithSeconds(seconds, timeScale), completionHandler: { (complete) in
self.play()
})
}
}
}
func pause() {
if let player = assetPlayer {
player.pause()
}
}
func play() {
if let player = assetPlayer {
if (player.currentItem?.status == .readyToPlay) {
player.play()
player.rate = playerRate
}
}
}
func cleanUp() {
if let item = playerItem {
item.removeObserver(self, forKeyPath: "status")
item.removeObserver(self, forKeyPath: "loadedTimeRanges")
}
NotificationCenter.default.removeObserver(self)
assetPlayer = nil
playerItem = nil
urlAsset = nil
}
// MARK: - Private
private func prepareToPlay() {
let keys = ["tracks"]
if let asset = urlAsset {
asset.loadValuesAsynchronously(forKeys: keys, completionHandler: {
// Hop to the main thread before touching player/UI state
DispatchQueue.main.async {
self.startLoading()
}
})
}
}
private func startLoading(){
var error:NSError?
guard let asset = urlAsset else {return}
let status:AVKeyValueStatus = asset.statusOfValue(forKey: "tracks", error: &error)
if status == AVKeyValueStatus.loaded {
assetDuration = CMTimeGetSeconds(asset.duration)
let videoOutputOptions = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: videoOutputOptions)
playerItem = AVPlayerItem(asset: asset)
if let item = playerItem {
item.addObserver(self, forKeyPath: "status", options: .initial, context: videoContext)
item.addObserver(self, forKeyPath: "loadedTimeRanges", options: [.new, .old], context: videoContext)
NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidReachEnd), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didFailedToPlayToEnd), name:NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, object: nil)
if let output = videoOutput {
item.add(output)
item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithm.varispeed
assetPlayer = AVPlayer(playerItem: item)
if let player = assetPlayer {
player.rate = playerRate
}
addPeriodicalObserver()
if let playView = playerView, let layer = playView.layer as? AVPlayerLayer {
layer.player = assetPlayer
}
}
}
}
}
private func addPeriodicalObserver() {
let timeInterval = CMTimeMake(1, 1)
if let player = assetPlayer {
player.addPeriodicTimeObserver(forInterval: timeInterval, queue:
DispatchQueue.main
, using: { (time) in
self.playerDidChangeTime(time: time)
})
}
}
private func playerDidChangeTime(time:CMTime) {
if let player = assetPlayer {
let timeNow = CMTimeGetSeconds(player.currentTime())
let progress = timeNow / assetDuration
delegate?.didUpdateProgress(progress: progress)
}
}
#objc private func playerItemDidReachEnd() {
delegate?.didFinishPlayItem()
if let player = assetPlayer {
player.seek(to: kCMTimeZero)
if autoRepeatPlay == true {
play()
}
}
}
#objc private func didFailedToPlayToEnd() {
delegate?.didFailPlayToEnd()
}
private func playerDidChangeStatus(status:AVPlayerStatus) {
if status == .failed {
print("Failed to load video")
} else if status == .readyToPlay, let player = assetPlayer {
volume = player.volume
delegate?.readyToPlay()
if autoPlay == true && player.rate == 0.0 {
play()
}
}
}
private func moviewPlayerLoadedTimeRangeDidUpdated(ranges:Array<NSValue>) {
var maximum:TimeInterval = 0
for value in ranges {
let range:CMTimeRange = value.timeRangeValue
let currentLoadedTimeRange = CMTimeGetSeconds(range.start) + CMTimeGetSeconds(range.duration)
if currentLoadedTimeRange > maximum {
maximum = currentLoadedTimeRange
}
}
let progress:Double = assetDuration == 0 ? 0.0 : Double(maximum) / assetDuration
delegate?.downloadedProgress(progress: progress)
}
deinit {
cleanUp()
}
private func initialSetupWithURL(url:NSURL) {
let options = [AVURLAssetPreferPreciseDurationAndTimingKey : true]
urlAsset = AVURLAsset(url: url as URL, options: options)
}
// MARK: - Observations
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if context == videoContext {
if let key = keyPath {
if key == "status", let player = assetPlayer {
playerDidChangeStatus(status: player.status)
} else if key == "loadedTimeRanges", let item = playerItem {
moviewPlayerLoadedTimeRangeDidUpdated(ranges: item.loadedTimeRanges)
}
}
}
}
}
and a PlayerView class to create a video view:
import AVFoundation
import UIKit
class PlayerView: UIView {
override class var layerClass: AnyClass {
get {
return AVPlayerLayer.self
}
}
var player:AVPlayer? {
set {
if let layer = layer as? AVPlayerLayer {
// Assign newValue; `player` here would just re-read the getter
layer.player = newValue
}
}
get {
if let layer = layer as? AVPlayerLayer {
return layer.player
} else {
return nil
}
}
}
}
PlayerView for Swift 4.2
import AVFoundation
import UIKit
class PlayerView: UIView {
var player: AVPlayer? {
get {
return playerLayer.player
}
set {
playerLayer.player = newValue
}
}
var playerLayer: AVPlayerLayer {
return layer as! AVPlayerLayer
}
// Override UIView property
override static var layerClass: AnyClass {
return AVPlayerLayer.self
}
}
gbk's solution in Swift 3
PlayerView
import AVFoundation
import UIKit
class PlayerView: UIView {
override class var layerClass: AnyClass {
return AVPlayerLayer.self
}
var player:AVPlayer? {
set {
if let layer = layer as? AVPlayerLayer {
// Assign newValue; `player` here would just re-read the getter
layer.player = newValue
}
}
get {
if let layer = layer as? AVPlayerLayer {
return layer.player
} else {
return nil
}
}
}
}
VideoPlayer
import AVFoundation
import Foundation
protocol VideoPlayerDelegate {
func downloadedProgress(progress:Double)
func readyToPlay()
func didUpdateProgress(progress:Double)
func didFinishPlayItem()
func didFailPlayToEnd()
}
let videoContext:UnsafeMutableRawPointer? = nil
class VideoPlayer : NSObject {
private var assetPlayer:AVPlayer?
private var playerItem:AVPlayerItem?
private var urlAsset:AVURLAsset?
private var videoOutput:AVPlayerItemVideoOutput?
private var assetDuration:Double = 0
private var playerView:PlayerView?
private var autoRepeatPlay:Bool = true
private var autoPlay:Bool = true
var delegate:VideoPlayerDelegate?
var playerRate:Float = 1 {
didSet {
if let player = assetPlayer {
player.rate = playerRate > 0 ? playerRate : 0.0
}
}
}
var volume:Float = 1.0 {
didSet {
if let player = assetPlayer {
player.volume = volume > 0 ? volume : 0.0
}
}
}
// MARK: - Init
convenience init(urlAsset: URL, view:PlayerView, startAutoPlay:Bool = true, repeatAfterEnd:Bool = true) {
self.init()
playerView = view
autoPlay = startAutoPlay
autoRepeatPlay = repeatAfterEnd
if let playView = playerView, let playerLayer = playView.layer as? AVPlayerLayer {
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
}
initialSetupWithURL(url: urlAsset)
prepareToPlay()
}
override init() {
super.init()
}
// MARK: - Public
func isPlaying() -> Bool {
if let player = assetPlayer {
return player.rate > 0
} else {
return false
}
}
func seekToPosition(seconds:Float64) {
if let player = assetPlayer {
pause()
if let timeScale = player.currentItem?.asset.duration.timescale {
player.seek(to: CMTimeMakeWithSeconds(seconds, timeScale), completionHandler: { (complete) in
self.play()
})
}
}
}
func pause() {
if let player = assetPlayer {
player.pause()
}
}
func play() {
if let player = assetPlayer {
if (player.currentItem?.status == .readyToPlay) {
player.play()
player.rate = playerRate
}
}
}
func cleanUp() {
if let item = playerItem {
item.removeObserver(self, forKeyPath: "status")
item.removeObserver(self, forKeyPath: "loadedTimeRanges")
}
NotificationCenter.default.removeObserver(self)
assetPlayer = nil
playerItem = nil
urlAsset = nil
}
// MARK: - Private
private func prepareToPlay() {
let keys = ["tracks"]
if let asset = urlAsset {
asset.loadValuesAsynchronously(forKeys: keys, completionHandler: {
DispatchQueue.main.async {
self.startLoading()
}
})
}
}
private func startLoading(){
var error:NSError?
guard let asset = urlAsset else {return}
let status:AVKeyValueStatus = asset.statusOfValue(forKey: "tracks", error: &error)
if status == AVKeyValueStatus.loaded {
assetDuration = CMTimeGetSeconds(asset.duration)
let videoOutputOptions = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: videoOutputOptions)
playerItem = AVPlayerItem(asset: asset)
if let item = playerItem {
item.addObserver(self, forKeyPath: "status", options: .initial, context: videoContext)
item.addObserver(self, forKeyPath: "loadedTimeRanges", options: [.new, .old], context: videoContext)
NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidReachEnd), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didFailedToPlayToEnd), name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, object: nil)
if let output = videoOutput {
item.add(output)
item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed
assetPlayer = AVPlayer(playerItem: item)
if let player = assetPlayer {
player.rate = playerRate
}
addPeriodicalObserver()
if let playView = playerView, let layer = playView.layer as? AVPlayerLayer {
layer.player = assetPlayer
print("player created")
}
}
}
}
}
private func addPeriodicalObserver() {
let timeInterval = CMTimeMake(1, 1)
if let player = assetPlayer {
player.addPeriodicTimeObserver(forInterval: timeInterval, queue: DispatchQueue.main, using: { (time) in
self.playerDidChangeTime(time: time)
})
}
}
private func playerDidChangeTime(time:CMTime) {
if let player = assetPlayer {
let timeNow = CMTimeGetSeconds(player.currentTime())
let progress = timeNow / assetDuration
delegate?.didUpdateProgress(progress: progress)
}
}
#objc private func playerItemDidReachEnd() {
delegate?.didFinishPlayItem()
if let player = assetPlayer {
player.seek(to: kCMTimeZero)
if autoRepeatPlay == true {
play()
}
}
}
#objc private func didFailedToPlayToEnd() {
delegate?.didFailPlayToEnd()
}
private func playerDidChangeStatus(status:AVPlayerStatus) {
if status == .failed {
print("Failed to load video")
} else if status == .readyToPlay, let player = assetPlayer {
volume = player.volume
delegate?.readyToPlay()
if autoPlay == true && player.rate == 0.0 {
play()
}
}
}
private func moviewPlayerLoadedTimeRangeDidUpdated(ranges:Array<NSValue>) {
var maximum:TimeInterval = 0
for value in ranges {
let range:CMTimeRange = value.timeRangeValue
let currentLoadedTimeRange = CMTimeGetSeconds(range.start) + CMTimeGetSeconds(range.duration)
if currentLoadedTimeRange > maximum {
maximum = currentLoadedTimeRange
}
}
let progress:Double = assetDuration == 0 ? 0.0 : Double(maximum) / assetDuration
delegate?.downloadedProgress(progress: progress)
}
deinit {
cleanUp()
}
private func initialSetupWithURL(url: URL) {
let options = [AVURLAssetPreferPreciseDurationAndTimingKey : true]
urlAsset = AVURLAsset(url: url, options: options)
}
// MARK: - Observations
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if context == videoContext {
if let key = keyPath {
if key == "status", let player = assetPlayer {
playerDidChangeStatus(status: player.status)
} else if key == "loadedTimeRanges", let item = playerItem {
moviewPlayerLoadedTimeRangeDidUpdated(ranges: item.loadedTimeRanges)
}
}
}
}
}
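Usage mirrors the Swift 2.3 answer above (a sketch; "video"/"mp4" are placeholder resource names):
@IBOutlet private weak var playerView: PlayerView!
private var videoPlayer: VideoPlayer?
private func preparePlayer() {
    if let filePath = Bundle.main.path(forResource: "video", ofType: "mp4") {
        let fileURL = URL(fileURLWithPath: filePath)
        videoPlayer = VideoPlayer(urlAsset: fileURL, view: playerView)
        videoPlayer?.playerRate = 1.0
    }
}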