How to play video with AVPlayerViewController (AVKit) in Swift

How do you play a video with AVKit's AVPlayerViewController in Swift? Here's what I have so far:
override func viewDidLoad() {
super.viewDidLoad()
let videoURLWithPath = "http://****/5.m3u8"
let videoURL = NSURL(string: videoURLWithPath)
playerViewController = AVPlayerViewController()
dispatch_async(dispatch_get_main_queue()) {
self.playerViewController?.player = AVPlayer.playerWithURL(videoURL) as AVPlayer
}
}

Swift 3.x - 5.x
Necessary: import AVKit and import AVFoundation.
The AVFoundation framework is needed even if you only use AVPlayer.
If you want to use AVPlayerViewController:
let videoURL = URL(string: "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4")
let player = AVPlayer(url: videoURL!)
let playerViewController = AVPlayerViewController()
playerViewController.player = player
self.present(playerViewController, animated: true) {
playerViewController.player!.play()
}
or just AVPlayer:
let videoURL = URL(string: "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4")
let player = AVPlayer(url: videoURL!)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.view.bounds
self.view.layer.addSublayer(playerLayer)
player.play()
It's better to put this code into override func viewDidAppear(_ animated: Bool) or later, once the view's bounds are set.
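For example, a minimal sketch (assuming a hypothetical VideoPlaybackViewController and the same sample URL) that adds the layer once and keeps its frame in sync with the view:
import UIKit
import AVFoundation

class VideoPlaybackViewController: UIViewController {
    // Strong references so the player and layer outlive viewDidAppear.
    private let player = AVPlayer(url: URL(string: "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4")!)
    private lazy var playerLayer = AVPlayerLayer(player: player)

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        if playerLayer.superlayer == nil {
            view.layer.addSublayer(playerLayer)
            player.play()
        }
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        // The view's bounds are final here, so the layer always matches them.
        playerLayer.frame = view.bounds
    }
}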
Objective-C
AVPlayerViewController:
NSURL *videoURL = [NSURL URLWithString:@"https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"];
AVPlayer *player = [AVPlayer playerWithURL:videoURL];
AVPlayerViewController *playerViewController = [AVPlayerViewController new];
playerViewController.player = player;
[self presentViewController:playerViewController animated:YES completion:^{
[playerViewController.player play];
}];
or just AVPlayer:
NSURL *videoURL = [NSURL URLWithString:@"https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"];
AVPlayer *player = [AVPlayer playerWithURL:videoURL];
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
playerLayer.frame = self.view.bounds;
[self.view.layer addSublayer:playerLayer];
[player play];

Try this, definitely works for Swift 2.0
let player = AVPlayer(URL: url)
let playerController = AVPlayerViewController()
playerController.player = player
self.addChildViewController(playerController)
self.view.addSubview(playerController.view)
playerController.view.frame = self.view.frame
player.play()

Swift 5+
First of all, define two properties in your view controller:
var player: AVPlayer!
var playerViewController: AVPlayerViewController!
Here I'm adding the player to a desired view:
@IBOutlet weak var playerView: UIView!
Then add the following code to the viewDidLoad method:
let videoURL = URL(string: "videoUrl")
self.player = AVPlayer(url: videoURL!)
self.playerViewController = AVPlayerViewController()
playerViewController.player = self.player
playerViewController.view.frame = self.playerView.frame
playerViewController.player?.pause()
self.playerView.addSubview(playerViewController.view)
If you don't keep player and playerViewController as properties (only as locals), they will be deallocated and you won't be able to embed the player.
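For completeness, a minimal sketch of the embedding with proper view controller containment, assuming the same playerView outlet and the sample Big Buck Bunny URL from earlier as a stand-in:
let videoURL = URL(string: "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4")!
self.player = AVPlayer(url: videoURL)
self.playerViewController = AVPlayerViewController()
playerViewController.player = self.player

// Proper containment so the child controller gets rotation/appearance callbacks.
addChild(playerViewController)
playerViewController.view.frame = playerView.bounds
playerView.addSubview(playerViewController.view)
playerViewController.didMove(toParent: self)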

Try this:
var player: AVPlayer!

var streamingURL: NSURL = NSURL(string: playerURL)!
player = AVPlayer(URL: streamingURL)
let avPlayerLayer = AVPlayerLayer(player: player)  // create the layer after the player exists
avPlayerLayer.frame = CGRectMake(/* your frame */)
self.view.layer.addSublayer(avPlayerLayer)
player.play()

Swift 3.0 Full source code:
import UIKit
import AVKit
import AVFoundation
class ViewController: UIViewController,AVPlayerViewControllerDelegate
{
var playerController = AVPlayerViewController()
@IBAction func Play(_ sender: Any)
{
let path = Bundle.main.path(forResource: "video", ofType: "mp4")
let url = NSURL(fileURLWithPath: path!)
let player = AVPlayer(url:url as URL)
playerController = AVPlayerViewController()
NotificationCenter.default.addObserver(self, selector: #selector(ViewController.didfinishplaying(note:)),name:NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: player.currentItem)
playerController.player = player
playerController.allowsPictureInPicturePlayback = true
playerController.delegate = self
playerController.player?.play()
self.present(playerController,animated:true,completion:nil)
}
func didfinishplaying(note : NSNotification)
{
playerController.dismiss(animated: true,completion: nil)
let alertview = UIAlertController(title:"finished",message:"video finished",preferredStyle: .alert)
alertview.addAction(UIAlertAction(title:"Ok",style: .default, handler: nil))
self.present(alertview,animated:true,completion: nil)
}
func playerViewController(_ playerViewController: AVPlayerViewController, restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void) {
let currentviewController = navigationController?.visibleViewController
if currentviewController != playerViewController
{
currentviewController?.present(playerViewController,animated: true,completion:nil)
}
}
}

Objective-C
This works in Xcode 7 and later.
In your .h file, import AVKit/AVKit.h and AVFoundation/AVFoundation.h. Then, in the .m file, add this code:
NSURL *url=[[NSBundle mainBundle]URLForResource:@"arreg" withExtension:@"mp4"];
AVPlayer *video=[AVPlayer playerWithURL:url];
AVPlayerViewController *controller=[[AVPlayerViewController alloc]init];
controller.player=video;
[self.view addSubview:controller.view];
controller.view.frame=self.view.frame;
[self addChildViewController:controller];
[video play];

Using MPMoviePlayerController :
import UIKit
import MediaPlayer
class ViewController: UIViewController {
var streamPlayer : MPMoviePlayerController = MPMoviePlayerController(contentURL: NSURL(string:"video url here"))
override func viewDidLoad() {
super.viewDidLoad()
streamPlayer.view.frame = self.view.bounds
self.view.addSubview(streamPlayer.view)
streamPlayer.fullscreen = true
// Play the movie!
streamPlayer.play()
}
}
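Note that MPMoviePlayerController was deprecated in iOS 9; a rough AVPlayerViewController equivalent of the snippet above (modern Swift, same placeholder URL) would be something like:
import UIKit
import AVKit

class VideoViewController: UIViewController {
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        guard let url = URL(string: "video url here") else { return }
        let playerViewController = AVPlayerViewController()
        playerViewController.player = AVPlayer(url: url)
        present(playerViewController, animated: true) {
            playerViewController.player?.play()
        }
    }
}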
Using AVPlayer :
import AVFoundation
var playerItem:AVPlayerItem?
var player:AVPlayer?
override func viewDidLoad() {
super.viewDidLoad()
let url = NSURL(string: "url of the audio or video")
playerItem = AVPlayerItem(URL: url!)
player=AVPlayer(playerItem: playerItem!)
let playerLayer=AVPlayerLayer(player: player!)
playerLayer.frame=CGRectMake(0, 0, 300, 50)
self.view.layer.addSublayer(playerLayer)
}
I have a play button to handle button tap.
playButton.addTarget(self, action: "playButtonTapped:", forControlEvents: .TouchUpInside)
func playButtonTapped(sender: AnyObject) {
if player?.rate == 0
{
player!.play()
playButton.setImage(UIImage(named: "player_control_pause_50px.png"), forState: UIControlState.Normal)
} else {
player!.pause()
playButton.setImage(UIImage(named: "player_control_play_50px.png"), forState: UIControlState.Normal)
}
}
I have added an observer listening for AVPlayerItemDidPlayToEndTimeNotification.
override func viewWillAppear(animated: Bool) {
NSNotificationCenter.defaultCenter().addObserver(self, selector: "finishedPlaying:", name: AVPlayerItemDidPlayToEndTimeNotification, object: playerItem)
}
override func viewWillDisappear(animated: Bool) {
NSNotificationCenter.defaultCenter().removeObserver(self)
}
When the video/audio finishes playing, reset the button image and seek the player item back to the beginning:
func finishedPlaying(myNotification:NSNotification) {
playButton.setImage(UIImage(named: "player_control_play_50px.png"), forState: UIControlState.Normal)
let stoppedPlayerItem: AVPlayerItem = myNotification.object as! AVPlayerItem
stoppedPlayerItem.seekToTime(kCMTimeZero)
}
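For reference, in more recent Swift (4.2 and later) the same observer and seek would look roughly like this, assuming the same playerItem and playButton properties:
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(finishedPlaying(_:)),
                                           name: .AVPlayerItemDidPlayToEndTime,
                                           object: playerItem)
}

@objc func finishedPlaying(_ notification: Notification) {
    playButton.setImage(UIImage(named: "player_control_play_50px.png"), for: .normal)
    if let item = notification.object as? AVPlayerItem {
        item.seek(to: .zero, completionHandler: nil)
    }
}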

A bug(?!) in iOS 10 / Swift 3 / Xcode 8?
if let url = URL(string: "http://devstreaming.apple.com/videos/wwdc/2016/102w0bsn0ge83qfv7za/102/hls_vod_mvp.m3u8"){
let playerItem = AVPlayerItem(url: url)
let player = AVPlayer(playerItem: playerItem)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame=CGRect(x: 10, y: 10, width: 300, height: 300)
self.view.layer.addSublayer(playerLayer)
}
does not work (I just get an empty rect), while this works:
if let url = URL(string: "http://devstreaming.apple.com/videos/wwdc/2016/102w0bsn0ge83qfv7za/102/hls_vod_mvp.m3u8"){
let player = AVPlayer(url: url)
let controller=AVPlayerViewController()
controller.player=player
controller.view.frame = self.view.frame
self.view.addSubview(controller.view)
self.addChildViewController(controller)
player.play()
}
Same URL...
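For what it's worth, a commonly suggested variant of the layer-based code (a sketch, not a confirmed fix for the behaviour described above) keeps the player and layer as view controller properties and sets the layer's frame after layout:
var player: AVPlayer?
var playerLayer: AVPlayerLayer?

override func viewDidLoad() {
    super.viewDidLoad()
    if let url = URL(string: "http://devstreaming.apple.com/videos/wwdc/2016/102w0bsn0ge83qfv7za/102/hls_vod_mvp.m3u8") {
        let player = AVPlayer(url: url)
        let layer = AVPlayerLayer(player: player)
        view.layer.addSublayer(layer)
        // Keep strong references around after viewDidLoad returns.
        self.player = player
        self.playerLayer = layer
        player.play()
    }
}

override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    playerLayer?.frame = CGRect(x: 10, y: 10, width: 300, height: 300)
}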

Swift 3:
import UIKit
import AVKit
import AVFoundation
class ViewController: UIViewController {
@IBOutlet weak var viewPlay: UIView!
var player : AVPlayer?
override func viewDidLoad() {
super.viewDidLoad()
let url : URL = URL(string: "http://static.videokart.ir/clip/100/480.mp4")!
player = AVPlayer(url: url)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.viewPlay.bounds
self.viewPlay.layer.addSublayer(playerLayer)
}
@IBAction func play(_ sender: Any) {
player?.play()
}
@IBAction func stop(_ sender: Any) {
player?.pause()
}
}

This worked for me in Swift 5.
I just added a sample video to the project (from Sample Videos) and added action buttons for playing a video from the web and from the local bundle, with the following Swift code example:
import UIKit
import AVKit
class ViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
//TODO: Make sure "SampleVideo.mp4" is added and copied into the project before playing
}
@IBAction func playWebVideo(_ sender: Any) {
guard let url = URL(string: "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4") else {
return
}
// Create an AVPlayer, passing it the HTTP Live Streaming URL.
let player = AVPlayer(url: url)
let controller = AVPlayerViewController()
controller.player = player
present(controller, animated: true) {
player.play()
}
}
@IBAction func playLocalVideo(_ sender: Any) {
guard let path = Bundle.main.path(forResource: "SampleVideo", ofType: "mp4") else {
return
}
let videoURL = URL(fileURLWithPath: path)
// Create an AVPlayer, passing it the local video URL
let player = AVPlayer(url: videoURL)
let controller = AVPlayerViewController()
controller.player = player
present(controller, animated: true) {
player.play()
}
}
}

let videoUrl = ... // your video URL
//Create the player first using your URL
let yourplayer = AVPlayer(url: videoUrl)
//Create the player controller and set its player
let playerController = AVPlayerViewController()
playerController.player = yourplayer
//Finally, present the controller with the player from your view controller
present(playerController, animated: true, completion: {
playerController.player!.play()
})

Swift 5.0
Improved from @ingconti's answer. This worked for me.
if let url = URL(string: "urUrlString"){
let player = AVPlayer(url: url)
let avController = AVPlayerViewController()
avController.player = player
// your desired frame
avController.view.frame = self.view.frame
self.view.addSubview(avController.view)
self.addChild(avController)
player.play()
}
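When you later want to remove the embedded player (for example before showing another screen), the reverse of those containment calls looks roughly like this, a sketch assuming avController is kept as a property:
avController.willMove(toParent: nil)
avController.view.removeFromSuperview()
avController.removeFromParent()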

Custom video player using the ASVideoPlayer library from GitHub: https://github.com/Asbat/ASVideoPlayer
// --------------------------------------------------------
// MARK:- variables
// --------------------------------------------------------
var videoPlayer = ASVideoPlayerController()
var videoData : [VideoModel] = []
var allVideoData : [AllVideoModel] = []
var cellHeights = [IndexPath: CGFloat]()
let loadingCellTableViewCellCellIdentifier = "LoadingCellTableViewCell"
var pauseIndexPath : Int = 0
var pageNumber = 1
var index = 0
var id = ""
var titleVideo = ""
var isUpdate = false
var myVideo : [MyVideo] = []
var imgs = [UIImage]()
var activityViewController : UIActivityViewController!
private var activityIndicator = NVActivityIndicatorView(frame: CGRect(x: 5, y: 5, width: 5, height: 5), type: .circleStrokeSpin, color: .systemBlue, padding: 5)
private let refreshControl = UIRefreshControl()
// --------------------------------------------------------
// MARK:- Outlets
// --------------------------------------------------------
@IBOutlet private var tableVideo: UITableView!
@IBOutlet private var _btnBack: UIButton!
@IBOutlet var _btnide: UIButton!
// ---------------------------------------------------------
// MARK:- Lifecycle
// ---------------------------------------------------------
override func viewDidLoad() {
super.viewDidLoad()
self._btnide.isHidden = true
tableVideo.rowHeight = UITableView.automaticDimension
tableVideo.separatorStyle = .none
tableVideo.delegate = self
tableVideo.dataSource = self
tableVideo.register(UINib(nibName: "VideoPlayerTableCell", bundle: nil), forCellReuseIdentifier: "VideoPlayerTableCell")
let cellNib = UINib(nibName:loadingCellTableViewCellCellIdentifier, bundle: nil)
tableVideo.register(cellNib, forCellReuseIdentifier: loadingCellTableViewCellCellIdentifier)
navigationController?.navigationBar.isHidden = true
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
ASVideoPlayerController.sharedVideoPlayer.pausePlayeVideosFor(tableView: tableVideo)
tableVideo.scrollToRow(at: IndexPath(row: index, section: 0), at: .none, animated: true)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
tableVideo.scrollToRow(at: IndexPath(row: pauseIndexPath, section: 0), at: .none, animated: true)
puasVideoWhenPushVC(index: pauseIndexPath)
NotificationCenter.default.removeObserver(self)
if tableVideo.isHidden == true {
}
}
@IBAction func _onTapBackBtnAction(_ sender: UIButton) {
tableVideo.scrollToRow(at: IndexPath(row: pauseIndexPath, section: 0), at: .none, animated: true)
self.puasVideoWhenPushVC(index: pauseIndexPath)
navigationController?.popViewController(animated: true)
navigationController?.navigationBar.isHidden = false
}
// ---------------------------------------------------------------------
// MARK:- TableView Delegate & DataSource
// ---------------------------------------------------------------------
extension VideoPlayerVC :
UITableViewDelegate,UITableViewDataSource,UIScrollViewDelegate {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
if isUpdate{
return videoData.count
}else{
return allVideoData.count
}
}
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
if tableView == tableVideo {
return view.bounds.height
}else {
return UITableView.automaticDimension
}
}
func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if tableView == tableVideo {
if let videoCell = cell as? ASAutoPlayVideoLayerContainer, let _ = videoCell.videoURL {
ASVideoPlayerController.sharedVideoPlayer.removeLayerFor(cell: videoCell)
}
}
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableVideo.dequeueReusableCell(withIdentifier: "VideoPlayerTableCell", for: indexPath) as! VideoPlayerTableCell
if isUpdate{
self.id = videoData[indexPath.row].id ?? ""
cell.configureCell(videoUrl: videoData[indexPath.row].videoLink )
}else{
self.id = allVideoData[indexPath.row].id ?? ""
cell.configureCell(videoUrl: allVideoData[indexPath.row].videoLink)
}
cell.btnPlayPause.isSelected = false
cell.btnPlayPause.tag = indexPath.row
cell.btnPlayPause.addTarget(self, action: #selector(didTapPlayPauseButton(_:)), for: .touchUpInside)
cell.btnPlayPause.setImage(UIImage(named: ""), for: .normal)
cell.btnPlayPause.setImage(UIImage(named: "btn_play_video"), for: .selected)
cell.btnUseNow.tag = indexPath.row
cell.btnUseNow.addTarget(self, action: #selector(btnUseNowTapped(sender:)), for: .touchUpInside)
cell.btnShare.tag = indexPath.row
cell.btnShare.addTarget(self, action: #selector(btnShareTapped(sender:)), for: .touchUpInside)
cell.btnSave.tag = indexPath.row
cell.btnSave.addTarget(self, action: #selector(btnSaveTapped(sender:)), for: .touchUpInside)
pauseIndexPath = indexPath.row
return cell
}
@objc func btnUseNowTapped(sender: UIButton){
self._btnide.isHidden = false
self.pausePlayeVideos()
let editVC = EditVideoVC()
var fileName : String = kEmptyString
if self.isUpdate{
editVC.videoString = self.videoData[sender.tag].videoLink ?? kEmptyString
editVC.id = self.videoData[sender.tag].id ?? kEmptyString
editVC.titleVideo = self.videoData[sender.tag].title ?? kEmptyString
fileName = self.videoData[sender.tag].videoZip ?? kEmptyString
guard !FileManager.isExist(id: self.videoData[sender.tag].id ?? kEmptyString) else{
print("File Downloaded")
self.puasVideoWhenPushVC(index: sender.tag)
self.navigationController?.pushViewController(editVC, animated: true)
return }
FileManager.download(id: self.videoData[sender.tag].id ?? kEmptyString, url: fileName) { (url) in
guard url != nil else {
print("not download")
return
}
self.puasVideoWhenPushVC(index: sender.tag)
self.navigationController?.pushViewController(editVC, animated: true)
}
}
else{
editVC.videoString = self.allVideoData[sender.tag].videoLink ?? kEmptyString
editVC.id = self.allVideoData[sender.tag].id ?? kEmptyString
editVC.titleVideo = self.allVideoData[sender.tag].title ?? kEmptyString
fileName = self.allVideoData[sender.tag].videoZip ?? kEmptyString
guard !FileManager.isExist(id: self.allVideoData[sender.tag].id ?? kEmptyString) else{
print("File Downloaded")
self.puasVideoWhenPushVC(index: sender.tag)
self.navigationController?.pushViewController(editVC, animated: true)
return }
FileManager.download(id: self.allVideoData[sender.tag].id ?? kEmptyString, url: fileName) { (url) in
guard url != nil else {
print("not download")
return
}
self.puasVideoWhenPushVC(index: sender.tag)
self.navigationController?.pushViewController(editVC, animated: true)
}
}
}
@objc func btnShareTapped(sender: UIButton){
if self.isUpdate{
let video = ["\(String(describing: self.videoData[sender.tag].videoLink))"]
self.activityViewController = UIActivityViewController(activityItems: video, applicationActivities: nil)
self.activityViewController.setValue("Video Share", forKey: "subject")
self.activityViewController.popoverPresentationController?.sourceView = self.view
self.activityViewController.excludedActivityTypes = [ UIActivity.ActivityType.airDrop, UIActivity.ActivityType.postToTwitter, UIActivity.ActivityType.addToReadingList, UIActivity.ActivityType.assignToContact,UIActivity.ActivityType.copyToPasteboard,UIActivity.ActivityType.mail,UIActivity.ActivityType.markupAsPDF,UIActivity.ActivityType.message,UIActivity.ActivityType.postToFacebook,UIActivity.ActivityType.postToFlickr,UIActivity.ActivityType.postToTencentWeibo,UIActivity.ActivityType.postToVimeo,UIActivity.ActivityType.postToWeibo,UIActivity.ActivityType.saveToCameraRoll]
self.present(self.activityViewController, animated: true, completion: nil)
}
else{
let categoryVideo = ["\(String(describing: self.allVideoData[sender.tag].videoLink))"]
self.activityViewController = UIActivityViewController(activityItems: categoryVideo, applicationActivities: nil)
self.activityViewController.setValue("Video Share", forKey: "subject")
self.activityViewController.popoverPresentationController?.sourceView = self.view
self.activityViewController.excludedActivityTypes = [ UIActivity.ActivityType.airDrop, UIActivity.ActivityType.postToTwitter]
self.present(self.activityViewController, animated: true, completion: nil)
}
}
@objc func btnSaveTapped(sender: UIButton){
if self.isUpdate{
self.downloadVideos(video: self.videoData[sender.tag].videoLink ?? kEmptyString)
}else{
self.downloadVideos(video: self.allVideoData[sender.tag].videoLink ?? kEmptyString)
}
}
private func downloadVideos(video : String){
Alamofire.request(video).downloadProgress(closure : { (progress) in
}).responseData{ (response) in
///# Create folder in document directory #///
if let data = response.result.value{
let path = (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString).appendingPathComponent("Videos")
if !FileManager.default.fileExists(atPath: path) {
try! FileManager.default.createDirectory(atPath: path, withIntermediateDirectories: true, attributes: nil)
}
let fileURL = URL(fileURLWithPath:path).appendingPathComponent("\(self.id)/\(self.titleVideo)/output.mp4")
print(fileURL)
do{
try data.write(to: fileURL, options: .atomic)
}catch{
print("could not download")
}
print(fileURL)
}
}
}
// ----------------------------------------------------------------------
// MARK:- Scrollview Method
// ----------------------------------------------------------------------
func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
if scrollView == tableVideo {
pauseIndexPath = Int(scrollView.contentOffset.y / scrollView.frame.size.height)
pausePlayeVideos()
}
}
func scrollViewDidEndDragging(_ scrollView: UIScrollView, willDecelerate decelerate: Bool) {
if scrollView == tableVideo {
if !decelerate {
pausePlayeVideos()
}
}
}
// ----------------------------------------------------------------------
// MARK:- Function Pause & Play Button
// ----------------------------------------------------------------------
func puasVideoWhenPushVC (index : NSInteger) {
if isUpdate{
guard let cell = tableVideo.cellForRow(at: IndexPath(row: index, section: 0)) as? VideoPlayerTableCell else { return }
ASVideoPlayerController.sharedVideoPlayer.pauseVideo(forLayer: cell.videoLayer, url: videoData[index].videoLink ?? "")
}
else{
guard let cell = tableVideo.cellForRow(at: IndexPath(row: index, section: 0)) as? VideoPlayerTableCell else { return }
ASVideoPlayerController.sharedVideoPlayer.pauseVideo(forLayer: cell.videoLayer, url: allVideoData[index].videoLink ?? "")
}
}
@objc func pausePlayeVideos(){
ASVideoPlayerController.sharedVideoPlayer.pausePlayeVideosFor(tableView: tableVideo)
}
@objc func appEnteredFromBackground() {
ASVideoPlayerController.sharedVideoPlayer.pausePlayeVideosFor(tableView: tableVideo, appEnteredFromBackground: true)
}
@objc func didTapPlayPauseButton(_ sender: UIButton) {
guard let cell = tableVideo.cellForRow(at: IndexPath(row: sender.tag, section: 0)) as? VideoPlayerTableCell else { return }
if sender.isSelected {
if isUpdate{
ASVideoPlayerController.sharedVideoPlayer.playVideo(withLayer: cell.videoLayer, url: videoData[sender.tag].videoLink ?? "")
}else{
ASVideoPlayerController.sharedVideoPlayer.playVideo(withLayer: cell.videoLayer, url: allVideoData[sender.tag].videoLink ?? "")
}
} else {
if isUpdate{
ASVideoPlayerController.sharedVideoPlayer.pauseVideo(forLayer: cell.videoLayer, url: videoData[sender.tag].videoLink ?? "")
}else{
ASVideoPlayerController.sharedVideoPlayer.pauseVideo(forLayer: cell.videoLayer, url: allVideoData[sender.tag].videoLink ?? "")
}
}
sender.isSelected = !sender.isSelected
}

Swift 5
@IBAction func buttonPressed(_ sender: Any) {
let videoURL = course.introductionVideoURL
let player = AVPlayer(url: videoURL)
let playerViewController = AVPlayerViewController()
playerViewController.player = player
present(playerViewController, animated: true, completion: {
playerViewController.player!.play()
})
}
// Here `course` is a model object: the URL is stored inside the model, so I'm reading it from there.
// introductionVideoURL is a URL property declared inside the model:
var introductionVideoURL: URL
Alternatively, instead of reading the URL from the model, you can replace this line
let videoURL = course.introductionVideoURL
with
guard let videoURL = URL(string: "https://something.mp4") else {
return
}
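For context, the course model referenced above could be as small as this (a hypothetical sketch; the original model isn't shown in the answer):
struct Course {
    // Hypothetical model type: only the property used above is sketched here.
    var introductionVideoURL: URL
}

let course = Course(introductionVideoURL: URL(string: "https://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4")!)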

Related

AVPlayerViewController controls on iPhone X

I am not able to find a way to display the AVPlayerViewController controls on iPhone X. Setting the player view controller's video gravity to aspect fill shows the controls, but then the video fills the whole screen and clips the remaining portions. Any idea? Here is my code:
func loadVideoURL () {
let fileURL = URL.init(string: (exerciseDetailsObj?.video_file)!)
avPlayer = AVPlayer(url: fileURL!)
let playerController = AVPlayerViewController()
playerController.player = avPlayer
playerController.view.frame = CGRect(x: 0, y: 0, width:viewPlayerContainer.bounds.size.width, height: viewPlayerContainer.bounds.size.height)
playerController.showsPlaybackControls = false
avPlayerController = playerController
self.addChild(avPlayerController)
viewPlayerContainer.addSubview(avPlayerController.view)
viewPlayerContainer.sendSubviewToBack(avPlayerController.view)
avPlayerController.didMove(toParent: self)
avPlayerController.addObserver(self, forKeyPath: "videoBounds", options: NSKeyValueObservingOptions.new, context: nil)
}
}
func enterFullscreen(playerViewController: AVPlayerViewController) {
playerViewController.showsPlaybackControls = true // not displayed on iPhone X
let selectorName: String = {
if #available(iOS 11.3, *) {
return "_transitionToFullScreenAnimated:interactive:completionHandler:"
} else if #available(iOS 11, *) {
return "_transitionToFullScreenAnimated:completionHandler:"
} else {
return "_transitionToFullScreenViewControllerAnimated:completionHandler:"
}
}()
let selectorToForceFullScreenMode = NSSelectorFromString(selectorName)
if playerViewController.responds(to: selectorToForceFullScreenMode) {
playerViewController.perform(selectorToForceFullScreenMode, with: true, with: nil)
}
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?)
{
if keyPath == "videoBounds"
{
if let rect = change?[.newKey] as? NSValue
{
if let newrect = rect.cgRectValue as CGRect?
{
if newrect.size.height <= 214
{
avPlayerController.showsPlaybackControls = false
}
}
}
}
}
Check this out; refer to the code below and adapt it to your own code.
@IBOutlet weak var videoPlayerView: UIView! // the UIView in the storyboard on which you want to display the video
var player: AVPlayer?
var playerLayer = AVPlayerLayer()
override func viewDidLoad()
{
super.viewDidLoad()
self.videoPlayerView.layer.insertSublayer(playerLayer, at: 0)
player = AVPlayer(url: videoUrl!)
playerLayer.player = player
self.player?.play()
}
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
playerLayer.videoGravity = .resizeAspectFill
playerLayer.frame = self.videoPlayerView.bounds
}

How to add new items to AVQueuePlayer currently playing with old items

Adding New Items To AVQueuePlayer Doesn't Play
I have a scenario where I'm streaming HLS audio from a server. Initially I make an array of AVPlayerItem and initialise the AVQueuePlayer; on a button click it plays the initial items just fine. But down the road, when about half of my items have finished playing, I try to add more items at the end of the list, and those don't play.
class ViewController: UIViewController, AVAudioPlayerDelegate{
let baseUrl = "********"
var player: AVQueuePlayer?
var items: [AVPlayerItem] = [];
let controller = AVPlayerViewController()
var counter = 0;
override func viewDidLoad() {
super.viewDidLoad()
NotificationCenter.default.addObserver(self,
selector: #selector(animationDidFinish(_:)),
name: .AVPlayerItemDidPlayToEndTime,
object: player?.items())
}
override func viewDidAppear(_ animated: Bool) {
queueMaker()
}
@IBAction func btnTapped(_ sender: UIButton) {
let player = AVQueuePlayer(items: self.items)
let playerLayer = AVPlayerLayer(player: player)
self.view.layer.addSublayer(playerLayer)
player.play()
}
@objc func animationDidFinish(_ notification: NSNotification) {
print("HLS : AVQueuePlayer Item Finished Playing")
counter += 1
if ((items.count) - counter) < 4 {
print("HLS : Adding More Items")
addMoreItemsToQueue()
}
}
deinit {
NotificationCenter.default.removeObserver(self)
}
func queueMaker() {
let listOfAyahs: [String] = ["1/1.m3u8","2/2.m3u8","3/3.m3u8","4/4.m3u8","5/5.m3u8","6/6.m3u8","7/7.m3u8"]
for item in listOfAyahs {
let stringUrl = "\(self.baseUrl)\(item)"
let url = URL(string: stringUrl)
let item = AVPlayerItem(url: url!)
items.append(item)
}
}
func addMoreItemsToQueue() {
let listOfAyahs: [String] = ["8/8.m3u8","9/9.m3u8","10/10.m3u8","11/11.m3u8","12/12.m3u8","13/13.m3u8","14/14.m3u8"]
for item in listOfAyahs {
let stringUrl = "\(self.baseUrl)\(item)"
let url = URL(string: stringUrl)
let item = AVPlayerItem(url: url!)
items.append(item)
}
}
}
You are initialising the AVQueuePlayer with some items, which becomes the player's own copy of the queue; later you append new AVPlayerItems to your items array, which the AVQueuePlayer knows nothing about. So update the following function and it should work:
@IBAction func btnTapped(_ sender: UIButton) {
// UPDATE THE PLAYER's SCOPE TO GLOBAL
player = AVQueuePlayer(items: self.items)
let playerLayer = AVPlayerLayer(player: player)
self.view.layer.addSublayer(playerLayer)
player?.play()
}
And replace addMoreItemsToQueue() so it inserts the items directly into the player's queue, like this:
func addMoreItemsToQueue() {
let listOfAyahs: [String] = ["8/8.m3u8","9/9.m3u8","10/10.m3u8","11/11.m3u8","12/12.m3u8","13/13.m3u8","14/14.m3u8"]
var lastItem = self.player?.items().last
for item in listOfAyahs {
let stringUrl = "\(self.baseUrl)\(item)"
let url = URL(string: stringUrl)
let item = AVPlayerItem(url: url!)
self.player?.insert(item, after: lastItem)
lastItem = item
}
}
Try and share the results.
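If you want to be defensive about it, AVQueuePlayer also provides canInsert(_:after:), and passing nil for after appends to the end of the queue; a small variation of the loop above, under those assumptions:
func addMoreItemsToQueue() {
    let listOfAyahs = ["8/8.m3u8", "9/9.m3u8", "10/10.m3u8", "11/11.m3u8", "12/12.m3u8", "13/13.m3u8", "14/14.m3u8"]
    for path in listOfAyahs {
        guard let url = URL(string: "\(self.baseUrl)\(path)") else { continue }
        let item = AVPlayerItem(url: url)
        // Passing nil for `after` appends the item to the end of the queue.
        if self.player?.canInsert(item, after: nil) == true {
            self.player?.insert(item, after: nil)
        }
    }
}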

How to manage the AVAudioPlayer isPlaying in each tableviewCell?

I have a table view with multiple cells, and each cell owns an AVAudioPlayer.
The problem is that I don't know how to manage those players: when I play the first AVAudioPlayer and then play the second one, the sounds overlap.
How do I stop the first AVAudioPlayer in my customized cell and play the second one?
Thanks.
This is my customized cell:
class TableViewCell: UITableViewCell {
@IBOutlet weak var myImageView: UIImageView!
@IBOutlet weak var myChatBubbleView: UIView!
@IBOutlet weak var myDateLabel: UILabel!
@IBOutlet weak var mySecondLabel: UILabel!
@IBOutlet weak var myRecordPlayerBtn: MenuButton!
private var timer:Timer?
private var elapsedTimeInSecond:Int = 0
var audioPlayer:AVAudioPlayer?
var message:ChatroomMessage?
var chatroomId:String = ""
var delegate:PlayRecordDelegate?
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
self.backgroundColor = defaultBackgroundColor
self.tintColor = defaultChatroomCheckButtonColor
myImageView.layer.masksToBounds = true
myImageView.layer.cornerRadius = defaultIconRadius
myChatBubbleView.backgroundColor = defaultChatGreenBubbleColor
myChatBubbleView.layer.cornerRadius = defaultButtonRadius
myDateLabel.textColor = defaultChatTimeColor
mySecondLabel.textColor = defaultChatTimeColor
mySecondLabel.isHidden = true
myRecordPlayerBtn.imageView?.animationDuration = 1
myRecordPlayerBtn.imageView?.animationImages = [
UIImage(named: "img_myRocordPlaying1")!,
UIImage(named: "img_myRocordPlaying2")!,
UIImage(named: "img_myRocordPlaying3")!
]
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
func loadByMessage(_ message:ChatroomMessage, chatroomId:String) {
self.message = message
self.chatroomId = chatroomId
myRecordPlayerBtn.addTarget(self, action: #selector(recordPlay), for: .touchUpInside)
}
func resetRecordAnimation() {
self.myRecordPlayerBtn.imageView!.stopAnimating()
self.myRecordPlayerBtn.isSelected = false
}
func recordPlay(_ sender: UIButton) {
self.myRecordPlayerBtn.imageView?.startAnimating()
let documentsDirectoryURL = try! FileManager().url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("\(chatroomId)/Record/")
let fileName = message?.content.substring(from: 62)
let fileURL = documentsDirectoryURL.appendingPathComponent(fileName!)
if FileManager.default.fileExists(atPath: fileURL.path) {
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
self.elapsedTimeInSecond = Int(audioDurationSeconds)
if audioPlayer?.isPlaying == true {
audioPlayer?.stop()
DispatchQueue.main.async {
self.resetTimer(second: self.elapsedTimeInSecond)
self.startTimer()
}
}
updateTimeLabel()
startTimer()
audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
audioPlayer?.delegate = self
audioPlayer?.play()
}else{
//don't have file in local
let recordUrl = URL(string: (message?.content)!)
URLSession.shared.downloadTask(with: recordUrl!, completionHandler: { (location, response, error) in
guard
let httpURLResponse = response as? HTTPURLResponse, httpURLResponse.statusCode == 200,
let mimeType = response?.mimeType, mimeType.hasPrefix("audio"),
let location = location, error == nil
else { return }
do {
try FileManager.default.moveItem(at: location, to: fileURL)
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
self.elapsedTimeInSecond = Int(audioDurationSeconds)
DispatchQueue.main.async {
self.updateTimeLabel()
self.startTimer()
}
self.audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
self.audioPlayer?.delegate = self
self.audioPlayer?.play()
} catch {
print(error)
}
}).resume()
}
}
func startTimer() {
timer?.invalidate()
timer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true, block: { (timer) in
self.elapsedTimeInSecond -= 1
self.updateTimeLabel()
})
}
func resetTimer(second:Int) {
timer?.invalidate()
elapsedTimeInSecond = second
updateTimeLabel()
}
func updateTimeLabel() {
let seconds = elapsedTimeInSecond % 60
let minutes = (elapsedTimeInSecond/60) % 60
mySecondLabel.isHidden = false
mySecondLabel.text = String(format: "%02d:%02d", minutes,seconds)
}
}
extension TableViewCell:AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
let documentsDirectoryURL = try! FileManager().url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("\(chatroomId)/Record/")
let fileName = message?.content.substring(from: 62)
let fileURL = documentsDirectoryURL.appendingPathComponent(fileName!)
if FileManager.default.fileExists(atPath: fileURL.path) {
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
DispatchQueue.main.async {
self.resetTimer(second: Int(audioDurationSeconds))
self.myRecordPlayerBtn.imageView!.stopAnimating()
self.myRecordPlayerBtn.imageView?.image = #imageLiteral(resourceName: "img_myRocordDefault")
}
}
}
}
Probably first check whether the player exists and is already playing before starting a new one:
if audioPlayer != nil{
if audioPlayer?.isPlaying == true {
audioPlayer?.stop()
DispatchQueue.main.async {
self.resetTimer(second: self.elapsedTimeInSecond)
self.startTimer()
}
}
}
If you don't want to play two audio tracks at the same time, you should use a shared instance of AVAudioPlayer.
It is better for performance, and you can define the instance as a static var in your controller so it is accessible from every cell.
I developed a music player application and used a shared instance in a MusicPlayManager:
class MusicPlayManager {
var player: AVAudioPlayer?
static let sharedInstance = MusicPlayManager()
private init() { }
// something else, such as playNext, playPrevious methods
}
In your view controller, you can then use MusicPlayManager.sharedInstance.player.
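A minimal sketch of how a cell might use that shared instance so only one file plays at a time, assuming MusicPlayManager as defined above and a local fileURL for the cell's recording:
func playRecording(at fileURL: URL) {
    let manager = MusicPlayManager.sharedInstance
    // Stop whatever another cell may have started before playing this file.
    if manager.player?.isPlaying == true {
        manager.player?.stop()
    }
    manager.player = try? AVAudioPlayer(contentsOf: fileURL)
    manager.player?.play()
}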

How do I play many audio files one by one

I created 4 UIButtons on the main storyboard.
I would like "Button4" to run the other buttons' functions in a row: when I press Button 4, player 1 should play first, then player 2, then player 3.
However, when I press "Button4", "Button2" and "Button3" are played at the same time.
fileprivate var player1:AVAudioPlayer?
fileprivate var player2:AVAudioPlayer?
fileprivate var player3:AVAudioPlayer?
let url1 = Bundle.main.bundleURL.appendingPathComponent("music1.mp3")
let url2 = Bundle.main.bundleURL.appendingPathComponent("music2.mp3")
let url3 = Bundle.main.bundleURL.appendingPathComponent("music3.mp3")
@IBAction func pushButton1(sender: UIButton) {
Player(url: url1)
}
@IBAction func pushButton2(sender: UIButton) {
Player1(url: url2)
}
@IBAction func pushButton3(_ sender: UIButton) {
Player2(url: url1, url2: url2, url3: url3)
}
// "yourButton2" and "yourButton3" are played at the same time in this code, in the player3 branch below
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
if (player === player1) {
yourButton.isSelected = false
} else if (player === player2) {
yourButton2.isSelected = false
} else if (player === player3) {
yourButton.isSelected = false
player2!.play()
yourButton2.isSelected = true
player2!.play()
yourButton3.isSelected = true
player1!.play()
}
}
func Player(url: URL) {
do {
try player1 = AVAudioPlayer(contentsOf:url)
player1!.play()
yourButton.isSelected = true
player1!.delegate = self
} catch {
print(error)
}
}
func Player1(url: URL) {
do {
try player2 = AVAudioPlayer(contentsOf:url)
player2!.play()
yourButton2.isSelected = true
player2!.delegate = self
} catch {
print(error)
}
}
func Player2(url: URL, url2: URL, url3: URL) {
do {
try player3 = AVAudioPlayer(contentsOf:url)
try player2 = AVAudioPlayer(contentsOf: url2)
try player1 = AVAudioPlayer(contentsOf: url3)
player3!.play()
yourButton.isSelected = true
player3!.delegate = self
player2!.delegate = self
player1!.delegate = self
} catch {
print(error)
}
}
Rather than using an AVAudioPlayer, how about an AVQueuePlayer?
Here's a quick example:
var files = ["file1", "file2", "file3"]
var player: AVQueuePlayer = {
var pathArray = [String]()
files.forEach { resource in
if let path = Bundle.main.path(forResource: resource, ofType: "mp3") {
pathArray.append(path)
}
}
var urlArray = [URL]()
pathArray.forEach { path in
urlArray.append(URL(fileURLWithPath: path))
}
var playerItems = [AVPlayerItem]()
urlArray.forEach { url in
playerItems.append(AVPlayerItem(url: url))
}
let player = AVQueuePlayer(items: playerItems)
player.actionAtItemEnd = AVPlayerActionAtItemEnd.advance
return player
}()
and in your button's action:
@IBAction func buttonTapped(_ sender: UIButton) {
files = ["file2", "file3", "file1"]
player.play()
}
Admittedly, this is pretty quick and dirty, but it's something like this, because we're passing the files array into the player. It shouldn't be too difficult to optimize this code further.
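One possible refinement: if you also need to know when the whole queue has finished, you can observe AVPlayerItemDidPlayToEndTime for the last item in the queue — a sketch, assuming the player property defined above:
// Keep the token so the observer can be removed later (e.g. in deinit).
var endOfQueueObserver: NSObjectProtocol?

func observeEndOfQueue() {
    endOfQueueObserver = NotificationCenter.default.addObserver(
        forName: .AVPlayerItemDidPlayToEndTime,
        object: player.items().last,   // a nil object would fire for every item
        queue: .main
    ) { _ in
        print("queue finished")
    }
}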
Edit: I realized I wasn't passing in an array of AVPlayerItems, so I updated the example above.
Edit: each of the buttons 1, 2, 3, etc. will work, but "push button 2" is a bit of a mix; could you re-explain?

Need to show the image of the video in MPMoviePlayerController (Swift)

I already asked this question on my co-worker's profile, but there is no solution yet. Please help me solve this problem; I have been stuck for more than 4 hours.
What I did so far:
I choose a video from the gallery and display it on screen using MPMoviePlayerController, picking two videos one by one. When I select the first video it displays, but when I move on to load the second video, the first video's screen goes black. I need that player to keep showing the video's image. How do I do that? Please help me out.
My code:
import UIKit
import AVFoundation
import MobileCoreServices
import AssetsLibrary
import MediaPlayer
import CoreMedia
class ViewController: UIViewController,UIGestureRecognizerDelegate {
var Asset1: AVAsset?
var Asset2: AVAsset?
var Asset3: AVAsset?
var audioAsset: AVAsset?
var loadingAssetOne = false
// set a duplicate image as the thumbnail image for audio
@IBOutlet weak var musicImg: UIImageView!
var videoPlayer = MPMoviePlayerController()
var mediaUI = UIImagePickerController()
var videoURL = NSURL()
override func viewDidLoad() {
super.viewDidLoad()
musicImg.hidden = true
let gestureRecognizer: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: "handleTapGesture")
gestureRecognizer.delegate = self;
videoPlayer.view.addGestureRecognizer(gestureRecognizer)
}
func handleTap(gestureRecognizer: UIGestureRecognizer) {
//location = gestureRecognizer .locationInView(videoPlayer.view)
print("tapped")
}
// MARK: - gesture delegate
// this allows you to dispatch touches
func gestureRecognizer(gestureRecognizer: UIGestureRecognizer, shouldReceiveTouch touch: UITouch) -> Bool {
return true
}
func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer,
shouldRecognizeSimultaneouslyWithGestureRecognizer otherGestureRecognizer: UIGestureRecognizer) -> Bool {
return true
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func startMediaBrowserFromViewController(viewController: UIViewController!, usingDelegate delegate : protocol<UINavigationControllerDelegate, UIImagePickerControllerDelegate>!) -> Bool {
if UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum) == false {
return false
}
let mediaUI = UIImagePickerController()
mediaUI.sourceType = .SavedPhotosAlbum
mediaUI.mediaTypes = [kUTTypeMovie as String]
mediaUI.allowsEditing = true
mediaUI.delegate = delegate
presentViewController(mediaUI, animated: true, completion: nil)
return true
}
// after merge all video and audio. the final video will be saved in gallery and also will display like preview
func exportDidFinish(session: AVAssetExportSession) {
if session.status == AVAssetExportSessionStatus.Completed {
let outputURL = session.outputURL
let library = ALAssetsLibrary()
if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in
if error != nil {
print("some files went wrong")
} else {
// get the output url to display the final video in screen
self.videoURL = outputURL!
self.mediaUI.dismissViewControllerAnimated(true, completion: nil)
self.videoPlayer = MPMoviePlayerController()
self.videoPlayer.contentURL = self.videoURL
self.videoPlayer.controlStyle = .Embedded
self.videoPlayer.scalingMode = .AspectFill
self.videoPlayer.shouldAutoplay = true
self.videoPlayer.backgroundView.backgroundColor = UIColor.clearColor()
self.videoPlayer.fullscreen = true
self.videoPlayer.view.frame = CGRectMake(38, 442, 220, 106)
self.view.addSubview(self.videoPlayer.view)
self.videoPlayer.play()
self.videoPlayer.prepareToPlay()
}
})
}
}
Asset1 = nil
Asset2 = nil
Asset3 = nil
audioAsset = nil
}
// click first video
@IBAction func FirstVideo(sender: AnyObject) {
loadingAssetOne = true
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click second video
@IBAction func SecondVideo(sender: AnyObject) {
loadingAssetOne = false
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click audio
@IBAction func Audio(sender: AnyObject) {
let mediaPickerController = MPMediaPickerController(mediaTypes: .Any)
mediaPickerController.delegate = self
mediaPickerController.prompt = "Select Audio"
presentViewController(mediaPickerController, animated: true, completion: nil)
}
@IBAction func playPreview(sender: AnyObject) {
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// orientation for the video
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
var assetOrientation = UIImageOrientation.Up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .Right
isPortrait = true
} else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .Left
isPortrait = true
} else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .Up
} else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .Down
}
return (assetOrientation, isPortrait)
}
func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
let transform = assetTrack.preferredTransform
let assetInfo = orientationFromTransform(transform)
var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width
if assetInfo.isPortrait {
scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
atTime: kCMTimeZero)
} else {
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
if assetInfo.orientation == .Down {
let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
let windowBounds = UIScreen.mainScreen().bounds
let yFix = assetTrack.naturalSize.height + windowBounds.height
let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
}
instruction.setTransform(concat, atTime: kCMTimeZero)
}
return instruction
}
// merge all file
@IBAction func MergeAll(sender: AnyObject) {
if let firstAsset = Asset1, secondAsset = Asset2 {
let mixComposition = AVMutableComposition()
//load first video
let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
do {
try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0] ,
atTime: kCMTimeZero)
} catch _ {
}
// load second video
let secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
do {
try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration),
ofTrack: secondAsset.tracksWithMediaType(AVMediaTypeVideo)[0] ,
atTime: firstAsset.duration)
} catch _ {
}
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration))
let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)
mainInstruction.layerInstructions = [firstInstruction, secondInstruction]
let mainComposition = AVMutableVideoComposition()
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(1, 30)
mainComposition.renderSize = CGSize(width: UIScreen.mainScreen().bounds.width, height: UIScreen.mainScreen().bounds.height)
//load audio
if let loadedAudioAsset = audioAsset {
let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: 0)
do {
try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)),
ofTrack: loadedAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0] ,
atTime: kCMTimeZero)
} catch _ {
}
}
// save the final video to gallery
let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let dateFormatter = NSDateFormatter()
dateFormatter.dateStyle = .LongStyle
dateFormatter.timeStyle = .ShortStyle
let date = dateFormatter.stringFromDate(NSDate())
// let savePath = documentDirectory.URLByAppendingPathComponent("mergeVideo-\(date).mov")
let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("final-\(date).mov")
let url = NSURL(fileURLWithPath: savePath)
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter!.outputURL = url
exporter!.outputFileType = AVFileTypeQuickTimeMovie
exporter!.shouldOptimizeForNetworkUse = true
exporter!.videoComposition = mainComposition
exporter!.exportAsynchronouslyWithCompletionHandler() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
self.exportDidFinish(exporter!)
})
}
}
}
}
extension ViewController: UIImagePickerControllerDelegate {
// display the first & second video after it picked from gallery
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
let mediaType = info[UIImagePickerControllerMediaType] as! NSString
dismissViewControllerAnimated(true, completion: nil)
if mediaType == kUTTypeMovie {
let avAsset = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
if loadingAssetOne {
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
self.videoPlayer = MPMoviePlayerController()
self.videoPlayer.contentURL = videoURL
self.videoPlayer.view.frame = CGRectMake(38, 57, 220, 106)
self.view.addSubview(self.videoPlayer.view)
self.videoPlayer.controlStyle = .Embedded
self.videoPlayer.scalingMode = .AspectFill
self.videoPlayer.shouldAutoplay = true
self.videoPlayer.prepareToPlay()
self.videoPlayer.play()
Asset1 = avAsset
} else {
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
self.videoPlayer = MPMoviePlayerController()
self.videoPlayer.contentURL = videoURL
self.videoPlayer.view.frame = CGRectMake(38, 206, 220, 106)
self.view.addSubview(self.videoPlayer.view)
self.videoPlayer.play()
self.videoPlayer.controlStyle = .Embedded
self.videoPlayer.scalingMode = .AspectFill
self.videoPlayer.shouldAutoplay = true
self.videoPlayer.prepareToPlay()
Asset2 = avAsset
}
}
}
}
extension ViewController: UINavigationControllerDelegate {
}
extension ViewController: MPMediaPickerControllerDelegate {
func mediaPicker(mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection) {
let selectedSongs = mediaItemCollection.items
if selectedSongs.count > 0 {
let song = selectedSongs[0]
if let vURL = song.valueForProperty(MPMediaItemPropertyAssetURL) as? NSURL {
audioAsset = AVAsset(URL: vURL)
dismissViewControllerAnimated(true, completion: nil)
mediaUI.dismissViewControllerAnimated(true, completion: nil)
musicImg.hidden = false
let alert = UIAlertController(title: "yes", message: "Audio Loaded", preferredStyle: .Alert)
alert.addAction(UIAlertAction(title: "OK", style: .Cancel, handler:nil))
presentViewController(alert, animated: true, completion: nil)
} else {
dismissViewControllerAnimated(true, completion: nil)
let alert = UIAlertController(title: "No audio", message: "Audio Not Loaded", preferredStyle: .Alert)
alert.addAction(UIAlertAction(title: "OK", style: .Cancel, handler:nil))
presentViewController(alert, animated: true, completion: nil)
}
} else {
dismissViewControllerAnimated(true, completion: nil)
}
}
func mediaPickerDidCancel(mediaPicker: MPMediaPickerController) {
dismissViewControllerAnimated(true, completion: nil)
}
}
I need to show the image of the selected video on screen. Please help me resolve this, thanks.
I followed the tutorial linked here.
If you want to play more than one video in the same view, you can use AVPlayer from AVFoundation. I have read more about this on Stack Overflow, in the Apple documentation, and on other forums; they all say it is not possible with MPMoviePlayerViewController, but it is possible with AVPlayer.
import UIKit
import AVFoundation
import MobileCoreServices
import AssetsLibrary
import MediaPlayer
import CoreMedia
class ViewController: UIViewController,UIGestureRecognizerDelegate {
var optionalInteger: Int?
var Asset1: AVAsset?
var Asset2: AVAsset?
var Asset3: AVAsset?
var audioAsset: AVAsset?
var loadingAssetOne = false
@IBOutlet weak var musicImg: UIImageView!
@IBOutlet var videoView: UIView!
@IBOutlet var videoViewTwo: UIView!
var player : AVPlayer? = nil
var playerLayer : AVPlayerLayer? = nil
var asset : AVAsset? = nil
var playerItem: AVPlayerItem? = nil
override func viewDidLoad()
{
super.viewDidLoad()
musicImg.hidden = true
let gestureRecognizer: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: "handleTapGesture")
gestureRecognizer.delegate = self;
videoPlayer.view.addGestureRecognizer(gestureRecognizer)
imageViewVideoOne.hidden = true
imageViewVideoTwo.hidden = true
}
func startMediaBrowserFromViewController(viewController: UIViewController!, usingDelegate delegate : protocol<UINavigationControllerDelegate, UIImagePickerControllerDelegate>!) -> Bool
{
if UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum) == false {
return false
}
let mediaUI = UIImagePickerController()
mediaUI.sourceType = .SavedPhotosAlbum
mediaUI.mediaTypes = [kUTTypeMovie as String]
mediaUI.allowsEditing = true
mediaUI.delegate = delegate
presentViewController(mediaUI, animated: true, completion: nil)
return true
}
// click first video
@IBAction func FirstVideo(sender: AnyObject) {
loadingAssetOne = true
optionalInteger = 0
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click second video
@IBAction func SecondVideo(sender: AnyObject) {
loadingAssetOne = false
optionalInteger = 1
startMediaBrowserFromViewController(self, usingDelegate: self)
}
// click audio
@IBAction func Audio(sender: AnyObject) {
let mediaPickerController = MPMediaPickerController(mediaTypes: .Any)
mediaPickerController.delegate = self
mediaPickerController.prompt = "Select Audio"
presentViewController(mediaPickerController, animated: true, completion: nil)
}
@IBAction func playPreview(sender: AnyObject)
{
startMediaBrowserFromViewController(self, usingDelegate: self)
}
}
extension ViewController: UIImagePickerControllerDelegate
{
// display the first & second video after it picked from gallery
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject])
{
let mediaType = info[UIImagePickerControllerMediaType] as! NSString
dismissViewControllerAnimated(true, completion: nil)
if mediaType == kUTTypeMovie
{
if loadingAssetOne
{
let avAsset = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
print(avAsset)
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
let videoURLWithPath = info[UIImagePickerControllerMediaURL] as? NSURL
self.videoURL = videoURLWithPath!
asset = AVAsset(URL: videoURL) as AVAsset
playerItem = AVPlayerItem(asset: asset!)
player = AVPlayer (playerItem: self.playerItem!)
playerLayer = AVPlayerLayer(player: self.player)
videoView.frame = CGRectMake(38, 57, 220, 106)
playerLayer?.frame = videoView.frame
videoView.layer.addSublayer(self.playerLayer!)
player!.play()
}
else
{
let avAssetTwo = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
print(avAssetTwo)
if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
self.videoURL = vURL
} else {
print("oops, no url")
}
mediaUI.dismissViewControllerAnimated(true, completion: nil)
let videoURLWithPath = info[UIImagePickerControllerMediaURL] as? NSURL
self.videoURL = videoURLWithPath!
asset = AVAsset(URL: videoURL) as AVAsset
playerItem = AVPlayerItem(asset: asset!)
player = AVPlayer (playerItem: self.playerItem!)
playerLayer = AVPlayerLayer(player: self.player)
videoView.frame = CGRectMake(38, 206, 220, 106)
playerLayer?.frame = videoView.frame
videoView.layer.addSublayer(self.playerLayer!)
player!.play()
}
}
}
}
Sources for your question
Multiple Video same screen
Multiple Video Playback on iOS
Apple MPMoviePlayerViewController Document
Playing Multiple Video Simultaneously
Apple AVPlayer Document
Playing Multiple Videos
Playback
Playing Video in iOS
