How to play a local video with Swift? - ios

I have a short mp4 video file that I've added to my current Xcode 6 beta project.
I want to play the video in my app.
After hours of searching, I can't find anything remotely helpful. Is there a way to accomplish this with Swift, or do you have to use Objective-C?
Can I get pointed in the right direction? I can't be the only one wondering this.

Sure you can use Swift!
1. Adding the video file
Add the video (let's call it video.m4v) to your Xcode project.
2. Checking that your video is in the bundle
Open the Project Navigator (cmd + 1).
Then select your project root > your target > Build Phases > Copy Bundle Resources.
Your video MUST be listed here. If it's not, add it using the plus button.
3. Code
Open your View Controller and write this code.
import UIKit
import AVKit
import AVFoundation
class ViewController: UIViewController {
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
playVideo()
}
private func playVideo() {
guard let path = Bundle.main.path(forResource: "video", ofType:"m4v") else {
debugPrint("video.m4v not found")
return
}
let player = AVPlayer(url: URL(fileURLWithPath: path))
let playerController = AVPlayerViewController()
playerController.player = player
present(playerController, animated: true) {
player.play()
}
}
}

Another Swift 3 example; the solution provided above did not work for me.
private func playVideo(from file:String) {
let file = file.components(separatedBy: ".")
guard let path = Bundle.main.path(forResource: file[0], ofType:file[1]) else {
debugPrint( "\(file.joined(separator: ".")) not found")
return
}
let player = AVPlayer(url: URL(fileURLWithPath: path))
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.view.bounds
self.view.layer.addSublayer(playerLayer)
player.play()
}
Usage:
playVideo(from: "video.extension")
Note:
Check Copy Bundle Resources under Build Phases to ensure that the video is available to the Project.

You can set up AVPlayer in another way that opens up full customization of your video player screen.
Swift 2.3
Create a UIView subclass for playing video (basically you can use any UIView object; the only thing needed is an AVPlayerLayer. I set it up this way because it is clearer to me).
import AVFoundation
import UIKit
class PlayerView: UIView {
override class func layerClass() -> AnyClass {
return AVPlayerLayer.self
}
var player:AVPlayer? {
set {
if let layer = layer as? AVPlayerLayer {
layer.player = newValue
}
}
get {
if let layer = layer as? AVPlayerLayer {
return layer.player
} else {
return nil
}
}
}
}
Set up your player
import AVFoundation
import Foundation
protocol VideoPlayerDelegate {
func downloadedProgress(progress:Double)
func readyToPlay()
func didUpdateProgress(progress:Double)
func didFinishPlayItem()
func didFailPlayToEnd()
}
let videoContext:UnsafeMutablePointer<Void> = nil
class VideoPlayer : NSObject {
private var assetPlayer:AVPlayer?
private var playerItem:AVPlayerItem?
private var urlAsset:AVURLAsset?
private var videoOutput:AVPlayerItemVideoOutput?
private var assetDuration:Double = 0
private var playerView:PlayerView?
private var autoRepeatPlay:Bool = true
private var autoPlay:Bool = true
var delegate:VideoPlayerDelegate?
var playerRate:Float = 1 {
didSet {
if let player = assetPlayer {
player.rate = playerRate > 0 ? playerRate : 0.0
}
}
}
var volume:Float = 1.0 {
didSet {
if let player = assetPlayer {
player.volume = volume > 0 ? volume : 0.0
}
}
}
// MARK: - Init
convenience init(urlAsset:NSURL, view:PlayerView, startAutoPlay:Bool = true, repeatAfterEnd:Bool = true) {
self.init()
playerView = view
autoPlay = startAutoPlay
autoRepeatPlay = repeatAfterEnd
if let playView = playerView, let playerLayer = playView.layer as? AVPlayerLayer {
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
}
initialSetupWithURL(urlAsset)
prepareToPlay()
}
override init() {
super.init()
}
// MARK: - Public
func isPlaying() -> Bool {
if let player = assetPlayer {
return player.rate > 0
} else {
return false
}
}
func seekToPosition(seconds:Float64) {
if let player = assetPlayer {
pause()
if let timeScale = player.currentItem?.asset.duration.timescale {
player.seekToTime(CMTimeMakeWithSeconds(seconds, timeScale), completionHandler: { (complete) in
self.play()
})
}
}
}
func pause() {
if let player = assetPlayer {
player.pause()
}
}
func play() {
if let player = assetPlayer {
if (player.currentItem?.status == .ReadyToPlay) {
player.play()
player.rate = playerRate
}
}
}
func cleanUp() {
if let item = playerItem {
item.removeObserver(self, forKeyPath: "status")
item.removeObserver(self, forKeyPath: "loadedTimeRanges")
}
NSNotificationCenter.defaultCenter().removeObserver(self)
assetPlayer = nil
playerItem = nil
urlAsset = nil
}
// MARK: - Private
private func prepareToPlay() {
let keys = ["tracks"]
if let asset = urlAsset {
asset.loadValuesAsynchronouslyForKeys(keys, completionHandler: {
dispatch_async(dispatch_get_main_queue(), {
self.startLoading()
})
})
}
}
private func startLoading(){
var error:NSError?
guard let asset = urlAsset else {return}
let status:AVKeyValueStatus = asset.statusOfValueForKey("tracks", error: &error)
if status == AVKeyValueStatus.Loaded {
assetDuration = CMTimeGetSeconds(asset.duration)
let videoOutputOptions = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: videoOutputOptions)
playerItem = AVPlayerItem(asset: asset)
if let item = playerItem {
item.addObserver(self, forKeyPath: "status", options: .Initial, context: videoContext)
item.addObserver(self, forKeyPath: "loadedTimeRanges", options: [.New, .Old], context: videoContext)
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(playerItemDidReachEnd), name: AVPlayerItemDidPlayToEndTimeNotification, object: nil)
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(didFailedToPlayToEnd), name: AVPlayerItemFailedToPlayToEndTimeNotification, object: nil)
if let output = videoOutput {
item.addOutput(output)
item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed
assetPlayer = AVPlayer(playerItem: item)
if let player = assetPlayer {
player.rate = playerRate
}
addPeriodicalObserver()
if let playView = playerView, let layer = playView.layer as? AVPlayerLayer {
layer.player = assetPlayer
print("player created")
}
}
}
}
}
private func addPeriodicalObserver() {
let timeInterval = CMTimeMake(1, 1)
if let player = assetPlayer {
player.addPeriodicTimeObserverForInterval(timeInterval, queue: dispatch_get_main_queue(), usingBlock: { (time) in
self.playerDidChangeTime(time)
})
}
}
private func playerDidChangeTime(time:CMTime) {
if let player = assetPlayer {
let timeNow = CMTimeGetSeconds(player.currentTime())
let progress = timeNow / assetDuration
delegate?.didUpdateProgress(progress)
}
}
@objc private func playerItemDidReachEnd() {
delegate?.didFinishPlayItem()
if let player = assetPlayer {
player.seekToTime(kCMTimeZero)
if autoRepeatPlay == true {
play()
}
}
}
@objc private func didFailedToPlayToEnd() {
delegate?.didFailPlayToEnd()
}
private func playerDidChangeStatus(status:AVPlayerStatus) {
if status == .Failed {
print("Failed to load video")
} else if status == .ReadyToPlay, let player = assetPlayer {
volume = player.volume
delegate?.readyToPlay()
if autoPlay == true && player.rate == 0.0 {
play()
}
}
}
private func moviewPlayerLoadedTimeRangeDidUpdated(ranges:Array<NSValue>) {
var maximum:NSTimeInterval = 0
for value in ranges {
let range:CMTimeRange = value.CMTimeRangeValue
let currentLoadedTimeRange = CMTimeGetSeconds(range.start) + CMTimeGetSeconds(range.duration)
if currentLoadedTimeRange > maximum {
maximum = currentLoadedTimeRange
}
}
let progress:Double = assetDuration == 0 ? 0.0 : Double(maximum) / assetDuration
delegate?.downloadedProgress(progress)
}
deinit {
cleanUp()
}
private func initialSetupWithURL(url:NSURL) {
let options = [AVURLAssetPreferPreciseDurationAndTimingKey : true]
urlAsset = AVURLAsset(URL: url, options: options)
}
// MARK: - Observations
override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
if context == videoContext {
if let key = keyPath {
if key == "status", let player = assetPlayer {
playerDidChangeStatus(player.status)
} else if key == "loadedTimeRanges", let item = playerItem {
moviewPlayerLoadedTimeRangeDidUpdated(item.loadedTimeRanges)
}
}
}
}
}
Usage:
assume you have a view
@IBOutlet private weak var playerView: PlayerView!
private var videoPlayer:VideoPlayer?
and in viewDidLoad()
private func preparePlayer() {
if let filePath = NSBundle.mainBundle().pathForResource("intro", ofType: "m4v") {
let fileURL = NSURL(fileURLWithPath: filePath)
videoPlayer = VideoPlayer(urlAsset: fileURL, view: playerView)
if let player = videoPlayer {
player.playerRate = 0.67
}
}
}
Objective-C
PlayerView.h
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
/*!
@class PlayerView
@discussion Represents a View for playing video. Layer - PlayerLayer
@availability iOS 7 and Up
*/
@interface PlayerView : UIView
/*!
@var player
@discussion Player object
*/
@property (strong, nonatomic) AVPlayer *player;
@end
PlayerView.m
#import "PlayerView.h"
@implementation PlayerView
#pragma mark - LifeCycle
+ (Class)layerClass
{
return [AVPlayerLayer class];
}
#pragma mark - Setter/Getter
- (AVPlayer*)player
{
return [(AVPlayerLayer *)[self layer] player];
}
- (void)setPlayer:(AVPlayer *)player
{
[(AVPlayerLayer *)[self layer] setPlayer:player];
}
@end
VideoPlayer.h
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
#import "PlayerView.h"
/*!
@protocol VideoPlayerDelegate
@discussion Events from VideoPlayer
*/
@protocol VideoPlayerDelegate <NSObject>
@optional
/*!
@brief Called whenever the progress of the played item changes
@param progress
Playing progress
*/
- (void)progressDidUpdate:(CGFloat)progress;
/*!
@brief Called whenever the downloaded item progress changes
@param progress
Playing progress
*/
- (void)downloadingProgress:(CGFloat)progress;
/*!
@brief Called when the playing time changes
@param time
Playing progress
*/
- (void)progressTimeChanged:(CMTime)time;
/*!
@brief Called when the player finishes playing the item
*/
- (void)playerDidPlayItem;
/*!
@brief Called when the player is ready to play the item
*/
- (void)isReadyToPlay;
@end
/*!
@class VideoPlayer
@discussion Video Player
@code
self.videoPlayer = [[VideoPlayer alloc] initVideoPlayerWithURL:someURL playerView:self.playerView];
[self.videoPlayer prepareToPlay];
self.videoPlayer.delegate = self; //optional
//after when required play item
[self.videoPlayer play];
@endcode
*/
@interface VideoPlayer : NSObject
/*!
@var delegate
@abstract Delegate for VideoPlayer
@discussion Set an object to this property to get responses and notifications from this class
*/
@property (weak, nonatomic) id <VideoPlayerDelegate> delegate;
/*!
@var volume
@discussion volume of the played asset
*/
@property (assign, nonatomic) CGFloat volume;
/*!
@var autoRepeat
@discussion indicates whether the player should repeat content when it finishes playing
*/
@property (assign, nonatomic) BOOL autoRepeat;
/*!
@brief Create player with asset URL
@param urlAsset
Source URL
@result
instance of VideoPlayer
*/
- (instancetype)initVideoPlayerWithURL:(NSURL *)urlAsset;
/*!
@brief Create player with asset URL and configure the selected view for showing the result
@param urlAsset
Source URL
@param view
View on which the result will be shown
@result
instance of VideoPlayer
*/
- (instancetype)initVideoPlayerWithURL:(NSURL *)urlAsset playerView:(PlayerView *)view;
/*!
@brief Call this method after creating the player to prepare it to play
*/
- (void)prepareToPlay;
/*!
@brief Play item
*/
- (void)play;
/*!
@brief Pause item
*/
- (void)pause;
/*!
@brief Stop item
*/
- (void)stop;
/*!
@brief Seek to the required position in the item and play if required
@param progressValue
% of position to seek
@param isPlaying
YES if the player should start to play the item implicitly
*/
- (void)seekPositionAtProgress:(CGFloat)progressValue withPlayingStatus:(BOOL)isPlaying;
/*!
@brief Player state
@result
YES - if playing, NO if not playing
*/
- (BOOL)isPlaying;
/*!
@brief Indicates whether the player can provide a CVPixelBufferRef frame from the item
@result
YES / NO
*/
- (BOOL)canProvideFrame;
/*!
@brief CVPixelBufferRef frame from item
@result
CVPixelBufferRef frame
*/
- (CVPixelBufferRef)getCurrentFramePicture;
@end
VideoPlayer.m
#import "VideoPlayer.h"
typedef NS_ENUM(NSUInteger, InternalStatus) {
InternalStatusPreparation,
InternalStatusReadyToPlay,
};
static const NSString *ItemStatusContext;
@interface VideoPlayer()
@property (strong, nonatomic) AVPlayer *assetPlayer;
@property (strong, nonatomic) AVPlayerItem *playerItem;
@property (strong, nonatomic) AVURLAsset *urlAsset;
@property (strong, atomic) AVPlayerItemVideoOutput *videoOutput;
@property (assign, nonatomic) CGFloat assetDuration;
@property (strong, nonatomic) PlayerView *playerView;
@property (assign, nonatomic) InternalStatus status;
@end
@implementation VideoPlayer
#pragma mark - LifeCycle
- (instancetype)initVideoPlayerWithURL:(NSURL *)urlAsset
{
if (self = [super init]) {
[self initialSetupWithURL:urlAsset];
}
return self;
}
- (instancetype)initVideoPlayerWithURL:(NSURL *)urlAsset playerView:(PlayerView *)view
{
if (self = [super init]) {
((AVPlayerLayer *)view.layer).videoGravity = AVLayerVideoGravityResizeAspectFill;
[self initialSetupWithURL:urlAsset playerView:view];
}
return self;
}
#pragma mark - Public
- (void)play
{
if ((self.assetPlayer.currentItem) && (self.assetPlayer.currentItem.status == AVPlayerItemStatusReadyToPlay)) {
[self.assetPlayer play];
}
}
- (void)pause
{
[self.assetPlayer pause];
}
- (void)seekPositionAtProgress:(CGFloat)progressValue withPlayingStatus:(BOOL)isPlaying
{
[self.assetPlayer pause];
int32_t timeScale = self.assetPlayer.currentItem.asset.duration.timescale;
__weak typeof(self) weakSelf = self;
[self.assetPlayer seekToTime:CMTimeMakeWithSeconds(progressValue, timeScale) completionHandler:^(BOOL finished) {
DLog(@"SEEK To time %f - success", progressValue);
if (isPlaying && finished) {
[weakSelf.assetPlayer play];
}
}];
}
- (void)setPlayerVolume:(CGFloat)volume
{
self.assetPlayer.volume = volume > .0 ? MAX(volume, 0.7) : 0.0f;
[self.assetPlayer play];
}
- (void)setPlayerRate:(CGFloat)rate
{
self.assetPlayer.rate = rate > .0 ? rate : 0.0f;
}
- (void)stop
{
[self.assetPlayer seekToTime:kCMTimeZero];
self.assetPlayer.rate =.0f;
}
- (BOOL)isPlaying
{
return self.assetPlayer.rate > 0 ? YES : NO;
}
#pragma mark - Private
- (void)initialSetupWithURL:(NSURL *)url
{
self.status = InternalStatusPreparation;
[self setupPlayerWithURL:url];
}
- (void)initialSetupWithURL:(NSURL *)url playerView:(PlayerView *)view
{
[self setupPlayerWithURL:url];
self.playerView = view;
}
- (void)setupPlayerWithURL:(NSURL *)url
{
NSDictionary *assetOptions = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
self.urlAsset = [AVURLAsset URLAssetWithURL:url options:assetOptions];
}
- (void)prepareToPlay
{
NSArray *keys = @[@"tracks"];
__weak VideoPlayer *weakSelf = self;
[weakSelf.urlAsset loadValuesAsynchronouslyForKeys:keys completionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[weakSelf startLoading];
});
}];
}
- (void)startLoading
{
NSError *error;
AVKeyValueStatus status = [self.urlAsset statusOfValueForKey:@"tracks" error:&error];
if (status == AVKeyValueStatusLoaded) {
self.assetDuration = CMTimeGetSeconds(self.urlAsset.duration);
NSDictionary* videoOutputOptions = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};
self.videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:videoOutputOptions];
self.playerItem = [AVPlayerItem playerItemWithAsset: self.urlAsset];
[self.playerItem addObserver:self
forKeyPath:@"status"
options:NSKeyValueObservingOptionInitial
context:&ItemStatusContext];
[self.playerItem addObserver:self
forKeyPath:@"loadedTimeRanges"
options:NSKeyValueObservingOptionNew|NSKeyValueObservingOptionOld
context:&ItemStatusContext];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:self.playerItem];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(didFailedToPlayToEnd)
name:AVPlayerItemFailedToPlayToEndTimeNotification
object:nil];
[self.playerItem addOutput:self.videoOutput];
self.assetPlayer = [AVPlayer playerWithPlayerItem:self.playerItem];
[self addPeriodicalObserver];
[((AVPlayerLayer *)self.playerView.layer) setPlayer:self.assetPlayer];
DLog(@"Player created");
} else {
DLog(@"The asset's tracks were not loaded:\n%@", error.localizedDescription);
}
}
#pragma mark - Observation
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
BOOL isOldKey = [change[NSKeyValueChangeNewKey] isEqual:change[NSKeyValueChangeOldKey]];
if (!isOldKey) {
if (context == &ItemStatusContext) {
if ([keyPath isEqualToString:@"status"] && !self.status) {
if (self.assetPlayer.status == AVPlayerItemStatusReadyToPlay) {
self.status = InternalStatusReadyToPlay;
}
[self moviePlayerDidChangeStatus:self.assetPlayer.status];
} else if ([keyPath isEqualToString:@"loadedTimeRanges"]) {
[self moviewPlayerLoadedTimeRangeDidUpdated:self.playerItem.loadedTimeRanges];
}
}
}
}
- (void)moviePlayerDidChangeStatus:(AVPlayerStatus)status
{
if (status == AVPlayerStatusFailed) {
DLog(@"Failed to load video");
} else if (status == AVPlayerItemStatusReadyToPlay) {
DLog(@"Player ready to play");
self.volume = self.assetPlayer.volume;
if (self.delegate && [self.delegate respondsToSelector:@selector(isReadyToPlay)]) {
[self.delegate isReadyToPlay];
}
}
}
- (void)moviewPlayerLoadedTimeRangeDidUpdated:(NSArray *)ranges
{
NSTimeInterval maximum = 0;
for (NSValue *value in ranges) {
CMTimeRange range;
[value getValue:&range];
NSTimeInterval currenLoadedRangeTime = CMTimeGetSeconds(range.start) + CMTimeGetSeconds(range.duration);
if (currenLoadedRangeTime > maximum) {
maximum = currenLoadedRangeTime;
}
}
CGFloat progress = (self.assetDuration == 0) ? 0 : maximum / self.assetDuration;
if (self.delegate && [self.delegate respondsToSelector:@selector(downloadingProgress:)]) {
[self.delegate downloadingProgress:progress];
}
}
- (void)playerItemDidReachEnd:(NSNotification *)notification
{
if (self.delegate && [self.delegate respondsToSelector:@selector(playerDidPlayItem)]){
[self.delegate playerDidPlayItem];
}
[self.assetPlayer seekToTime:kCMTimeZero];
if (self.autoRepeat) {
[self.assetPlayer play];
}
}
- (void)didFailedToPlayToEnd
{
DLog(@"Failed play video to the end");
}
- (void)addPeriodicalObserver
{
CMTime interval = CMTimeMake(1, 1);
__weak typeof(self) weakSelf = self;
[self.assetPlayer addPeriodicTimeObserverForInterval:interval queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
[weakSelf playerTimeDidChange:time];
}];
}
- (void)playerTimeDidChange:(CMTime)time
{
double timeNow = CMTimeGetSeconds(self.assetPlayer.currentTime);
if (self.delegate && [self.delegate respondsToSelector:@selector(progressDidUpdate:)]) {
[self.delegate progressDidUpdate:(CGFloat) (timeNow / self.assetDuration)];
}
}
#pragma mark - Notification
- (void)setupAppNotification
{
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didEnterBackground) name:UIApplicationDidEnterBackgroundNotification object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterForeground) name:UIApplicationWillEnterForegroundNotification object:nil];
}
- (void)didEnterBackground
{
[self.assetPlayer pause];
}
- (void)willEnterForeground
{
[self.assetPlayer pause];
}
#pragma mark - GetImagesFromVideoPlayer
- (BOOL)canProvideFrame
{
return self.assetPlayer.status == AVPlayerItemStatusReadyToPlay;
}
- (CVPixelBufferRef)getCurrentFramePicture
{
CMTime currentTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()];
if (self.delegate && [self.delegate respondsToSelector:@selector(progressTimeChanged:)]) {
[self.delegate progressTimeChanged:currentTime];
}
if (![self.videoOutput hasNewPixelBufferForItemTime:currentTime]) {
return 0;
}
CVPixelBufferRef buffer = [self.videoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:NULL];
return buffer;
}
#pragma mark - CleanUp
- (void)removeObserversFromPlayer
{
@try {
[self.playerItem removeObserver:self forKeyPath:@"status"];
[self.playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
[[NSNotificationCenter defaultCenter] removeObserver:self];
[[NSNotificationCenter defaultCenter] removeObserver:self.assetPlayer];
}
@catch (NSException *ex) {
DLog(@"Cant remove observer in Player - %@", ex.description);
}
}
- (void)cleanUp
{
[self removeObserversFromPlayer];
self.assetPlayer.rate = 0;
self.assetPlayer = nil;
self.playerItem = nil;
self.urlAsset = nil;
}
- (void)dealloc
{
[self cleanUp];
}
@end
Of course, the resource (video file) should have its target membership set to your project.
Additional: a link to the relevant Apple Developer guide.

Here is a solution for Swift 5.2.
PlayerView.swift:
import AVFoundation
import UIKit
class PlayerView: UIView {
var player: AVPlayer? {
get {
return playerLayer.player
}
set {
playerLayer.player = newValue
}
}
var playerLayer: AVPlayerLayer {
return layer as! AVPlayerLayer
}
// Override UIView property
override static var layerClass: AnyClass {
return AVPlayerLayer.self
}
}
VideoPlayer.swift
import AVFoundation
import Foundation
protocol VideoPlayerDelegate {
func downloadedProgress(progress:Double)
func readyToPlay()
func didUpdateProgress(progress:Double)
func didFinishPlayItem()
func didFailPlayToEnd()
}
let videoContext: UnsafeMutableRawPointer? = nil
class VideoPlayer : NSObject {
private var assetPlayer:AVPlayer?
private var playerItem:AVPlayerItem?
private var urlAsset:AVURLAsset?
private var videoOutput:AVPlayerItemVideoOutput?
private var assetDuration:Double = 0
private var playerView:PlayerView?
private var autoRepeatPlay:Bool = true
private var autoPlay:Bool = true
var delegate:VideoPlayerDelegate?
var playerRate:Float = 1 {
didSet {
if let player = assetPlayer {
player.rate = playerRate > 0 ? playerRate : 0.0
}
}
}
var volume:Float = 1.0 {
didSet {
if let player = assetPlayer {
player.volume = volume > 0 ? volume : 0.0
}
}
}
// MARK: - Init
convenience init(urlAsset:NSURL, view:PlayerView, startAutoPlay:Bool = true, repeatAfterEnd:Bool = true) {
self.init()
playerView = view
autoPlay = startAutoPlay
autoRepeatPlay = repeatAfterEnd
if let playView = playerView, let playerLayer = playView.layer as? AVPlayerLayer {
playerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
}
initialSetupWithURL(url: urlAsset)
prepareToPlay()
}
override init() {
super.init()
}
// MARK: - Public
func isPlaying() -> Bool {
if let player = assetPlayer {
return player.rate > 0
} else {
return false
}
}
func seekToPosition(seconds:Float64) {
if let player = assetPlayer {
pause()
if let timeScale = player.currentItem?.asset.duration.timescale {
player.seek(to: CMTimeMakeWithSeconds(seconds, preferredTimescale: timeScale), completionHandler: { (complete) in
self.play()
})
}
}
}
func pause() {
if let player = assetPlayer {
player.pause()
}
}
func play() {
if let player = assetPlayer {
if (player.currentItem?.status == .readyToPlay) {
player.play()
player.rate = playerRate
}
}
}
func cleanUp() {
if let item = playerItem {
item.removeObserver(self, forKeyPath: "status")
item.removeObserver(self, forKeyPath: "loadedTimeRanges")
}
NotificationCenter.default.removeObserver(self)
assetPlayer = nil
playerItem = nil
urlAsset = nil
}
// MARK: - Private
private func prepareToPlay() {
let keys = ["tracks"]
if let asset = urlAsset {
asset.loadValuesAsynchronously(forKeys: keys, completionHandler: {
DispatchQueue.main.async {
self.startLoading()
}
})
}
}
private func startLoading(){
var error:NSError?
guard let asset = urlAsset else {return}
let status:AVKeyValueStatus = asset.statusOfValue(forKey: "tracks", error: &error)
if status == AVKeyValueStatus.loaded {
assetDuration = CMTimeGetSeconds(asset.duration)
let videoOutputOptions = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: videoOutputOptions)
playerItem = AVPlayerItem(asset: asset)
if let item = playerItem {
item.addObserver(self, forKeyPath: "status", options: .initial, context: videoContext)
item.addObserver(self, forKeyPath: "loadedTimeRanges", options: [.new, .old], context: videoContext)
NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidReachEnd), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didFailedToPlayToEnd), name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, object: nil)
if let output = videoOutput {
item.add(output)
item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithm.varispeed
assetPlayer = AVPlayer(playerItem: item)
if let player = assetPlayer {
player.rate = playerRate
}
addPeriodicalObserver()
if let playView = playerView, let layer = playView.layer as? AVPlayerLayer {
layer.player = assetPlayer
print("player created")
}
}
}
}
}
private func addPeriodicalObserver() {
let timeInterval = CMTimeMake(value: 1, timescale: 1)
if let player = assetPlayer {
player.addPeriodicTimeObserver(forInterval: timeInterval, queue: DispatchQueue.main, using: { (time) in
self.playerDidChangeTime(time: time)
})
}
}
private func playerDidChangeTime(time:CMTime) {
if let player = assetPlayer {
let timeNow = CMTimeGetSeconds(player.currentTime())
let progress = timeNow / assetDuration
delegate?.didUpdateProgress(progress: progress)
}
}
@objc private func playerItemDidReachEnd() {
delegate?.didFinishPlayItem()
if let player = assetPlayer {
player.seek(to: CMTime.zero)
if autoRepeatPlay == true {
play()
}
}
}
@objc private func didFailedToPlayToEnd() {
delegate?.didFailPlayToEnd()
}
private func playerDidChangeStatus(status:AVPlayer.Status) {
if status == .failed {
print("Failed to load video")
} else if status == .readyToPlay, let player = assetPlayer {
volume = player.volume
delegate?.readyToPlay()
if autoPlay == true && player.rate == 0.0 {
play()
}
}
}
private func moviewPlayerLoadedTimeRangeDidUpdated(ranges:Array<NSValue>) {
var maximum:TimeInterval = 0
for value in ranges {
let range:CMTimeRange = value.timeRangeValue
let currentLoadedTimeRange = CMTimeGetSeconds(range.start) + CMTimeGetSeconds(range.duration)
if currentLoadedTimeRange > maximum {
maximum = currentLoadedTimeRange
}
}
let progress:Double = assetDuration == 0 ? 0.0 : Double(maximum) / assetDuration
delegate?.downloadedProgress(progress: progress)
}
deinit {
cleanUp()
}
private func initialSetupWithURL(url:NSURL) {
let options = [AVURLAssetPreferPreciseDurationAndTimingKey : true]
urlAsset = AVURLAsset(url: url as URL, options: options)
}
// MARK: - Observations
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if context == videoContext {
if let key = keyPath {
if key == "status", let player = assetPlayer {
playerDidChangeStatus(status: player.status)
} else if key == "loadedTimeRanges", let item = playerItem {
moviewPlayerLoadedTimeRangeDidUpdated(ranges: item.loadedTimeRanges)
}
}
}
}
}
Usage:
private var playerView: PlayerView = PlayerView()
private var videoPlayer:VideoPlayer?
and inside viewDidLoad():
view.addSubview(playerView)
preparePlayer()
// set Constraints (if you do it purely in code)
playerView.translatesAutoresizingMaskIntoConstraints = false
playerView.topAnchor.constraint(equalTo: view.topAnchor, constant: 10.0).isActive = true
playerView.leadingAnchor.constraint(equalTo: view.leadingAnchor, constant: 10.0).isActive = true
playerView.trailingAnchor.constraint(equalTo: view.trailingAnchor, constant: -10.0).isActive = true
playerView.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -10.0).isActive = true
private func preparePlayer() {
if let filePath = Bundle.main.path(forResource: "my video", ofType: "mp4") {
let fileURL = NSURL(fileURLWithPath: filePath)
videoPlayer = VideoPlayer(urlAsset: fileURL, view: playerView)
if let player = videoPlayer {
player.playerRate = 0.67
}
}
}

Swift 3
if let filePath = Bundle.main.path(forResource: "small", ofType: "mp4") {
let filePathURL = NSURL.fileURL(withPath: filePath)
let player = AVPlayer(url: filePathURL)
let playerController = AVPlayerViewController()
playerController.player = player
self.present(playerController, animated: true) {
player.play()
}
}

None of the above worked for me in Swift 5 for a local video player.
After reading the Apple documentation, I was able to create a simple example for playing a video from local resources.
Here is the code snippet:
import UIKit
import AVKit
class ViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
//TODO : Make Sure Add and copy "SampleVideo.mp4" file in project before play
}
@IBAction func playLocalVideo(_ sender: Any) {
guard let path = Bundle.main.path(forResource: "SampleVideo", ofType: "mp4") else {
return
}
let videoURL = NSURL(fileURLWithPath: path)
// Create an AVPlayer, passing it the local video url path
let player = AVPlayer(url: videoURL as URL)
let controller = AVPlayerViewController()
controller.player = player
present(controller, animated: true) {
player.play()
}
}
}
PS: Make sure you don't forget to add and copy a video named "SampleVideo.mp4" into the project.

This is gbk's answer converted to Swift 4.
1. In your main view controller:
if let filePath = Bundle.main.path(forResource: "clip", ofType: "mp4") {
let fileURL = NSURL(fileURLWithPath: filePath)
videoPlayer = VideoPlayer(urlAsset: fileURL, view: playerView)
if let player = videoPlayer {
player.playerRate = 1.00
}
}
You need the VideoPlayer class:
import AVFoundation
import Foundation
protocol VideoPlayerDelegate {
func downloadedProgress(progress:Double)
func readyToPlay()
func didUpdateProgress(progress:Double)
func didFinishPlayItem()
func didFailPlayToEnd()
}
let videoContext: UnsafeMutableRawPointer? = nil
class VideoPlayer : NSObject {
private var assetPlayer:AVPlayer?
private var playerItem:AVPlayerItem?
private var urlAsset:AVURLAsset?
private var videoOutput:AVPlayerItemVideoOutput?
private var assetDuration:Double = 0
private var playerView:PlayerView?
private var autoRepeatPlay:Bool = true
private var autoPlay:Bool = true
var delegate:VideoPlayerDelegate?
var playerRate:Float = 1 {
didSet {
if let player = assetPlayer {
player.rate = playerRate > 0 ? playerRate : 0.0
}
}
}
var volume:Float = 1.0 {
didSet {
if let player = assetPlayer {
player.volume = volume > 0 ? volume : 0.0
}
}
}
// MARK: - Init
convenience init(urlAsset:NSURL, view:PlayerView, startAutoPlay:Bool = true, repeatAfterEnd:Bool = true) {
self.init()
playerView = view
autoPlay = startAutoPlay
autoRepeatPlay = repeatAfterEnd
if let playView = playerView{
if let playerLayer = playView.layer as? AVPlayerLayer {
playerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
}
}
initialSetupWithURL(url: urlAsset)
prepareToPlay()
}
override init() {
super.init()
}
// MARK: - Public
func isPlaying() -> Bool {
if let player = assetPlayer {
return player.rate > 0
} else {
return false
}
}
func seekToPosition(seconds:Float64) {
if let player = assetPlayer {
pause()
if let timeScale = player.currentItem?.asset.duration.timescale {
player.seek(to: CMTimeMakeWithSeconds(seconds, timeScale), completionHandler: { (complete) in
self.play()
})
}
}
}
func pause() {
if let player = assetPlayer {
player.pause()
}
}
func play() {
if let player = assetPlayer {
if (player.currentItem?.status == .readyToPlay) {
player.play()
player.rate = playerRate
}
}
}
func cleanUp() {
if let item = playerItem {
item.removeObserver(self, forKeyPath: "status")
item.removeObserver(self, forKeyPath: "loadedTimeRanges")
}
NotificationCenter.default.removeObserver(self)
assetPlayer = nil
playerItem = nil
urlAsset = nil
}
// MARK: - Private
private func prepareToPlay() {
let keys = ["tracks"]
if let asset = urlAsset {
asset.loadValuesAsynchronously(forKeys: keys, completionHandler: {
DispatchQueue.global(qos: .userInitiated).async {
// Bounce back to the main thread to update the UI
DispatchQueue.main.async {
self.startLoading()
}
}
})
}
}
private func startLoading(){
var error:NSError?
guard let asset = urlAsset else {return}
// let status:AVKeyValueStatus = asset.statusOfValueForKey("tracks", error: &error)
let status:AVKeyValueStatus = asset.statusOfValue(forKey: "tracks", error: nil)
if status == AVKeyValueStatus.loaded {
assetDuration = CMTimeGetSeconds(asset.duration)
let videoOutputOptions = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: videoOutputOptions)
playerItem = AVPlayerItem(asset: asset)
if let item = playerItem {
item.addObserver(self, forKeyPath: "status", options: .initial, context: videoContext)
item.addObserver(self, forKeyPath: "loadedTimeRanges", options: [.new, .old], context: videoContext)
NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidReachEnd), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didFailedToPlayToEnd), name:NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, object: nil)
if let output = videoOutput {
item.add(output)
item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithm.varispeed
assetPlayer = AVPlayer(playerItem: item)
if let player = assetPlayer {
player.rate = playerRate
}
addPeriodicalObserver()
if let playView = playerView, let layer = playView.layer as? AVPlayerLayer {
layer.player = assetPlayer
}
}
}
}
}
private func addPeriodicalObserver() {
let timeInterval = CMTimeMake(1, 1)
if let player = assetPlayer {
player.addPeriodicTimeObserver(forInterval: timeInterval, queue:
DispatchQueue.main
, using: { (time) in
self.playerDidChangeTime(time: time)
})
}
}
private func playerDidChangeTime(time:CMTime) {
if let player = assetPlayer {
let timeNow = CMTimeGetSeconds(player.currentTime())
let progress = timeNow / assetDuration
delegate?.didUpdateProgress(progress: progress)
}
}
@objc private func playerItemDidReachEnd() {
delegate?.didFinishPlayItem()
if let player = assetPlayer {
player.seek(to: kCMTimeZero)
if autoRepeatPlay == true {
play()
}
}
}
@objc private func didFailedToPlayToEnd() {
delegate?.didFailPlayToEnd()
}
private func playerDidChangeStatus(status:AVPlayerStatus) {
if status == .failed {
print("Failed to load video")
} else if status == .readyToPlay, let player = assetPlayer {
volume = player.volume
delegate?.readyToPlay()
if autoPlay == true && player.rate == 0.0 {
play()
}
}
}
private func moviewPlayerLoadedTimeRangeDidUpdated(ranges:Array<NSValue>) {
var maximum:TimeInterval = 0
for value in ranges {
let range:CMTimeRange = value.timeRangeValue
let currentLoadedTimeRange = CMTimeGetSeconds(range.start) + CMTimeGetSeconds(range.duration)
if currentLoadedTimeRange > maximum {
maximum = currentLoadedTimeRange
}
}
let progress:Double = assetDuration == 0 ? 0.0 : Double(maximum) / assetDuration
delegate?.downloadedProgress(progress: progress)
}
deinit {
cleanUp()
}
private func initialSetupWithURL(url:NSURL) {
let options = [AVURLAssetPreferPreciseDurationAndTimingKey : true]
// urlAsset = AVURLAsset(URL: url, options: options)
urlAsset = AVURLAsset(url: url as URL, options: options)
}
// MARK: - Observations
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if context == videoContext {
if let key = keyPath {
if key == "status", let player = assetPlayer {
playerDidChangeStatus(status: player.status)
} else if key == "loadedTimeRanges", let item = playerItem {
moviewPlayerLoadedTimeRangeDidUpdated(ranges: item.loadedTimeRanges)
}
}
}
}
}
and a PlayerView class to create a video view:
import AVFoundation
import UIKit
class PlayerView: UIView {
override class var layerClass: AnyClass {
get {
return AVPlayerLayer.self
}
}
var player:AVPlayer? {
set {
if let layer = layer as? AVPlayerLayer {
layer.player = newValue
}
}
get {
if let layer = layer as? AVPlayerLayer {
return layer.player
} else {
return nil
}
}
}
}

PlayerView for swift 4.2
import AVFoundation
import UIKit
class PlayerView: UIView {
var player: AVPlayer? {
get {
return playerLayer.player
}
set {
playerLayer.player = newValue
}
}
var playerLayer: AVPlayerLayer {
return layer as! AVPlayerLayer
}
// Override UIView property
override static var layerClass: AnyClass {
return AVPlayerLayer.self
}
}

gbk's solution in Swift 3
PlayerView
import AVFoundation
import UIKit
class PlayerView: UIView {
override class var layerClass: AnyClass {
return AVPlayerLayer.self
}
var player:AVPlayer? {
set {
if let layer = layer as? AVPlayerLayer {
layer.player = newValue
}
}
get {
if let layer = layer as? AVPlayerLayer {
return layer.player
} else {
return nil
}
}
}
}
VideoPlayer
import AVFoundation
import Foundation
protocol VideoPlayerDelegate {
func downloadedProgress(progress:Double)
func readyToPlay()
func didUpdateProgress(progress:Double)
func didFinishPlayItem()
func didFailPlayToEnd()
}
let videoContext:UnsafeMutableRawPointer? = nil
class VideoPlayer : NSObject {
private var assetPlayer:AVPlayer?
private var playerItem:AVPlayerItem?
private var urlAsset:AVURLAsset?
private var videoOutput:AVPlayerItemVideoOutput?
private var assetDuration:Double = 0
private var playerView:PlayerView?
private var autoRepeatPlay:Bool = true
private var autoPlay:Bool = true
var delegate:VideoPlayerDelegate?
var playerRate:Float = 1 {
didSet {
if let player = assetPlayer {
player.rate = playerRate > 0 ? playerRate : 0.0
}
}
}
var volume:Float = 1.0 {
didSet {
if let player = assetPlayer {
player.volume = volume > 0 ? volume : 0.0
}
}
}
// MARK: - Init
convenience init(urlAsset: URL, view:PlayerView, startAutoPlay:Bool = true, repeatAfterEnd:Bool = true) {
self.init()
playerView = view
autoPlay = startAutoPlay
autoRepeatPlay = repeatAfterEnd
if let playView = playerView, let playerLayer = playView.layer as? AVPlayerLayer {
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
}
initialSetupWithURL(url: urlAsset)
prepareToPlay()
}
override init() {
super.init()
}
// MARK: - Public
func isPlaying() -> Bool {
if let player = assetPlayer {
return player.rate > 0
} else {
return false
}
}
func seekToPosition(seconds:Float64) {
if let player = assetPlayer {
pause()
if let timeScale = player.currentItem?.asset.duration.timescale {
player.seek(to: CMTimeMakeWithSeconds(seconds, timeScale), completionHandler: { (complete) in
self.play()
})
}
}
}
func pause() {
if let player = assetPlayer {
player.pause()
}
}
func play() {
if let player = assetPlayer {
if (player.currentItem?.status == .readyToPlay) {
player.play()
player.rate = playerRate
}
}
}
func cleanUp() {
if let item = playerItem {
item.removeObserver(self, forKeyPath: "status")
item.removeObserver(self, forKeyPath: "loadedTimeRanges")
}
NotificationCenter.default.removeObserver(self)
assetPlayer = nil
playerItem = nil
urlAsset = nil
}
// MARK: - Private
private func prepareToPlay() {
let keys = ["tracks"]
if let asset = urlAsset {
asset.loadValuesAsynchronously(forKeys: keys, completionHandler: {
DispatchQueue.main.async {
self.startLoading()
}
})
}
}
private func startLoading(){
var error:NSError?
guard let asset = urlAsset else {return}
let status:AVKeyValueStatus = asset.statusOfValue(forKey: "tracks", error: &error)
if status == AVKeyValueStatus.loaded {
assetDuration = CMTimeGetSeconds(asset.duration)
let videoOutputOptions = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: videoOutputOptions)
playerItem = AVPlayerItem(asset: asset)
if let item = playerItem {
item.addObserver(self, forKeyPath: "status", options: .initial, context: videoContext)
item.addObserver(self, forKeyPath: "loadedTimeRanges", options: [.new, .old], context: videoContext)
NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidReachEnd), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didFailedToPlayToEnd), name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, object: nil)
if let output = videoOutput {
item.add(output)
item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed
assetPlayer = AVPlayer(playerItem: item)
if let player = assetPlayer {
player.rate = playerRate
}
addPeriodicalObserver()
if let playView = playerView, let layer = playView.layer as? AVPlayerLayer {
layer.player = assetPlayer
print("player created")
}
}
}
}
}
private func addPeriodicalObserver() {
let timeInterval = CMTimeMake(1, 1)
if let player = assetPlayer {
player.addPeriodicTimeObserver(forInterval: timeInterval, queue: DispatchQueue.main, using: { (time) in
self.playerDidChangeTime(time: time)
})
}
}
private func playerDidChangeTime(time:CMTime) {
if let player = assetPlayer {
let timeNow = CMTimeGetSeconds(player.currentTime())
let progress = timeNow / assetDuration
delegate?.didUpdateProgress(progress: progress)
}
}
@objc private func playerItemDidReachEnd() {
delegate?.didFinishPlayItem()
if let player = assetPlayer {
player.seek(to: kCMTimeZero)
if autoRepeatPlay == true {
play()
}
}
}
@objc private func didFailedToPlayToEnd() {
delegate?.didFailPlayToEnd()
}
private func playerDidChangeStatus(status:AVPlayerStatus) {
if status == .failed {
print("Failed to load video")
} else if status == .readyToPlay, let player = assetPlayer {
volume = player.volume
delegate?.readyToPlay()
if autoPlay == true && player.rate == 0.0 {
play()
}
}
}
private func moviewPlayerLoadedTimeRangeDidUpdated(ranges:Array<NSValue>) {
var maximum:TimeInterval = 0
for value in ranges {
let range:CMTimeRange = value.timeRangeValue
let currentLoadedTimeRange = CMTimeGetSeconds(range.start) + CMTimeGetSeconds(range.duration)
if currentLoadedTimeRange > maximum {
maximum = currentLoadedTimeRange
}
}
let progress:Double = assetDuration == 0 ? 0.0 : Double(maximum) / assetDuration
delegate?.downloadedProgress(progress: progress)
}
deinit {
cleanUp()
}
private func initialSetupWithURL(url: URL) {
let options = [AVURLAssetPreferPreciseDurationAndTimingKey : true]
urlAsset = AVURLAsset(url: url, options: options)
}
// MARK: - Observations
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if context == videoContext {
if let key = keyPath {
if key == "status", let player = assetPlayer {
playerDidChangeStatus(status: player.status)
} else if key == "loadedTimeRanges", let item = playerItem {
moviewPlayerLoadedTimeRangeDidUpdated(ranges: item.loadedTimeRanges)
}
}
}
}
}

Related

iOS Swift AVFoundation Video Recording AVCaptureMovieFileOutput isRecording value false every time

I'm trying to record and save video using the AVFoundation framework with both the front and rear cameras. I'm able to start the session but I'm unable to save the video recording to the documents directory.
I check movieOutput.isRecording and it returns false every time, so the delegate output method is never called either. Even the start delegate is not called when recording begins.
import UIKit
import Foundation
import AVKit
import AVFoundation
class AppVideoRecorder: NSObject {
private var session = AVCaptureSession()
private var movieOutput = AVCaptureMovieFileOutput()
private var camera: AVCaptureDevice?
private var activeInput: AVCaptureDeviceInput?
private var previewLayer = AVCaptureVideoPreviewLayer()
private var renderView: UIView!
var isFrontCamera: Bool = false
init(for view: UIView) {
self.renderView = view
}
deinit {
print("Called")
}
func setupSession() {
self.session.sessionPreset = .high
// Setup Camera
self.camera = AVCaptureDevice.default(
.builtInWideAngleCamera,
for: .video,
position: self.isFrontCamera ? .front : .back
)
if let camera = self.camera {
do {
let input = try AVCaptureDeviceInput(device: camera)
if self.session.canAddInput(input) {
self.session.addInput(input)
self.activeInput = input
}
} catch {
print(error)
}
}
// Setup Microphone
if let microphone = AVCaptureDevice.default(for: .audio) {
do {
let micInput = try AVCaptureDeviceInput(device: microphone)
if self.session.canAddInput(micInput) {
self.session.addInput(micInput)
}
} catch {
print(error)
}
}
// Movie output
if self.session.canAddOutput(self.movieOutput) {
self.session.addOutput(self.movieOutput)
}
}
func setupPreview() {
// Configure previewLayer
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
self.previewLayer.frame = self.renderView.bounds
self.previewLayer.videoGravity = .resizeAspectFill
self.renderView.layer.addSublayer(self.previewLayer)
}
func startSession() {
if self.session.isRunning { return }
DispatchQueue.main.async {
self.session.startRunning()
}
}
func stopSession() {
if self.session.isRunning {
DispatchQueue.main.async {
self.session.stopRunning()
}
}
}
func removeInput() {
guard let input = self.activeInput else { return }
self.session.removeInput(input)
}
func isCameraOn(completion: @escaping (Bool) -> Void) {
if AVCaptureDevice.authorizationStatus(for: .video) == .authorized {
completion(true)
} else {
AVCaptureDevice.requestAccess(for: .video,
completionHandler: { (granted) in
completion(granted)
})
}
}
func toggleCamera() {
self.session.beginConfiguration()
for input in self.session.inputs {
if let inputObj = input as? AVCaptureDeviceInput {
self.session.removeInput(inputObj)
}
}
self.camera = AVCaptureDevice.default(
.builtInWideAngleCamera,
for: .video,
position: self.isFrontCamera ? .front : .back
)
if let camera = self.camera {
do {
let input = try AVCaptureDeviceInput(device: camera)
if self.session.canAddInput(input) {
self.session.addInput(input)
self.activeInput = input
}
} catch {
print(error)
}
}
self.session.commitConfiguration()
}
}
extension AppVideoRecorder: AVCaptureFileOutputRecordingDelegate {
private var currentVideoOrientation: AVCaptureVideoOrientation {
var orientation: AVCaptureVideoOrientation
switch UIDevice.current.orientation {
case .portrait:
orientation = AVCaptureVideoOrientation.portrait
case .landscapeRight:
orientation = AVCaptureVideoOrientation.landscapeLeft
case .portraitUpsideDown:
orientation = AVCaptureVideoOrientation.portraitUpsideDown
default:
orientation = AVCaptureVideoOrientation.landscapeRight
}
return orientation
}
func recordVideo() {
if self.movieOutput.isRecording { // FALSE EVERY TIME
self.stopRecording()
} else {
self.startRecording()
}
}
private func startRecording() {
guard let connection = self.movieOutput.connection(with: .video),
let device = self.activeInput?.device else { return }
// handle return error
if connection.isVideoOrientationSupported {
connection.videoOrientation = self.currentVideoOrientation
}
if connection.isVideoStabilizationSupported {
connection.preferredVideoStabilizationMode = .auto
}
if device.isSmoothAutoFocusSupported {
do {
try device.lockForConfiguration()
device.isSmoothAutoFocusEnabled = false
device.unlockForConfiguration()
} catch {
print("Error setting configuration: \(error)")
}
}
let paths = FileManager.default.urls(for: .documentDirectory,
in: .userDomainMask)
guard let path = paths.first else { return }
let fileUrl = path.appendingPathComponent("celeb_video.mp4")
try? FileManager.default.removeItem(at: fileUrl)
self.movieOutput.startRecording(to: fileUrl, recordingDelegate: self)
}
private func stopRecording() {
self.movieOutput.stopRecording()
}
func fileOutput(_ output: AVCaptureFileOutput,
didFinishRecordingTo outputFileURL: URL,
from connections: [AVCaptureConnection],
error: Error?) {
print("DELEGATE CALL BACK")
if let error = error {
//do something
print(error)
} else {
//do something
print(outputFileURL.path)
// UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
}
}
func fileOutput(_ output: AVCaptureFileOutput,
didStartRecordingTo fileURL: URL,
from connections: [AVCaptureConnection]) {
print("didStartRecordingTo CALL BACK:", fileURL.path)
}
}
Here is my calling code in view controller. recordingView is UIView
private lazy var recorder: AppVideoRecorder = {
return AppVideoRecorder(for: self.recordingView)
}()
@IBAction func recordingAction(_ sender: UIButton) {
sender.isSelected.toggle()
if sender.isSelected {
self.recorder.setupSession()
self.recorder.setupPreview()
self.recorder.startSession()
self.recorder.recordVideo()
} else {
self.recorder.recordVideo()
self.recorder.removeInput()
self.recorder.stopSession()
}
}
@IBAction func swapCameraAction(_ sender: UIButton) {
sender.isSelected.toggle()
self.recorder.isFrontCamera = sender.isSelected
self.recorder.toggleCamera()
}
Please let me know what I missed.
As per the link Starting video recording immediately with AVCaptureMovieFileOutput,
I added the notifications below, and now it works; the session simply takes time to start.
private func setupNotifications() {
NotificationCenter.default.addObserver(self,
selector: #selector(sessionDidStartRunning(_:)),
name: .AVCaptureSessionDidStartRunning,
object: nil)
NotificationCenter.default.addObserver(self,
selector: #selector(sessionDidStopRunning(_:)),
name: .AVCaptureSessionDidStopRunning,
object: nil)
}
@objc
private func sessionDidStartRunning(_ notification: NSNotification) {
self.startRecording()
}
@objc
private func sessionDidStopRunning(_ notification: NSNotification) {
}

Pausing and Resuming Timer in Swift

I am playing audio that can be paused, resumed, and stopped. I also have a slider that can be used to change the audio's current playback point and a label that shows the duration of the audio. I want to pause the timer when the user pauses the audio. I read that I can invalidate and nil the timer and then start it again, but the issue with that is it replays the audio from the start. Is there a way to start the timer at the last point where it was paused?
func startTimer() {
if replayTimer == nil {
replayTimer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updateSlider), userInfo: nil, repeats: true)
}
}
@objc func updateSlider() {
progressBar.value = Float(audio.audio!.currentTime)
}
@IBAction func playReceiverVoicenote(_ sender: Any) {
if replayTimer == nil {
audioBtn.setImage(#imageLiteral(resourceName: "pause"), for: .normal)
audio.playAudio(filePath: filePath!)
startTimer()
receiverProgressBar.maximumValue = audio.getAudioDuration()
} else if audio.isAudioPlaying() {
audioBtn.setImage(#imageLiteral(resourceName: "playAudio"), for: .normal)
audio.pauseAudio()
replayTimer?.invalidate()
replayTimer = nil
} else {
audioBtn.setImage(#imageLiteral(resourceName: "pause"), for: .normal)
audio.replayAudio()
startTimer()
}
}
func playAudio(filePath:URL){
do {
audio = try AVAudioPlayer(contentsOf: filePath)
audio!.delegate = self
audio!.prepareToPlay()
audio!.volume = 1.0
audio!.play()
} catch {
print(error.localizedDescription)
}
}
func pauseAudio() {
audio!.pause()
}
func replayAudio() {
audio!.play()
}
func stopAudio() {
audio!.stop()
}
When the audio is going to pause, save audio.currentTime in a variable and invalidate the timer.
When the audio is going to resume, get the saved currentTime, call play(atTime:) passing the time interval, and restart the timer.
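A minimal sketch of that save-and-restore idea, using a plain AVAudioPlayer and a Timer (the names player, replayTimer, and savedTime are assumptions; here the position is restored by setting currentTime rather than calling play(atTime:), since play(atTime:) schedules playback relative to deviceCurrentTime instead of seeking):
import AVFoundation

final class PlaybackController {
    var player: AVAudioPlayer?            // assumed to be loaded elsewhere
    var replayTimer: Timer?
    private var savedTime: TimeInterval = 0

    func pausePlayback() {
        guard let player = player else { return }
        savedTime = player.currentTime    // remember where playback stopped
        player.pause()
        replayTimer?.invalidate()         // stop the slider-updating timer
        replayTimer = nil
    }

    func resumePlayback() {
        guard let player = player else { return }
        player.currentTime = savedTime    // continue from the saved position
        player.play()
        if replayTimer == nil {           // restart the timer without touching the audio position
            replayTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in
                // update the slider/label from player.currentTime here
            }
        }
    }
}
Note that AVAudioPlayer's pause() already preserves currentTime, so saving it explicitly mainly matters if you stop the player or need the value for the slider.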
You can also use this Audio Manager in this way:
let player = AudioPlayer()
player.loadAudio(url: URL(string: "myAudioUrl.mp3")!, name: "", img: "")
And the action of the button to play and pause:
if player.isPlaying {
player.pauseAudio()
} else {
player.playAudio { isFinish, player, currentTimeInSec, remainingTimeInSec in
if isFinish {
// Audio finish to play
}
}
}
The closure returns:
isFinish: Bool // If the audio has finished playing
player: AVAudioPlayer // The player
currentTimeInSec: Int // The current time in seconds of the audio playing
remainingTimeInsec: Int // The remaining time
AudioPlayer:
import Foundation
import AVFoundation
import MediaPlayer
class AudioPlayer {
var audioPlayer: AVAudioPlayer?
var hasBeenPaused = false
var songName = ""
var songImage = ""
var timer: Timer?
var isPlaying: Bool {
return audioPlayer?.isPlaying ?? false
}
public func loadAudio(url: URL, name: String, img: String) {
songName = name
songImage = img
setupRemoteTransportControls()
do {
audioPlayer = try AVAudioPlayer(contentsOf: url)
audioPlayer?.prepareToPlay()
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.playback, mode: .default)
} catch let sessionError {
print(sessionError)
}
} catch let songPlayerError {
print(songPlayerError)
}
}
public func playAudio(completion: ((_ isFinish: Bool, _ player: AVAudioPlayer, _ currentTimeInSec: Int, _ restTimeInSec: Int) -> ())? = nil) {
guard let audioPlayer = audioPlayer else { return }
audioPlayer.play()
setupNowPlaying()
if timer == nil {
timer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { _ in
let currentTime = Int(audioPlayer.currentTime)
let remainingTime = Int(audioPlayer.duration) - Int(currentTime)
if remainingTime == 0 {
completion?(true, audioPlayer, currentTime, remainingTime)
if self.timer != nil {
self.timer!.invalidate()
self.timer = nil
}
} else {
completion?(false, audioPlayer, currentTime, remainingTime)
}
}
}
}
public func pauseAudio() {
guard let audioPlayer = audioPlayer else { return }
if audioPlayer.isPlaying {
audioPlayer.pause()
hasBeenPaused = true
} else {
hasBeenPaused = false
}
setupNowPlaying()
if timer != nil {
timer!.invalidate()
timer = nil
}
}
public func replayAudio() {
guard let audioPlayer = audioPlayer else { return }
if audioPlayer.isPlaying || hasBeenPaused {
audioPlayer.stop()
audioPlayer.currentTime = 0
audioPlayer.play()
} else {
audioPlayer.play()
}
setupNowPlaying()
}
public func stopAudio() {
guard let audioPlayer = audioPlayer else { return }
audioPlayer.stop()
setupNowPlaying()
if timer != nil {
timer!.invalidate()
timer = nil
}
}
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.previousTrackCommand.isEnabled = false
commandCenter.nextTrackCommand.isEnabled = false
commandCenter.skipBackwardCommand.isEnabled = false
commandCenter.skipForwardCommand.isEnabled = false
commandCenter.playCommand.isEnabled = true
commandCenter.playCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
//Update your button here for the play command
self.playAudio()
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "updatedControlCenterAudio"), object: nil)
return .success
}
commandCenter.pauseCommand.isEnabled = true
commandCenter.pauseCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
//Update your button here for the pause command
self.pauseAudio()
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "updatedControlCenterAudio"), object: nil)
return .success
}
}
func setupNowPlaying() {
guard let audioPlayer = audioPlayer else { return }
var nowPlayingInfo = [String: Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = songName
if let image = UIImage(named: songImage) {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = audioPlayer.currentTime
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = audioPlayer.duration
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = audioPlayer.rate
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}

AVPlayerViewController control in iPhoneX

I am not able to find a solution to display the AVPlayerViewController controls on iPhone X. Using AVPlayerViewController.gravity = .aspectfill shows the controls, but the video fills the whole screen and clips the remaining portions. Any ideas? Here is my code:
func loadVideoURL () {
let fileURL = URL.init(string: (exerciseDetailsObj?.video_file)!)
avPlayer = AVPlayer(url: fileURL!)
let playerController = AVPlayerViewController()
playerController.player = avPlayer
playerController.view.frame = CGRect(x: 0, y: 0, width:viewPlayerContainer.bounds.size.width, height: viewPlayerContainer.bounds.size.height)
playerController.showsPlaybackControls = false
avPlayerController = playerController
self.addChild(avPlayerController)
viewPlayerContainer.addSubview(avPlayerController.view)
viewPlayerContainer.sendSubviewToBack(avPlayerController.view)
avPlayerController.didMove(toParent: self)
avPlayerController.addObserver(self, forKeyPath: "videoBounds", options: NSKeyValueObservingOptions.new, context: nil)
}
}
func enterFullscreen(playerViewController: AVPlayerViewController) {
playerViewController.showsPlaybackControls = true // not displayed on iPhone X
let selectorName: String = {
if #available(iOS 11.3, *) {
return "_transitionToFullScreenAnimated:interactive:completionHandler:"
} else if #available(iOS 11, *) {
return "_transitionToFullScreenAnimated:completionHandler:"
} else {
return "_transitionToFullScreenViewControllerAnimated:completionHandler:"
}
}()
let selectorToForceFullScreenMode = NSSelectorFromString(selectorName)
if playerViewController.responds(to: selectorToForceFullScreenMode) {
playerViewController.perform(selectorToForceFullScreenMode, with: true, with: nil)
}
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?)
{
if keyPath == "videoBounds"
{
if let rect = change?[.newKey] as? NSValue
{
if let newrect = rect.cgRectValue as CGRect?
{
if newrect.size.height <= 214
{
avPlayerController.showsPlaybackControls = false
}
}
}
}
}
Check out this approach. Refer to the code below and adapt it to your own code.
#IBOutlet weak var videoPlayerView: UIView! // a UIView in your storyboard on which you want to display the video
var player: AVPlayer?
var playerController = AVPlayerLayer()
override func viewDidLoad()
{
self.videoPlayerView.layer.insertSublayer(playerController, at: 0)
player = AVPlayer(url: videoUrl!)
playerController.player = player
self.player?.play()
}
override func viewDidLayoutSubviews() {
playerController.videoGravity = .resizeAspectFill
playerController.frame = self.videoPlayerView.bounds
}

iOS how to correctly handle orientation when capturing video using AVAssetWriter

I am making a sample application that utilizes AVFoundation to record video. The whole point is so I can have more control over how the video is recorded. In my sample project I have the video capturing but am struggling with handling orientation correctly.
I have done a lot of searching around the web and found that others are suggesting that I should NOT allow my capture view or capture session to rotate based on orientation, but rather set a transformation to rotate the video during playback. I have this working fine on iOS and Mac devices, but am wondering if I will have issues on other platforms such as Windows or Android.
Also, when I view the recorded video's metadata I see that the width and height are not set properly for the orientation. This makes sense, as I am only transforming the presentation of the video and not its actual resolution.
My question here is how do I correctly support portrait and landscape orientations and have it reflected correctly in the video file output? I need these videos to play on all platforms correctly so I am thinking the resolution is going to matter a great deal.
Below is the full source I have written thus far. I appreciate any advice you all can provide.
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
//MARK: - Outlet
#IBOutlet weak var previewView: UIView!
#IBOutlet var playStopButton: UIButton!
//MARK: - Private Variables
let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
private let captureSession = AVCaptureSession()
var outputUrl: URL {
get {
if let url = _outputUrl {
return url
}
_outputUrl = outputDirectory.appendingPathComponent("video.mp4")
return _outputUrl!
}
}
private var _outputUrl: URL?
var outputDirectory: URL {
get {
if let url = _outputDirectory {
return url
}
_outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
return _outputDirectory!
}
}
private var _outputDirectory: URL?
private var assetWriter: AVAssetWriter?
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
private var videoOutput: AVCaptureVideoDataOutput?
private var audioOutput: AVCaptureAudioDataOutput?
private var isRecording = false
private var isWriting = false
private var videoSize = CGSize(width: 640, height: 480)
//MARK: - View Life-cycle
override func viewDidLoad() {
super.viewDidLoad()
videoQueue.async {
do {
try self.configureCaptureSession()
try self.configureAssetWriter()
DispatchQueue.main.async {
self.configurePreview()
}
} catch {
DispatchQueue.main.async {
self.showAlert("Unable to configure video output")
}
}
}
}
override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
return .portrait
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
//MARK: - Capture Session
private func configureCaptureSession() throws {
do {
// configure the session
if captureSession.canSetSessionPreset(AVCaptureSessionPreset640x480) {
captureSession.sessionPreset = AVCaptureSessionPreset640x480
}
// configure capture devices
let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
let camInput = try AVCaptureDeviceInput(device: camDevice)
let micInput = try AVCaptureDeviceInput(device: micDevice)
if captureSession.canAddInput(camInput) {
captureSession.addInput(camInput)
}
if captureSession.canAddInput(micInput) {
captureSession.addInput(micInput)
}
// configure audio/video output
videoOutput = AVCaptureVideoDataOutput()
videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let v = videoOutput {
captureSession.addOutput(v)
}
audioOutput = AVCaptureAudioDataOutput()
audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let a = audioOutput {
captureSession.addOutput(a)
}
// configure audio session
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try audioSession.setActive(true)
var micPort: AVAudioSessionPortDescription?
if let inputs = audioSession.availableInputs {
for port in inputs {
if port.portType == AVAudioSessionPortBuiltInMic {
micPort = port
break;
}
}
}
if let port = micPort, let dataSources = port.dataSources {
for source in dataSources {
if source.orientation == AVAudioSessionOrientationFront {
try audioSession.setPreferredInput(port)
break
}
}
}
} catch {
print("Failed to configure audio/video capture session")
throw error
}
}
private func configureAssetWriter() throws {
prepareVideoFile()
do {
assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)
guard let writer = assetWriter else {
print("Asset writer not created")
return
}
let vidSize = videoSize
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: NSNumber(value: Float(vidSize.width)),
AVVideoHeightKey: NSNumber(value: Float(vidSize.height))]
videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
videoInput?.expectsMediaDataInRealTime = true
videoInput?.transform = getVideoTransform()
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2]
audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
audioInput?.expectsMediaDataInRealTime = true
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
if writer.canAdd(vi) {
writer.add(vi)
}
if writer.canAdd(ai) {
writer.add(ai)
}
} catch {
print("Failed to configure asset writer")
throw error
}
}
private func prepareVideoFile() {
if FileManager.default.fileExists(atPath: outputUrl.path) {
do {
try FileManager.default.removeItem(at: outputUrl)
} catch {
print("Unable to remove file at URL \(outputUrl)")
}
}
if !FileManager.default.fileExists(atPath: outputDirectory.path) {
do {
try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
} catch {
print("Unable to create directory at URL \(outputDirectory)")
}
}
}
private func configurePreview() {
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
previewLayer.frame = previewView.bounds
previewView.layer.addSublayer(previewLayer)
}
}
private func getVideoSize() -> CGSize {
if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {
if videoSize.width > videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
} else {
if videoSize.width < videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
}
}
private func getVideoTransform() -> CGAffineTransform {
switch UIDevice.current.orientation {
case .portraitUpsideDown:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * -90.0)) / 180.0)
case .landscapeLeft:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: Add support for front facing camera
// return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: For front facing camera
case .landscapeRight:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: Add support for front facing camera
// return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: For front facing camera
default:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * 90.0)) / 180.0)
}
}
//MARK: - Controls
private func startRecording() {
videoQueue.async {
self.captureSession.startRunning()
}
isRecording = true
playStopButton.setTitle("Stop Recording", for: .normal)
print("Recording did start")
}
private func stopRecording() {
if !isRecording {
return
}
videoQueue.async {
self.assetWriter?.finishWriting {
print("Asset writer did finish writing")
self.isWriting = false
}
self.captureSession.stopRunning()
}
isRecording = false
playStopButton.setTitle("Start Recording", for: .normal)
print("Recording did stop")
}
//MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
guard let w = assetWriter else {
print("Asset writer not configured")
return
}
guard let vo = videoOutput else {
print("Video output not configured")
return
}
guard let ao = audioOutput else {
print("Audio output not configured")
return
}
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
print("Writer status \(w.status.rawValue)")
if let e = w.error {
print("Writer error \(e)")
stopRecording()
return
}
switch w.status {
case .unknown:
if !isWriting {
isWriting = true
w.startWriting()
w.startSession(atSourceTime: st)
}
return
case .completed:
print("Video writing completed")
return
case .cancelled:
print("Video writing cancelled")
return
case .failed:
print("Video writing failed")
return
default:
print("Video is writing")
}
if vo == captureOutput {
if !vi.append(sampleBuffer) {
print("Unable to write to video buffer")
}
} else if ao == captureOutput {
if !ai.append(sampleBuffer) {
print("Unable to write to audio buffer")
}
}
}
//MARK: Helpers
private func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
//MARK: Actions
#IBAction func startStopTapped(sender: AnyObject) {
if isRecording {
stopRecording()
} else {
startRecording()
}
}
}
Video orientation is handled by AVAssetWriterInput.transform. It looks like the getVideoTransform() implementation is not correct: CGAffineTransform expects the rotation angle in radians, so it needs to be changed to something like this:
private func getVideoTransform() -> CGAffineTransform {
switch UIDevice.current.orientation {
case .portrait:
return .identity
case .portraitUpsideDown:
return CGAffineTransform(rotationAngle: .pi)
case .landscapeLeft:
return CGAffineTransform(rotationAngle: .pi/2)
case .landscapeRight:
return CGAffineTransform(rotationAngle: -.pi/2)
default:
return .identity
}
}
From Apple Technical Q&A:
https://developer.apple.com/library/archive/qa/qa1744/_index.html
If you are using an AVAssetWriter object to write a movie file, you can use the transform property of the associated AVAssetWriterInput to specify the output file orientation. This will write a display transform property into the output file as the preferred transformation of the visual media data for display purposes. See the AVAssetWriterInput.h interface file for the details.
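A minimal sketch of wiring this up, using the Swift 3 era API that the question's code already uses. The settings keys and getVideoTransform() come from the answer above; the helper function name is an assumption for illustration only:
import AVFoundation
import UIKit

// Sketch: build the writer input with the display transform attached. The
// transform must be set before startWriting() is called; it only records a
// display matrix, the encoded pixel dimensions stay at the width/height keys.
func makeVideoWriterInput(size: CGSize, transform: CGAffineTransform) -> AVAssetWriterInput {
    let settings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                   AVVideoWidthKey: NSNumber(value: Float(size.width)),
                                   AVVideoHeightKey: NSNumber(value: Float(size.height))]
    let input = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: settings)
    input.expectsMediaDataInRealTime = true
    input.transform = transform
    return input
}

// Usage, e.g. inside configureAssetWriter():
// videoInput = makeVideoWriterInput(size: videoSize, transform: getVideoTransform())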
I have found a solution to my problem. The solution is to export the video using AVAssetExportSession, letting it set the video size and handle the rotation at export time rather than during recording. I still have an issue where I need to fix the scale factor to go from my original video size to the smaller 640x480 resolution, but at least I solved my rotation issues. Please see the updated code below.
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
//MARK: - Outlet
#IBOutlet weak var previewView: UIView!
#IBOutlet var playStopButton: UIButton!
//MARK: - Private Variables
let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
private let captureSession = AVCaptureSession()
var outputUrl: URL {
get {
if let url = _outputUrl {
return url
}
_outputUrl = outputDirectory.appendingPathComponent("video.mp4")
return _outputUrl!
}
}
private var _outputUrl: URL?
var exportUrl: URL {
get {
if let url = _exportUrl {
return url
}
_exportUrl = outputDirectory.appendingPathComponent("video_encoded.mp4")
return _exportUrl!
}
}
private var _exportUrl: URL?
var outputDirectory: URL {
get {
if let url = _outputDirectory {
return url
}
_outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
return _outputDirectory!
}
}
private var _outputDirectory: URL?
private var assetWriter: AVAssetWriter?
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
private var videoOutput: AVCaptureVideoDataOutput?
private var audioOutput: AVCaptureAudioDataOutput?
private var isRecording = false
private var isWriting = false
private var videoSize = CGSize(width: 640, height: 480)
private var exportPreset = AVAssetExportPreset640x480
//MARK: - View Life-cycle
override func viewDidLoad() {
super.viewDidLoad()
videoQueue.async {
do {
try self.configureCaptureSession()
DispatchQueue.main.sync {
self.configurePreview()
}
} catch {
DispatchQueue.main.async {
self.showAlert("Unable to configure capture session")
}
}
}
}
override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
return .portrait
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
//MARK: - Capture Session
private func configureCaptureSession() throws {
do {
// configure capture devices
let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
let camInput = try AVCaptureDeviceInput(device: camDevice)
let micInput = try AVCaptureDeviceInput(device: micDevice)
if captureSession.canAddInput(camInput) {
captureSession.addInput(camInput)
}
if captureSession.canAddInput(micInput) {
captureSession.addInput(micInput)
}
// configure audio/video output
videoOutput = AVCaptureVideoDataOutput()
videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let v = videoOutput {
captureSession.addOutput(v)
}
audioOutput = AVCaptureAudioDataOutput()
audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let a = audioOutput {
captureSession.addOutput(a)
}
// configure audio session
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try audioSession.setActive(true)
var micPort: AVAudioSessionPortDescription?
if let inputs = audioSession.availableInputs {
for port in inputs {
if port.portType == AVAudioSessionPortBuiltInMic {
micPort = port
break;
}
}
}
if let port = micPort, let dataSources = port.dataSources {
for source in dataSources {
if source.orientation == AVAudioSessionOrientationFront {
try audioSession.setPreferredInput(port)
break
}
}
}
} catch {
print("Failed to configure audio/video capture session")
throw error
}
}
private func configureAssetWriter() throws {
prepareVideoFile()
do {
if assetWriter != nil {
assetWriter = nil
videoInput = nil
audioInput = nil
}
assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)
guard let writer = assetWriter else {
print("Asset writer not created")
return
}
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: NSNumber(value: Float(videoSize.width)),
AVVideoHeightKey: NSNumber(value: Float(videoSize.height))]
videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
videoInput?.expectsMediaDataInRealTime = true
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2]
audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
audioInput?.expectsMediaDataInRealTime = true
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
if writer.canAdd(vi) {
writer.add(vi)
}
if writer.canAdd(ai) {
writer.add(ai)
}
} catch {
print("Failed to configure asset writer")
throw error
}
}
private func prepareVideoFile() {
if FileManager.default.fileExists(atPath: outputUrl.path) {
do {
try FileManager.default.removeItem(at: outputUrl)
} catch {
print("Unable to remove file at URL \(outputUrl)")
}
}
if !FileManager.default.fileExists(atPath: outputDirectory.path) {
do {
try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
} catch {
print("Unable to create directory at URL \(outputDirectory)")
}
}
}
private func configurePreview() {
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
previewLayer.frame = previewView.bounds
previewView.layer.addSublayer(previewLayer)
}
}
private func getVideoSize() -> CGSize {
if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {
if videoSize.width > videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
} else {
if videoSize.width < videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
}
}
//MARK: - Controls
private func startRecording() {
videoQueue.async {
do {
try self.configureAssetWriter()
self.captureSession.startRunning()
} catch {
print("Unable to start recording")
DispatchQueue.main.async { self.showAlert("Unable to start recording") }
}
}
isRecording = true
playStopButton.setTitle("Stop Recording", for: .normal)
print("Recording did start")
}
private func stopRecording() {
if !isRecording {
return
}
videoQueue.async {
self.assetWriter?.finishWriting {
print("Asset writer did finish writing")
self.isWriting = false
}
self.captureSession.stopRunning()
do {
try self.export()
} catch {
print("Export failed")
DispatchQueue.main.async { self.showAlert("Unable to export video") }
}
}
isRecording = false
playStopButton.setTitle("Start Recording", for: .normal)
print("Recording did stop")
}
//MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
guard let w = assetWriter else {
print("Asset writer not configured")
return
}
guard let vo = videoOutput else {
print("Video output not configured")
return
}
guard let ao = audioOutput else {
print("Audio output not configured")
return
}
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
print("Writer status \(w.status.rawValue)")
if let e = w.error {
print("Writer error \(e)")
stopRecording()
return
}
switch w.status {
case .unknown:
if !isWriting {
isWriting = true
w.startWriting()
w.startSession(atSourceTime: st)
}
return
case .completed:
print("Video writing completed")
return
case .cancelled:
print("Video writing cancelled")
return
case .failed:
print("Video writing failed")
return
default:
print("Video is writing")
}
if vo == captureOutput {
if !vi.append(sampleBuffer) {
print("Unable to write to video buffer")
}
} else if ao == captureOutput {
if !ai.append(sampleBuffer) {
print("Unable to write to audio buffer")
}
}
}
//MARK: - Export
private func getVideoComposition(asset: AVAsset, videoSize: CGSize) -> AVMutableVideoComposition? {
guard let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
print("Unable to get video tracks")
return nil
}
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = videoSize
let seconds: Float64 = Float64(1.0 / videoTrack.nominalFrameRate)
videoComposition.frameDuration = CMTimeMakeWithSeconds(seconds, 600);
let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
var transforms = asset.preferredTransform
var isPortrait = true;
if (transforms.a == 0.0 && transforms.b == 1.0 && transforms.c == -1.0 && transforms.d == 0)
|| (transforms.a == 0.0 && transforms.b == -1.0 && transforms.c == 1.0 && transforms.d == 0) {
isPortrait = false;
}
if isPortrait {
transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0.degreesToRadians)))
transforms = transforms.concatenating(CGAffineTransform(translationX: videoSize.width, y: 0))
}
layerInst.setTransform(transforms, at: kCMTimeZero)
let inst = AVMutableVideoCompositionInstruction()
inst.backgroundColor = UIColor.black.cgColor
inst.layerInstructions = [layerInst]
inst.timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
videoComposition.instructions = [inst]
return videoComposition
}
private func export() throws {
let videoAsset = AVURLAsset(url: outputUrl)
if FileManager.default.fileExists(atPath: exportUrl.path) {
try FileManager.default.removeItem(at: exportUrl)
}
let videoSize = getVideoSize()
guard let encoder = AVAssetExportSession(asset: videoAsset, presetName: exportPreset) else {
print("Unable to create encoder")
return
}
guard let vidcomp = getVideoComposition(asset: videoAsset, videoSize: videoSize) else {
print("Unable to create video composition")
return
}
encoder.videoComposition = vidcomp
encoder.outputFileType = AVFileTypeMPEG4 // MP4 format
encoder.outputURL = exportUrl
encoder.shouldOptimizeForNetworkUse = true
encoder.exportAsynchronously(completionHandler: {
print("Video exported successfully")
})
}
//MARK: Helpers
private func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
//MARK: Actions
#IBAction func startStopTapped(sender: AnyObject) {
if isRecording {
stopRecording()
} else {
startRecording()
}
}
}
I found that the easiest approach is to set the preferredTransform on the video composition track depending on the required orientation.
Solution
// Devices orientation
var orientation = UIDevice.current.orientation
// The composition
let audioVideoComposition = AVMutableComposition()
// The video track of the composition
let videoCompositionTrack = audioVideoComposition
.addMutableTrack(withMediaType: .video, preferredTrackID: .init())!
// Set preferred transform
videoCompositionTrack.preferredTransform = getVideoTransform()
Helper function and extension
func getVideoTransform() -> CGAffineTransform {
switch orientation {
case .portrait:
return CGAffineTransform(rotationAngle: 90.degreesToRadians)
case .portraitUpsideDown:
return CGAffineTransform(rotationAngle: 180.degreesToRadians)
case .landscapeLeft:
return CGAffineTransform(rotationAngle: 0.degreesToRadians)
case .landscapeRight:
return CGAffineTransform(rotationAngle: 180.degreesToRadians)
default:
return CGAffineTransform(rotationAngle: 90.degreesToRadians)
}
}
extension BinaryInteger {
var degreesToRadians: CGFloat { CGFloat(self) * .pi / 180 }
}
extension FloatingPoint {
var degreesToRadians: Self { self * .pi / 180 }
var radiansToDegrees: Self { self * 180 / .pi }
}
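For completeness, a hedged usage sketch that is not part of the original answer (newer Swift syntax; recordedAsset, exportUrl and getVideoTransform() are assumptions): insert the recorded video track into the composition track, attach the preferred transform, and export.
import AVFoundation

// Sketch: compose the recorded asset, carry the display transform on the
// composition track, and export with AVAssetExportSession.
// (The audio track is omitted for brevity.)
func export(recordedAsset: AVAsset, to exportUrl: URL, completion: @escaping (Bool) -> Void) {
    let composition = AVMutableComposition()
    guard
        let sourceTrack = recordedAsset.tracks(withMediaType: .video).first,
        let compositionTrack = composition.addMutableTrack(withMediaType: .video,
                                                           preferredTrackID: kCMPersistentTrackID_Invalid)
    else { completion(false); return }

    let range = CMTimeRange(start: CMTime.zero, duration: recordedAsset.duration)
    try? compositionTrack.insertTimeRange(range, of: sourceTrack, at: CMTime.zero)
    compositionTrack.preferredTransform = getVideoTransform() // display matrix only

    guard let session = AVAssetExportSession(asset: composition,
                                             presetName: AVAssetExportPreset640x480) else {
        completion(false)
        return
    }
    session.outputURL = exportUrl
    session.outputFileType = .mp4
    session.exportAsynchronously {
        completion(session.status == .completed)
    }
}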
Just swap the width and height in the writer settings, and don't forget about HEVC:
assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)
https://developer.apple.com/videos/play/wwdc2017/503
https://developer.apple.com/videos/play/wwdc2017/511
- (BOOL)configureWriterInput {
const BOOL isError = YES;
AVFileType mov = AVFileTypeQuickTimeMovie;
NSDictionary<NSString *, id> *settings;
// HEVC (note: in Objective-C the availability check is @available, not #available)
if (@available(iOS 11.0, *)) {
NSArray<AVVideoCodecType> *available = [self.sessionOutput availableVideoCodecTypesForAssetWriterWithOutputFileType:mov];
const BOOL isHEVC = [available containsObject:AVVideoCodecTypeHEVC];
if (isHEVC) {
settings = [self.sessionOutput recommendedVideoSettingsForVideoCodecType:AVVideoCodecTypeHEVC assetWriterOutputFileType:mov];
} else {
settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
}
} else {
settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
}
if (![writer canApplyOutputSettings:settings forMediaType:AVMediaTypeVideo]) {
return isError;
}
// swap width and height to fix orientation
NSMutableDictionary<NSString *, id> *rotate = [settings mutableCopy];
if (settings[AVVideoHeightKey] && settings[AVVideoWidthKey]) {
rotate[AVVideoHeightKey] = settings[AVVideoWidthKey];
rotate[AVVideoWidthKey] = settings[AVVideoHeightKey];
if ([writer canApplyOutputSettings:rotate forMediaType:AVMediaTypeVideo]) {
settings = rotate;
}
}
writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
// AVCaptureConnection *con = [self.sessionOutput connectionWithMediaType:AVMediaTypeVideo];
// const AVCaptureVideoOrientation o = con.videoOrientation;
// writerInput.transform = [[self class] configureOrientationTransform:o];
if ([writer canAddInput:writerInput]) {
[writer addInput:writerInput];
return !isError;
}
return isError;
}
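A hedged Swift counterpart of the same idea, written in more recent Swift syntax than the rest of this page (videoOutput and writer are assumed to already exist; the function name is an illustration only):
import AVFoundation

// Sketch: take the recommended writer settings from the video data output and
// swap AVVideoWidthKey / AVVideoHeightKey so a portrait recording is encoded
// with portrait pixel dimensions.
func portraitVideoSettings(for videoOutput: AVCaptureVideoDataOutput,
                           writer: AVAssetWriter) -> [String: Any]? {
    guard var settings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: .mov),
          let width = settings[AVVideoWidthKey],
          let height = settings[AVVideoHeightKey] else {
        return nil
    }
    settings[AVVideoWidthKey] = height
    settings[AVVideoHeightKey] = width
    return writer.canApply(outputSettings: settings, forMediaType: .video) ? settings : nil
}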

How to buffer audio using AVPlayer in iOS?

I want to play streaming audio from the Internet. I wrote code that plays the stream, but it doesn't have any buffering, so if the signal is weak the application stops playing audio. This is my code:
import UIKit
import AVFoundation
import MediaPlayer
import AudioToolbox
class ViewController: UIViewController {
var playerItem:AVPlayerItem?
var player:AVPlayer?
#IBOutlet weak var PlayButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
var buffer = AVAudioBuffer ()
let url = NSURL (string: "http://radio.afera.com.pl/afera64.aac")
playerItem = AVPlayerItem(URL: url!)
player = AVPlayer(playerItem: playerItem!)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
#IBAction func PlayButtonTapped(sender: AnyObject)
{
if ((player!.rate != 0) && (player!.error == nil))
{
player!.pause()
PlayButton.setImage(UIImage(named:"Play"), forState: UIControlState.Normal)
}
else
{
player!.play()
PlayButton.setImage(UIImage(named:"Stop"), forState:UIControlState.Normal)
}
}
}
I have no idea how to buffer this stream. I searched Apple's documentation but can't find anything in Swift.
I found something like AVAudioBuffer, but I don't know how to use it, or whether it is the right way to solve this problem.
P.S. C# has documentation on MSDN; is there something similar from Apple for Swift?
The answer is to create an error delegate that fires a selector every time the player stops (the error message changes when the network connection is interrupted or the stream doesn't load properly):
Here are the delegates, declared just outside and above my RadioPlayer class:
protocol errorMessageDelegate {
func errorMessageChanged(newVal: String)
}
class:
import Foundation
import AVFoundation
import UIKit
class RadioPlayer : NSObject {
static let sharedInstance = RadioPlayer()
var instanceDelegate:sharedInstanceDelegate? = nil
var sharedInstanceBool = false {
didSet {
if let delegate = self.instanceDelegate {
delegate.sharedInstanceChanged(self.sharedInstanceBool)
}
}
}
private var player = AVPlayer(URL: NSURL(string: Globals.radioURL)!)
private var playerItem = AVPlayerItem?()
private var isPlaying = false
var errorDelegate:errorMessageDelegate? = nil
var errorMessage = "" {
didSet {
if let delegate = self.errorDelegate {
delegate.errorMessageChanged(self.errorMessage)
}
}
}
override init() {
super.init()
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(URL: NSURL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronouslyForKeys([statusKey], completionHandler: {
var error: NSError? = nil
dispatch_async(dispatch_get_main_queue(), {
let status: AVKeyValueStatus = asset.statusOfValueForKey(statusKey, error: &error)
if status == AVKeyValueStatus.Loaded{
let playerItem = AVPlayerItem(asset: asset)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
NSNotificationCenter.defaultCenter().addObserverForName(
AVPlayerItemFailedToPlayToEndTimeNotification,
object: nil,
queue: nil,
usingBlock: { notification in
print("Status: Failed to continue")
self.errorMessage = "Stream was interrupted"
})
print("Initializing new player")
}
func resetPlayer() {
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(URL: NSURL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronouslyForKeys([statusKey], completionHandler: {
var error: NSError? = nil
dispatch_async(dispatch_get_main_queue(), {
let status: AVKeyValueStatus = asset.statusOfValueForKey(statusKey, error: &error)
if status == AVKeyValueStatus.Loaded{
let playerItem = AVPlayerItem(asset: asset)
//playerItem.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.New, context: &ItemStatusContext)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
}
func bufferFull() -> Bool {
return bufferAvailableSeconds() > 45.0
}
func bufferAvailableSeconds() -> NSTimeInterval {
// Check if there is a player instance
if ((player.currentItem) != nil) {
// Get current AVPlayerItem
let item: AVPlayerItem = player.currentItem!
if (item.status == AVPlayerItemStatus.ReadyToPlay) {
let timeRangeArray: NSArray = item.loadedTimeRanges
if timeRangeArray.count < 1 { return(CMTimeGetSeconds(kCMTimeInvalid)) }
let aTimeRange: CMTimeRange = timeRangeArray.objectAtIndex(0).CMTimeRangeValue
//let startTime = CMTimeGetSeconds(aTimeRange.end)
let loadedDuration = CMTimeGetSeconds(aTimeRange.duration)
return (NSTimeInterval)(loadedDuration);
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
func play() {
player.play()
isPlaying = true
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func pause() {
player.pause()
isPlaying = false
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func currentlyPlaying() -> Bool {
return isPlaying
}
}
protocol sharedInstanceDelegate {
func sharedInstanceChanged(newVal: Bool)
}
RadioViewController:
import UIKit
import AVFoundation
class RadioViewController: UIViewController, errorMessageDelegate, sharedInstanceDelegate {
// MARK: Properties
var firstErrorSkip = true
var firstInstanceSkip = true
#IBOutlet weak var listenLabel: UILabel!
#IBOutlet weak var radioSwitch: UIImageView!
#IBAction func back(sender: AnyObject) {
print("Dismissing radio view")
if let navigationController = self.navigationController
{
navigationController.popViewControllerAnimated(true)
}
}
#IBAction func switched(sender: AnyObject) {
toggle()
}
override func viewDidLoad() {
super.viewDidLoad()
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
print("AVAudioSession Category Playback OK")
do {
try AVAudioSession.sharedInstance().setActive(true)
print("AVAudioSession is Active")
} catch let error as NSError {
print(error.localizedDescription)
}
} catch let error as NSError {
print(error.localizedDescription)
}
RadioPlayer.sharedInstance.errorDelegate = self
RadioPlayer.sharedInstance.instanceDelegate = self
if RadioPlayer.sharedInstance.currentlyPlaying() {
radioSwitch.image = UIImage(named: "Radio_Switch_Active")
listenLabel.text = "Click to Pause Radio Stream:"
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func toggle() {
if RadioPlayer.sharedInstance.currentlyPlaying() {
pauseRadio()
} else {
playRadio()
}
}
func playRadio() {
firstErrorSkip = false
firstInstanceSkip = false
if RadioPlayer.sharedInstance.errorMessage != "" || RadioPlayer.sharedInstance.bufferFull() {
resetStream()
} else {
radioSwitch.image = UIImage(named: "Radio_Switch_Active")
listenLabel.text = "Click to Pause Radio Stream:"
RadioPlayer.sharedInstance.play()
}
}
func pauseRadio() {
RadioPlayer.sharedInstance.pause()
radioSwitch.image = UIImage(named: "Radio_Switch_Inactive")
listenLabel.text = "Click to Play Radio Stream:"
}
func resetStream() {
print("Reloading interrupted stream");
RadioPlayer.sharedInstance.resetPlayer()
//RadioPlayer.sharedInstance = RadioPlayer();
RadioPlayer.sharedInstance.errorDelegate = self
RadioPlayer.sharedInstance.instanceDelegate = self
if RadioPlayer.sharedInstance.bufferFull() {
radioSwitch.image = UIImage(named: "Radio_Switch_Active")
listenLabel.text = "Click to Pause Radio Stream:"
RadioPlayer.sharedInstance.play()
} else {
playRadio()
}
}
func errorMessageChanged(newVal: String) {
if !firstErrorSkip {
print("Error changed to '\(newVal)'")
if RadioPlayer.sharedInstance.errorMessage != "" {
print("Showing Error Message")
let alertController = UIAlertController(title: "Stream Failure", message: RadioPlayer.sharedInstance.errorMessage, preferredStyle: UIAlertControllerStyle.Alert)
alertController.addAction(UIAlertAction(title: "Dismiss", style: UIAlertActionStyle.Default, handler: nil))
self.presentViewController(alertController, animated: true, completion: nil)
pauseRadio()
}
} else {
print("Skipping first init")
firstErrorSkip = false
}
}
func sharedInstanceChanged(newVal: Bool) {
if !firstInstanceSkip {
print("Detected New Instance")
if newVal {
RadioPlayer.sharedInstance.play()
}
} else {
firstInstanceSkip = false
}
}
}
Hope this will help :)
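A related, hedged sketch that is not part of the answer above: besides checking loadedTimeRanges, you can observe the AVPlayerItem's buffer flags and react when the buffer drains or fills (modern block-based KVO, newer syntax than the Swift 2 code above):
import AVFoundation

// Sketch: keep the observation tokens alive for as long as the item plays.
var bufferObservations: [NSKeyValueObservation] = []

func observeBuffering(of item: AVPlayerItem) {
    bufferObservations.append(item.observe(\.isPlaybackBufferEmpty, options: [.new]) { item, _ in
        if item.isPlaybackBufferEmpty {
            print("Buffer is empty - playback is about to stall")
        }
    })
    bufferObservations.append(item.observe(\.isPlaybackLikelyToKeepUp, options: [.new]) { item, _ in
        if item.isPlaybackLikelyToKeepUp {
            print("Enough is buffered - safe to resume playback")
        }
    })
}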
Change
playerItem = AVPlayerItem?()
to
playerItem: AVPlayerItem?

Resources