I am using this code to start playing local video chunks that are referenced to form a playlist.
The very same code works in one project, but not in another.
In the project I am working on right now, I can see the first chunk get loaded, and the first frame also shows up, but the AVPlayer never starts playing because it never gets the AVPlayerStatusReadyToPlay status change:
- (void)loadAssetAsync
{
NSLog(#"loadAssetAsync for URL: %#", videoURL);
/**
* Create an asset for inspection of a resource referenced by a given URL.
* Load the values for the asset keys "tracks", "playable".
*/
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
NSArray *requestedKeys = [NSArray arrayWithObjects:kTracksKey, kPlayableKey, nil];
// Tells the asset to load the values of any of the specified keys that are not already loaded.
[asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler:
^{
dispatch_async( dispatch_get_main_queue(),
^{
// IMPORTANT: Must dispatch to main queue in order to operate on the AVPlayer and AVPlayerItem.
[self prepareToPlayAsset:asset withKeys:requestedKeys];
});
}];
}
/**
* Invoked at the completion of the loading of the values for all keys on the asset that required.
*/
- (void)prepareToPlayAsset:(AVURLAsset *)asset withKeys:(NSArray *)requestedKeys
{
//assert([NSThread isMainThread]);
// Make sure that the value of each key has loaded successfully.
for (NSString *thisKey in requestedKeys)
{
NSError *error = nil;
AVKeyValueStatus keyStatus = [asset statusOfValueForKey:thisKey error:&error];
if (keyStatus == AVKeyValueStatusFailed)
{
BVLogWarn(#"%#: %#", THIS_FILE, error.localizedDescription);
[self handleErrorForProxy:error];
[self assetFailedToPrepareForPlayback];
return;
}
}
if (!asset.playable)
{
BVLogWarn(#"%#: Item cannot be played", THIS_FILE);
[self handleErrorForProxy:nil];
[self assetFailedToPrepareForPlayback];
return;
}
// Create a new instance of AVPlayerItem from the now successfully loaded AVAsset.
playerItem = [[AVPlayerItem alloc] initWithAsset:asset];
// Observe the player item "status" key to determine when it is ready to play.
[playerItem addObserver:self
forKeyPath:kStatusKey
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:BVPlayerItemStatusObserverContext];
[playerItem addObserver:self
forKeyPath:kBufferEmpty
options:NSKeyValueObservingOptionNew
context:BVPLayerBufferEmptyObserverContext];
[playerItem addObserver:self
forKeyPath:kLikelyToKeepUp
options:NSKeyValueObservingOptionNew
context:BVPlayerLikelyToKeepUpObserverContext];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:playerItem];
// Get a new AVPlayer initialized to play the specified player item.
player = [[AVPlayer alloc] initWithPlayerItem:playerItem];
// Do nothing if the item has finished playing
[player setActionAtItemEnd:AVPlayerActionAtItemEndNone];
/* Observe the AVPlayer "currentItem" property to find out when any
AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
occur.*/
[player addObserver:self
forKeyPath:kCurrentItemKey
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:BVCurrentItemObserverContext];
// Observe the AVPlayer "rate" property to update the scrubber control.
[player addObserver:self
forKeyPath:kRateKey
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:BVRateObserverContext];
[player replaceCurrentItemWithPlayerItem:playerItem];
}
- (void)observeValueForKeyPath:(NSString*) keyPath
ofObject:(id)object
change:(NSDictionary*)change
context:(void*)context
{
// AVPlayerItem "status" property value observer.
if (context == BVPlayerItemStatusObserverContext)
{
AVPlayerStatus status = [[change objectForKey:NSKeyValueChangeNewKey] integerValue];
switch (status)
{
case AVPlayerStatusUnknown:
{
[self removeTimeObserver];
[self syncTimeScrubber];
[timeControl setEnabled:NO];
[playButton setEnabled:NO];
[fullscreenButton setEnabled:NO];
[loadingIndicator startAnimating];
}
break;
case AVPlayerStatusReadyToPlay:
{
if (firstPlayback|becomeActive)
{
[timeControl setEnabled:YES];
[playButton setEnabled:YES];
[fullscreenButton setEnabled:YES];
[upperControls setHidden:NO];
[lowerControls setHidden:NO];
[loadingIndicator stopAnimating];
if (firstPlayback) {
[playbackView setNeedsDisplay];
}
if (self.shouldAutoplay)
[player play];
if (firstPlayback) {
timeRemaining.text = [NSString stringWithFormat:@"-%@", timeStringForSeconds(CMTimeGetSeconds(playerItem.duration))];
}
firstPlayback = NO;
controlsHidden = NO;
if (!isSeeking)
[self startHideControlsTimer];
}
if (becomeActive) {
dispatch_async(dispatch_get_main_queue(), ^{
[player seekToTime:CMTimeMakeWithSeconds(lastTimeStop, NSEC_PER_SEC)
toleranceBefore:kCMTimeZero
toleranceAfter:kCMTimeZero
completionHandler:^(BOOL finished) {
if (finished && rateToRestoreAfterScrubbing)
{
[player setRate:rateToRestoreAfterScrubbing];
rateToRestoreAfterScrubbing = 0.f;
}
[self addTimeObserver];
[playbackView setPlayer:player];
becomeActive = NO;
}];
});
}else{
[self addTimeObserver];
}
}
break;
case AVPlayerStatusFailed:
{
AVPlayerItem *thePlayerItem = (AVPlayerItem *)object;
BVLogWarn(#"%#: %#", THIS_FILE, thePlayerItem.error.localizedDescription);
[self handleErrorForProxy:thePlayerItem.error];
[self assetFailedToPrepareForPlayback];
}
break;
}
}
// AVPlayer "rate" property value observer.
else if (context == BVRateObserverContext)
{
[self updatePlayPauseButton];
}
// AVPlayer "currentItem" buffer is empty observer
else if (context == BVPLayerBufferEmptyObserverContext)
{
[loadingIndicator startAnimating];
}
// AVPlayer "currentItem" is likely to keep up observer
else if (context == BVPlayerLikelyToKeepUpObserverContext)
{
[loadingIndicator stopAnimating];
}
// AVPlayer "currentItem" property observer.
else if (context == BVCurrentItemObserverContext)
{
AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
// New player item null?
if (newPlayerItem == (id)[NSNull null])
{
[playButton setEnabled:NO];
[timeControl setEnabled:NO];
} else // Replacement of player currentItem has occurred
{
if (!becomeActive) {
[playbackView setPlayer:player];
}else{
}
[playbackView setVideoFillMode:[self scalingMode]];
[self updatePlayPauseButton];
}
}
else
{
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
return;
}
Did you try putting a log statement before your if? Maybe the notification is working, but execution gets stuck because of your if condition:
case AVPlayerStatusReadyToPlay:
{
NSLog(#"NOTIFICATION TEST PASSED");
if (firstPlayback|becomeActive) {}
}
If the app is playing audio and the phone screen is locked, the lock-screen control screen is shown, but I am not able to take any action on the AVPlayer.
In my AppDelegate I implemented:
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
MPRemoteCommandCenter *rcc = [MPRemoteCommandCenter sharedCommandCenter];
[[rcc skipForwardCommand] setEnabled:NO];
[[rcc skipBackwardCommand] setEnabled:NO];
[[rcc nextTrackCommand] setEnabled:NO];
[[rcc previousTrackCommand] setEnabled:NO];
[[rcc skipForwardCommand] setEnabled:NO];
[[rcc skipBackwardCommand] setEnabled:NO];
rcc.playCommand.enabled = YES;
rcc.pauseCommand.enabled = YES;
[[MPRemoteCommandCenter sharedCommandCenter].playCommand addTarget:self action:@selector(play)];
[[MPRemoteCommandCenter sharedCommandCenter].pauseCommand addTarget:self action:@selector(pause)];
return YES;
}
- (void) play {
[[MyVideoController instance] play];
}
- (void) pause {
[[MyVideoController instance] pause];
}
class MyVideoController consists of:
- (void) pause {
[self.avPlayer pause];
}
- (void) play {
[self.avPlayer play];
}
Even though these methods are triggered (I added breakpoints to check), no action is taken on the AVPlayer. No matter what, the AVPlayer doesn't pause.
Is there any way to pause the AVPlayer?
EDIT 1:
Adding the complete code
In my AppDelegate:
- (void) remoteControlReceivedWithEvent: (UIEvent *) event {
[[ZVideoPlayerController instance] eventReceived:event];
if (event.type == UIEventTypeRemoteControl) {
switch (event.subtype) {
case UIEventSubtypeRemoteControlTogglePlayPause: {
break;
}
case UIEventSubtypeRemoteControlPlay: {
[[ZVideoPlayerController instance] play];
break;
}
case UIEventSubtypeRemoteControlPause: {
[[ZVideoPlayerController instance] pause];
break;
}
default:
break;
}
}
}
- (void)applicationDidEnterBackground:(UIApplication *)application {
[[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
[self becomeFirstResponder];
}
- (void)applicationDidBecomeActive:(UIApplication *)application {
[[UIApplication sharedApplication] endReceivingRemoteControlEvents];
}
I am receiving events, but the audio doesn't pause when the pause method is called on the AVPlayer.
EDIT 2:
The instance declaration in the PlayerController class:
+ (instancetype)instance {
static id instance = nil;
if (instance == nil)
{
static dispatch_once_t onceToken = 0;
dispatch_once(&onceToken, ^(void) {
NSAssert(instance == nil, @"Singleton instance is already allocated.");
instance = [[super allocWithZone:NULL] init];
});
}
return instance;
}
Initialising the AVPlayer:
AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:avAsset];
AVAudioSession *session = [AVAudioSession sharedInstance];
[session setCategory:AVAudioSessionCategoryPlayback error:nil];
NSError *activationError = nil;
BOOL success = [[AVAudioSession sharedInstance] setActive: YES error: &activationError];
NSMutableDictionary *songInfo = [[NSMutableDictionary alloc] init];
MPMediaItemArtwork *albumArt = [[MPMediaItemArtwork alloc] initWithImage: [UIImage imageNamed:@"Audio_Thumbnail_Play"]];
[songInfo setObject:title forKey:MPMediaItemPropertyTitle];
[songInfo setObject:@"100" forKey:MPMediaItemPropertyPlaybackDuration];
[songInfo setObject:albumArt forKey:MPMediaItemPropertyArtwork];
[[MPNowPlayingInfoCenter defaultCenter] setNowPlayingInfo:songInfo];
self.avPlayer = [AVPlayer playerWithPlayerItem:playerItem];
self.avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
I found a solution to the problem. Since I was getting a nil value for avPlayer, I used my PageViewController class to get the instance of PlayerController, and then used that instance to play and pause my AVPlayer, because it holds the reference to avPlayer.
- (PlayerController *)getVideoController {
NSArray *controllers = [UtiliyClass getNavigationController].viewControllers;
PageViewController *pageController = nil;
for (UIViewController *cont in controllers) {
if ([cont isKindOfClass:[PageViewController class]]) {
pageController = (PageViewController *)cont;
break;
}
}
if (pageController == nil) {
return nil;
}
NSArray *objectsController = pageController.pageController.viewControllers;
PlayerController *videoPlayerController = nil;
for (UIViewController *item in objectsController) {
if ([item isKindOfClass:[PlayerController class]]) {
videoPlayerController = (PlayerController *)item;
break;
}
}
return videoPlayerController;
}
- (void) pause {
PlayerController *controller = [self getVideoController];
[controller.avPlayer pause];
}
- (void) play {
PlayerController *controller = [self getVideoController];
[controller.avPlayer play];
}
You need to register for remote control events to update the player state when the application is locked. To do that, follow these steps:
Add this in your AppDelegate, ideally in applicationDidEnterBackground:
[[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
[self becomeFirstResponder];
And this in applicationDidBecomeActive:
[[UIApplication sharedApplication] endReceivingRemoteControlEvents];
Receive remote control events by adding this in your AppDelegate. This will listen for all actions while the phone is locked.
- (void)remoteControlReceivedWithEvent:(UIEvent *)event {
if (event.type == UIEventTypeRemoteControl){
// Call the method of your player where you want to make the change (pause, play).
// I am calling a shared view as an example; it's up to your logic how you want to handle it.
[[AudioPlayerView sharedPlayerView] remoteControlReceivedWithEvent:event];
}
}
And in that method, check for the event you want and update the state accordingly:
- (void)remoteControlReceivedWithEvent:(UIEvent *)event {
if (event.type == UIEventTypeRemoteControl){
switch (event.subtype){
case UIEventSubtypeRemoteControlPlay:
[[MyVideoController instance] play];
break;
case UIEventSubtypeRemoteControlPause:
[[MyVideoController instance] pause];
break;
case UIEventSubtypeRemoteControlTogglePlayPause:
// Check if the player is currently playing; call pause if so, otherwise call play
break;
default:
break;
}
}
}
In iOS 7.1 and later, use the shared MPRemoteCommandCenter object to register for remote control events. You do not need to call this method when using the shared command center object.
This method starts the delivery of remote control events using the responder chain. Remote-control events originate as commands issued by headsets and external accessories that are intended to control multimedia presented by an app. To stop the reception of remote-control events, you must call endReceivingRemoteControlEvents().
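For reference, here is a minimal sketch of registering play and pause handlers with the shared command center; it assumes the MyVideoController singleton from the question and is only an illustration of the API described above:
// Requires <MediaPlayer/MediaPlayer.h> (iOS 7.1+).
MPRemoteCommandCenter *commandCenter = [MPRemoteCommandCenter sharedCommandCenter];
// addTargetWithHandler: registers a block that runs when the lock screen or a
// remote accessory sends the corresponding command.
[commandCenter.playCommand addTargetWithHandler:^MPRemoteCommandHandlerStatus(MPRemoteCommandEvent *event) {
[[MyVideoController instance] play];
return MPRemoteCommandHandlerStatusSuccess;
}];
[commandCenter.pauseCommand addTargetWithHandler:^MPRemoteCommandHandlerStatus(MPRemoteCommandEvent *event) {
[[MyVideoController instance] pause];
return MPRemoteCommandHandlerStatusSuccess;
}];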
Add the following code in application:didFinishLaunchingWithOptions: to initialise the audio session and receive remote control events:
// Initialize the AVAudioSession here.
if (![[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:&myErr]) {
// Handle the error here.
NSLog(#"Audio Session error %#, %#", myErr, [myErr userInfo]);
}
else{
// Since there were no errors initializing the session, we can begin receiving remote control events
[[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
}
For receiving the commands, use this code:
- (void)remoteControlReceivedWithEvent:(UIEvent *)receivedEvent {
if (receivedEvent.type == UIEventTypeRemoteControl) {
switch (receivedEvent.subtype) {
case UIEventSubtypeRemoteControlPreviousTrack:
break;
case UIEventSubtypeRemoteControlNextTrack:
break;
case UIEventSubtypeRemoteControlPlay:
[[MyVideoController instance] play];
break;
case UIEventSubtypeRemoteControlPause:
[[MyVideoController instance] pause];
break;
default:
break;
}
}
}
I created a sample video app using AVPlayerViewController. It is working fine, but there are problems: after the video finishes, the AVPlayerViewController view is not removed. The other problem is setting constraints for the AVPlayerViewController so it has the default size in portrait and is full screen in landscape. I am new to constraints. Please download and run the project; the logger shows the constraint problems.
Source code project : http://www.filedropper.com/avplayerdemos
I see two problems with your code:
[asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler: ^{
dispatch_async(
dispatch_get_main_queue(), ^{
if (!asset.playable) {
return;
} else {
[self prepareToPlayAsset: asset withRequestedKeys: requestedKeys];
}
if (videoPlayerItem) {
[videoPlayerItem removeObserver:self forKeyPath:kkStatusKey];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVPlayerItemDidPlayToEndTimeNotification
object: videoPlayerItem];
}
});
}];
In the code above, you add the observer (inside prepareToPlayAsset) and then remove it right afterwards. So you should change it to this:
[asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler: ^{
dispatch_async(
dispatch_get_main_queue(), ^{
if (videoPlayerItem) {
[videoPlayerItem removeObserver:self forKeyPath:kkStatusKey];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVPlayerItemDidPlayToEndTimeNotification
object: videoPlayerItem];
}
if (!asset.playable) {
return;
} else {
[self prepareToPlayAsset: asset withRequestedKeys: requestedKeys];
}
});
}];
Problem 2:
- (void)prepareToPlayAsset: (AVURLAsset *)asset withRequestedKeys: (NSArray *)requestedKeys {
for (NSString *thisKey in requestedKeys) {
NSError *error = nil;
AVKeyValueStatus keyStatus = [asset statusOfValueForKey:thisKey error:&error];
switch (keyStatus) {
case AVKeyValueStatusUnknown:
NSLog(#"%# AVKeyValueStatusUnknown", thisKey);
break;
case AVKeyValueStatusFailed:
NSLog(#"Error! PlayAsset failed.\nAVKey : %#.\nError: %#", thisKey, error);
return;
break;
case AVKeyValueStatusLoading:
NSLog(#"%# AVKeyValueStatusLoading", thisKey);
break;
case AVKeyValueStatusCancelled:
NSLog(#"%# AVKeyValueStatusCancelled", thisKey);
break;
case AVKeyValueStatusLoaded: {
videoPlayerItem = [AVPlayerItem playerItemWithAsset: asset];
[videoPlayerItem addObserver:self forKeyPath: kkStatusKey options:0 context:nil];
videoPlayer = [AVPlayer playerWithPlayerItem: videoPlayerItem];
/**
* Creating the videoAdplayer through passing the avplayer object
*/
[self createVideoPlayer: videoPlayer];
if ([thisKey isEqualToString: #"duration"]) {
} else if ([thisKey isEqualToString: #"tracks"]) {
NSLog(#"\n\n asset.tracks : %# \n\n", asset.tracks);
} else if ([thisKey isEqualToString: #"metadata"]) {
NSLog(#"\n\n assetMetadata : %# \n\n", asset.metadata);
}
}
break;
default:
break;
}
}
if (!asset.playable) {
return;
}
}
In this loop, you just check that everything has loaded and return if anything failed. In this case there are two keys, so your code as written will add two child view controllers and play two player items. So change the code to this:
- (void)prepareToPlayAsset: (AVURLAsset *)asset withRequestedKeys: (NSArray *)requestedKeys {
for (NSString *thisKey in requestedKeys) {
NSError *error = nil;
AVKeyValueStatus keyStatus = [asset statusOfValueForKey:thisKey error:&error];
switch (keyStatus) {
case AVKeyValueStatusUnknown:
NSLog(#"%# AVKeyValueStatusUnknown", thisKey);
break;
case AVKeyValueStatusFailed:
NSLog(#"Error! PlayAsset failed.\nAVKey : %#.\nError: %#", thisKey, error);
return;
break;
case AVKeyValueStatusLoading:
NSLog(#"%# AVKeyValueStatusLoading", thisKey);
break;
case AVKeyValueStatusCancelled:
NSLog(#"%# AVKeyValueStatusCancelled", thisKey);
break;
case AVKeyValueStatusLoaded: {
}
break;
default:
break;
}
}
videoPlayerItem = [AVPlayerItem playerItemWithAsset: asset];
[videoPlayerItem addObserver:self forKeyPath: kkStatusKey options:0 context:nil];
videoPlayer = [AVPlayer playerWithPlayerItem: videoPlayerItem];
/**
* Creating the videoAdplayer through passing the avplayer object
*/
[self createVideoPlayer: videoPlayer];
if (!asset.playable) {
return;
}
}
I'm pretty sure that if you change your demo like this, it will work OK.
I have this issue, and it is not as easy as I thought... I have lost some time on it and have nothing yet.
I know I have a pause method, but I want to stop and start the player and avoid crashing the application.
Could anyone help? Many thanks.
//First.
-(void) setupAVPlayerForURL{
@try {
NSURL *url = [[NSURL alloc] initWithString:URL_RADIO_STREAM];
AVAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
anItem = [AVPlayerItem playerItemWithAsset:asset];
[anItem addObserver:self forKeyPath:#"status" options:NSKeyValueObservingOptionNew context:nil];
[anItem addObserver:self forKeyPath:#"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil];
[anItem addObserver:self forKeyPath:#"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil];
player = [AVPlayer playerWithPlayerItem:anItem];
//[player addObserver:self forKeyPath:#"status" options:0 context:nil];
}
@catch (NSException *exception) {
NSLog(@"%@", exception.reason);
[[self textDebug] setText:exception.description];
}
}
//After.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
/*if (!player)
{
return;
}*/
if ([object isKindOfClass:[AVPlayerItem class]])
{
AVPlayerItem *playerItem = (AVPlayerItem *)object;
if ([keyPath isEqualToString:#"status"])
{ //yes->check it...
switch(playerItem.status)
{
case AVPlayerItemStatusFailed:
NSLog(#"player item status failed");
self.BtnPlay.userInteractionEnabled = FALSE;
break;
case AVPlayerItemStatusReadyToPlay:
NSLog(#"player item status is ready to play");
self.BtnPlay.userInteractionEnabled = TRUE;
break;
case AVPlayerItemStatusUnknown:
NSLog(#"player item status is unknown");
self.BtnPlay.userInteractionEnabled = FALSE;
break;
}
}
if ([keyPath isEqualToString:#"playbackBufferEmpty"])
{
NSLog(#"player item status playbackBufferEmpty");
if (playerItem.playbackBufferEmpty) {
//Your code here
[[NSNotificationCenter defaultCenter] postNotificationName:#"message" object:#"Buffering..."];
if([[UIApplication sharedApplication] applicationState] == UIApplicationStateBackground)
{
task = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^(void) {
[self setupAVPlayerForURL];
}];
}
}
}
if ([keyPath isEqualToString:#"playbackLikelyToKeepUp"])
{
NSLog(#"player item status playbackLikelyToKeepUp");
if (playerItem.playbackLikelyToKeepUp)
{
//Your code here
[player play];
if([[UIApplication sharedApplication] applicationState] == UIApplicationStateBackground)
{
[[UIApplication sharedApplication] endBackgroundTask:task];
task = 0;
}
}
}
}
}
//Finally.
-(IBAction) BtnPlay:(id)sender {
if(self.BtnPlay.touchInside){
if (player.rate == 1.0) {
[player pause];
[BtnPlay setTitle:#"Play" forState:UIControlStateNormal];
} else {
[player play];
[BtnPlay setTitle:#"Pause" forState:UIControlStateNormal];
}
}
}
//But I don't know how to stop this instead of pausing...
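A minimal sketch of one common way to fully stop the stream rather than pause it, assuming the player, anItem, and the observers registered in setupAVPlayerForURL above (this is only an illustration, not a definitive fix):
-(void) stopPlayback {
// Pause first, then remove the KVO observers before dropping the item;
// deallocating an item that still has observers can crash the application.
[player pause];
[anItem removeObserver:self forKeyPath:@"status"];
[anItem removeObserver:self forKeyPath:@"playbackBufferEmpty"];
[anItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"];
// Dropping the current item tears down the network stream.
[player replaceCurrentItemWithPlayerItem:nil];
anItem = nil;
player = nil;
}
To start again, rebuild everything with [self setupAVPlayerForURL]; and then call [player play];.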
I have a class that is handling an AVPlayer (and AVPlayerItem) that reports back state, time, and timedMetadata to a delegate.
It works well, except that about 70-80% of the time the initial timedMetadata is not key-value observed. However, after the first instance of timedMetadata is missed, all other timedMetadata seems to be observed without issue.
As a temporary fix, I've started to embed dummy timedMetadata tags in the beginning of videos that do nothing but "kick the tires" so to speak and everything works fine after that. Yet this seems pretty kludgy. I suspect that either I'm setting up the AVPlayerItem and KVO in a sub-optimal manner OR there's just a bug here.
Any ideas on why this might be happening are greatly appreciated! Code below....
// CL: Define constants for the key-value observation contexts.
static const NSString *ItemStatusContext;
static const NSString *ItemMetadataContext;
static const NSString *ItemPlaybackForcastContext;
- (id)initWithURL:(NSURL *)url
{
if (self = [super init]) {
__weak TFPAVController *_self = self;
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
NSString *tracksKey = @"tracks";
[asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:tracksKey] completionHandler:
^{
dispatch_async(dispatch_get_main_queue(),
^{
NSError *error = nil;
AVKeyValueStatus status = [asset statusOfValueForKey:tracksKey error:&error];
if (status == AVKeyValueStatusLoaded) {
AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:asset];
[item addObserver:_self forKeyPath:#"status" options:0 context:&ItemStatusContext];
[item addObserver:_self forKeyPath:#"timedMetadata" options:0 context:&ItemMetadataContext];
[item addObserver:_self forKeyPath:#"playbackLikelyToKeepUp" options:0 context:&ItemPlaybackForcastContext];
[[NSNotificationCenter defaultCenter] addObserver:_self
selector:#selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:item];
AVPlayer *player = [AVPlayer playerWithPlayerItem:item];
_self.totalRunTime = CMTimeGetSeconds(item.duration);
[_self.delegate avPlayerNeedsView:player];
_self.playerItem = item;
_self.player = player;
}
else {
NSLog(#"The asset's tracks were not loaded: %# // [%# %#]",
error.localizedDescription,
NSStringFromClass([self class]),
NSStringFromSelector(_cmd));
}
_self.playerObserver = [_self.player addPeriodicTimeObserverForInterval:CMTimeMake(1, _FrameRate_)
queue:NULL
usingBlock: ^(CMTime time) {
_self.currentVideoTime = CMTimeGetSeconds([_self.playerItem currentTime]);
}];
});
}];
}
return self;
}
#pragma mark - KVO Response Methods
- (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context
{
__weak TFPAVController *_self = self;
if (context == &ItemStatusContext) {
dispatch_async(dispatch_get_main_queue(),
^{
if (((AVPlayerItem *)object).status == AVPlayerItemStatusReadyToPlay) {
[_self.delegate videoIsLoadedInPlayer:_self];
}
});
return;
}
else if (context == &ItemMetadataContext) {
dispatch_async(dispatch_get_main_queue(),
^{
[_self checkMetaDataForPlayerItem: (AVPlayerItem *)object];
});
return;
}
else if (context == &ItemPlaybackForcastContext) {
dispatch_async(dispatch_get_main_queue(),
^{
AVPlayerItem *playerItem = object;
if (CMTimeGetSeconds([playerItem currentTime]) <= 0) return;
NSDictionary *notificationDictionary = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:playerItem.playbackLikelyToKeepUp]
forKey:kAVPlayerStateKey];
[[NSNotificationCenter defaultCenter] postNotificationName:kAVPlayerNotification
object:self
userInfo:notificationDictionary];
});
return;
}
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
- (void)checkMetaDataForPlayerItem:(AVPlayerItem *)item
{
NSMutableDictionary *metaDict = [NSMutableDictionary dictionary];
// CL: make sure there's stuff there
if (item.timedMetadata != nil && [item.timedMetadata count] > 0) {
// CL: if there is, cycle through the items and create a Dictionary
for (AVMetadataItem *metadata in item.timedMetadata) {
[metaDict setObject:[metadata valueForKey:@"value"] forKey:[metadata valueForKey:@"key"]];
}
// CL: pass it to the delegate
[self.delegate parseNewMetaData:[NSDictionary dictionaryWithDictionary:metaDict]];
}
}
Ahhh, KVO. Probably one of Apple's all-time worst design decisions.
I guess it's no longer relevant, but at a guess the problem you're having is that sometimes the value you're trying to observe has already been assigned to the key when you get around to adding yourself as an observer, so your observer selector isn't called.
To avoid this you can add NSKeyValueObservingOptionInitial to the options when calling addObserver:forKeyPath:options:context:, and your observer method will be invoked immediately with the current value.
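For example, a sketch of the timedMetadata observer registration from the question with that option added (key path, _self, and context constant are taken from the code above):
// NSKeyValueObservingOptionInitial makes the observer fire immediately with the
// current timedMetadata value, so metadata set before registration is not missed.
[item addObserver:_self
forKeyPath:@"timedMetadata"
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:&ItemMetadataContext];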
I am trying to build an app that will change the composition of a video using the AVFoundation framework, but I would first like to just be able to play the video.
I have written some code to do that, but when I play the video the sound does not work.
I tried to use the Apple sample code to do this.
Below is my code:
-(void)loadAssets
{
NSArray *requestedKeys = [NSArray arrayWithObjects:kTracksKey, kPlayableKey, nil];
/* Tells the asset to load the values of any of the specified keys that are not already loaded. */
[asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler:
^{
dispatch_async( dispatch_get_main_queue(),
^{
/* IMPORTANT: Must dispatch to main queue in order to operate on the AVPlayer and AVPlayerItem. */
[self prepareToPlayAsset:asset withKeys:requestedKeys];
});
}];
}
- (void)prepareToPlayAsset:(AVURLAsset *)assetURL withKeys:(NSArray *)requestedKeys
{
/* Make sure that the value of each key has loaded successfully. */
for (NSString *thisKey in requestedKeys)
{
NSError *error = nil;
AVKeyValueStatus keyStatus = [assetURL statusOfValueForKey:thisKey error:&error];
if (keyStatus == AVKeyValueStatusFailed)
{
[self assetFailedToPrepareForPlayback:error];
return;
}
}
/* Use the AVAsset playable property to detect whether the asset can be played. */
if (!assetURL.playable)
{
/* Generate an error describing the failure. */
NSString *localizedDescription = NSLocalizedString(@"Item cannot be played", @"Item cannot be played description");
NSString *localizedFailureReason = NSLocalizedString(@"The assets tracks were loaded, but could not be made playable.", @"Item cannot be played failure reason");
NSDictionary *errorDict = [NSDictionary dictionaryWithObjectsAndKeys:
localizedDescription, NSLocalizedDescriptionKey,
localizedFailureReason, NSLocalizedFailureReasonErrorKey,
nil];
NSError *assetCannotBePlayedError = [NSError errorWithDomain:@"StitchedStreamPlayer" code:0 userInfo:errorDict];
/* Display the error to the user. */
[self assetFailedToPrepareForPlayback:assetCannotBePlayedError];
return;
}
/* At this point we're ready to set up for playback of the asset. */
/* Stop observing our prior AVPlayerItem, if we have one. */
if (self.playerItem)
{
/* Remove existing player item key value observers and notifications. */
[self.playerItem removeObserver:self forKeyPath:kStatusKey];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVPlayerItemDidPlayToEndTimeNotification
object:self.playerItem];
}
/* Create a new instance of AVPlayerItem from the now successfully loaded AVAsset. */
self.playerItem = [AVPlayerItem playerItemWithAsset:asset];
/* Observe the player item "status" key to determine when it is ready to play. */
[self.playerItem addObserver:self
forKeyPath:kStatusKey
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:VideoPlaybackViewControllerStatusObservationContext];
/* When the player item has played to its end time we'll toggle
the movie controller Pause button to be the Play button */
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:self.playerItem];
seekToZeroBeforePlay = NO;
/* Create new player, if we don't already have one. */
if (![self player])
{
/* Get a new AVPlayer initialized to play the specified player item. */
self.player=[AVPlayer playerWithPlayerItem:self.playerItem];
[self.playerView setPlayer:self.player];
// Observe the AVPlayer "currentItem" property to find out when any
//AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
//occur.
[self.player addObserver:self
forKeyPath:kCurrentItemKey
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:VideoPlaybackViewControllerCurrentItemObservationContext];
}
// Make our new AVPlayerItem the AVPlayer's current item.
if (self.player.currentItem != self.playerItem)
{
// Replace the player item with a new player item. The item replacement occurs
//asynchronously; observe the currentItem property to find out when the
//replacement will/did occur
[[self player] replaceCurrentItemWithPlayerItem:self.playerItem];
[self syncUI];
}
}
- (void)observeValueForKeyPath:(NSString*) path
ofObject:(id)object
change:(NSDictionary*)change
context:(void*)context
{
/* AVPlayerItem "status" property value observer. */
if (context == VideoPlaybackViewControllerStatusObservationContext)
{
[self syncUI];
AVPlayerStatus status = [[change objectForKey:NSKeyValueChangeNewKey] integerValue];
switch (status)
{
/* Indicates that the status of the player is not yet known because
it has not tried to load new media resources for playback */
case AVPlayerStatusUnknown:
{
[self disablePlayerButtons];
}
break;
case AVPlayerStatusReadyToPlay:
{
/* Once the AVPlayerItem becomes ready to play, i.e.
[playerItem status] == AVPlayerItemStatusReadyToPlay,
its duration can be fetched from the item. */
//[self initScrubberTimer];
//[self enableScrubber];
[self enablePlayerButtons];
}
break;
case AVPlayerStatusFailed:
{
AVPlayerItem *playerItem = (AVPlayerItem *)object;
[self assetFailedToPrepareForPlayback:playerItem.error];
}
break;
}
}
/* AVPlayer "currentItem" property observer.
Called when the AVPlayer replaceCurrentItemWithPlayerItem:
replacement will/did occur. */
else if (context == VideoPlaybackViewControllerCurrentItemObservationContext)
{
AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
/* Is the new player item null? */
if (newPlayerItem == (id)[NSNull null])
{
[self disablePlayerButtons];
//[self disableScrubber];
}
else /* Replacement of player currentItem has occurred */
{
/* Set the AVPlayer for which the player layer displays visual output. */
[self.playerView setPlayer:self.player];
//[self setViewDisplayName];
/* Specifies that the player should preserve the video’s aspect ratio and
fit the video within the layer’s bounds. */
//[mPlaybackView setVideoFillMode:AVLayerVideoGravityResizeAspect];
[self syncUI];
}
}
else
{
[super observeValueForKeyPath:path ofObject:object change:change context:context];
}
}
Any help?