I am working on MPMoviePlayerController.
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0]; // Get documents folder
NSString *fileURL = [NSString stringWithFormat:@"%@/FileName.mp4", documentsDirectory];
myMoviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:fileURL]];
myMoviePlayer.backgroundView.backgroundColor = [UIColor blackColor];
[[myMoviePlayer view] setFrame:CGRectMake(0, 0, self.view.frame.size.width, getReadyMoviePlayerView.frame.size.height)];
[myMoviePlayerView addSubview:[myMoviePlayer view]];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(moviePlayBackDidFinished:) name:MPMoviePlayerPlaybackStateDidChangeNotification object:myMoviePlayer];
myMoviePlayer.controlStyle = MPMovieControlStyleEmbedded;
myMoviePlayer.shouldAutoplay = NO;
myMoviePlayer.fullscreen = NO;
myMoviePlayer.repeatMode = MPMovieRepeatModeOne;
[myMoviePlayer prepareToPlay];
[myMoviePlayer play];
-(void)moviePlayBackDidFinished:(NSNotification *)dict{
if (dict.object == myMoviePlayer) {
NSInteger reason = [[dict.userInfo objectForKey:MPMoviePlayerPlaybackDidFinishReasonUserInfoKey] integerValue];
if (reason == MPMovieFinishReasonPlaybackEnded){
[myMoviePlayer prepareToPlay];
[myMoviePlayer play];
}
}
}
Now the problem is that the play and pause button is not working: I am not able to pause the video using the MPMoviePlayerController controls.
This happens on both iOS 7 and iOS 8.
Is there any way to pause the running video?
Thank you
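One thing worth checking (a hedged guess, not a confirmed fix): the observer above is registered for MPMoviePlayerPlaybackStateDidChangeNotification, which also fires when the user taps pause, while the handler reads MPMoviePlayerPlaybackDidFinishReasonUserInfoKey and calls play again. Registering for the did-finish notification instead would look roughly like this:
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(moviePlayBackDidFinished:)
                                             name:MPMoviePlayerPlaybackDidFinishNotification
                                           object:myMoviePlayer];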
Related
I have multiple UIImageView subviews getting stacked based on my incoming data. Each of these subviews is set to either an image or a video layer, depending on that data. The problem I have is playing the videos: I can play the first video in the stack, but every video after that just plays the sound of the first video. How can I play each one accordingly?
The views are navigated through with a tap event, like Snapchat. See below:
@interface SceneImageViewController ()
@property (strong, nonatomic) NSURL *videoUrl;
@property (strong, nonatomic) AVPlayer *avPlayer;
@property (strong, nonatomic) AVPlayerLayer *avPlayerLayer;
@end
@implementation SceneImageViewController
- (void)viewDidLoad {
[super viewDidLoad];
self.mySubviews = [[NSMutableArray alloc] init];
self.videoCounterTags = [[NSMutableArray alloc] init];
int c = (int)[self.scenes count];
c--;
NSLog(#"int c = %d", c);
self.myCounter = [NSNumber numberWithInt:c];
for (int i=0; i<=c; i++) {
//create imageView
UIImageView *imageView =[[UIImageView alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height)];
[imageView setUserInteractionEnabled:YES]; // <--- This is very important
imageView.tag = i; // <--- Add tag to track this subview in the view stack
[self.view addSubview:imageView];
NSLog(#"added image view %d", i);
//get scene object
PFObject *sceneObject = self.scenes[i];
//get the PFFile and filetype
PFFile *file = [sceneObject objectForKey:@"file"];
NSString *fileType = [sceneObject objectForKey:@"fileType"];
//check the filetype
if ([fileType isEqual: @"image"])
{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
//get image
NSURL *imageFileUrl = [[NSURL alloc] initWithString:file.url];
NSData *imageData = [NSData dataWithContentsOfURL:imageFileUrl];
dispatch_async(dispatch_get_main_queue(), ^{
imageView.image = [UIImage imageWithData:imageData];
});
});
}
//its a video
else
{
// the video player
NSURL *fileUrl = [NSURL URLWithString:file.url];
self.avPlayer = [AVPlayer playerWithURL:fileUrl];
self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
self.avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
//self.avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:[self.avPlayer currentItem]];
CGRect screenRect = [[UIScreen mainScreen] bounds];
self.avPlayerLayer.frame = CGRectMake(0, 0, screenRect.size.width, screenRect.size.height);
[imageView.layer addSublayer:self.avPlayerLayer];
NSNumber *tag = [NSNumber numberWithInt:i+1];
NSLog(#"tag = %#", tag);
[self.videoCounterTags addObject:tag];
//[self.avPlayer play];
}
}
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(viewTapped:)];
[self.view bringSubviewToFront:self.screen];
[self.screen addGestureRecognizer:tapGesture];
}
- (void)viewTapped:(UIGestureRecognizer *)gesture{
NSLog(#"touch!");
[self.avPlayer pause];
int i = [self.myCounter intValue];
NSLog(#"counter = %d", i);
for(UIImageView *subview in [self.view subviews]) {
if(subview.tag== i) {
[subview removeFromSuperview];
}
}
if ([self.videoCounterTags containsObject:self.myCounter]) {
NSLog(#"play video!!!");
[self.avPlayer play];
}
if (i == 0) {
[self.avPlayer pause];
[self.navigationController popViewControllerAnimated:NO];
}
i--;
self.myCounter = [NSNumber numberWithInt:i];
NSLog(#"counter after = %d", i);
}
What Brooks Hanes said is correct: you keep overriding the AVPlayer.
This is what I suggest you do:
Add the tap gesture to the imageView instead of the screen (or, for a cleaner approach, use a UIButton instead):
UIImageView *imageView =[[UIImageView alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height)];
[imageView setUserInteractionEnabled:YES]; // <--- This is very important
imageView.tag = i; // <--- Add tag to track this subview in the view stack
[self.view addSubview:imageView];
NSLog(#"added image view %d", i);
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(viewTapped:)];
[imageView addGestureRecognizer:tapGesture];
This way, in your viewTapped: method, you can get the tag of the tapped image via gesture.view.tag instead of using myCounter.
To get the video working you could create a new AVPlayer for each video, but that might turn out quite expensive memory-wise. A better approach is to use AVPlayerItem and switch the AVPlayer's AVPlayerItem when changing the video.
So in the for loop do something like this, where self.videoFiles is an NSMutableDictionary property:
// the video player
NSNumber *tag = [NSNumber numberWithInt:i+1];
NSURL *fileUrl = [NSURL URLWithString:file.url];
//save your video file url paired with the ImageView it belongs to.
[self.videoFiles setObject:fileUrl forKey:tag];
// you only need to initialize the player once.
if(self.avPlayer == nil){
AVAsset *asset = [AVAsset assetWithURL:fileUrl];
AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
self.avPlayer = [[AVPlayer alloc] initWithPlayerItem:item];
self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:[self.avPlayer currentItem]];
}
// you don't need to keep the layer as a property
// (unless you need it for some reason
AVPlayerLayer* avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
CGRect screenRect = [[UIScreen mainScreen] bounds];
avPlayerLayer.frame = CGRectMake(0, 0, screenRect.size.width, screenRect.size.height);
[imageView.layer addSublayer:avPlayerLayer];
NSLog(#"tag = %#", tag);
[self.videoCounterTags addObject:tag];
Now in your viewTapped:
if ([self.videoCounterTags containsObject:@(gesture.view.tag)]) {
NSLog(@"play video!!!");
AVAsset *asset = [AVAsset assetWithURL:[self.videoFiles objectForKey:@(gesture.view.tag)]];
AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
[self.avPlayer replaceCurrentItemWithPlayerItem:item];
[self.avPlayer play];
}
Or use the self.videoFiles instead and then you don't need self.videoCounterTags at all:
NSURL *fileURL = [self.videoFiles objectForKey:@(gesture.view.tag)];
if (fileURL != nil) {
NSLog(@"play video!!!");
AVAsset *asset = [AVAsset assetWithURL:fileURL];
AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
[self.avPlayer replaceCurrentItemWithPlayerItem:item];
[self.avPlayer play];
}
That's the gist of it.
Take a look at the way you're setting up the myCounter variable. It is set once in viewDidLoad to the count of scenes minus one, and only changes when a view is tapped.
In addition, look at the way you're setting the _avPlayer pointer. It's being set over and over in the for loop, so rather than storing a reference for each scene you just keep repointing the same pointer at the latest item in the collection of scenes.
Also, from Apple's documentation:
You can create arbitrary numbers of player layers with the same AVPlayer object. Only the most recently created player layer will actually display the video content on-screen.
So, it's possible that because you're using the same AVPlayer object to create all these player layers, you're never going to see more than one actual video layer work.
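As a hedged illustration of that constraint (the self.avPlayers array is a hypothetical property, not something from the question), one way to give every subview its own visible video is to create a separate player per layer and keep a strong reference to each:
// inside the for loop, instead of reusing self.avPlayer
AVPlayer *player = [AVPlayer playerWithURL:fileUrl];
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
playerLayer.frame = imageView.bounds;
[imageView.layer addSublayer:playerLayer];
[self.avPlayers addObject:player]; // hypothetical NSMutableArray property that keeps each player alive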
There are two video files. When I try to view these two videos:
1) Only the last video file can play, be seen in fullscreen mode, and be minimised when the Done button is clicked; the first video file can't.
2) After a while the first video file also shows only a black screen.
- (void)viewDidLoad
{
[super viewDidLoad];
MPMoviePlayerController *moviePlayer;
NSArray *filename = @[@"nissan1", @"nissan5"];
//nissan1 and nissan5 are mp4 files
NSURL *fileURL1 = [NSURL fileURLWithPath:[[NSBundle mainBundle]
pathForResource:[filename objectAtIndex:0] ofType:@"mp4"]];
moviePlayer= [[MPMoviePlayerController alloc] initWithContentURL:fileURL1];
[moviePlayer.view setFrame:CGRectMake(5, 50, 100,100)];
moviePlayer.shouldAutoplay = NO;
moviePlayer.repeatMode = MPMovieRepeatModeOne;
moviePlayer.initialPlaybackTime = -1.0;
moviePlayer.movieSourceType = MPMovieSourceTypeFile;
[moviePlayer prepareToPlay];
[self.view addSubview:moviePlayer.view];
/* second video file*/
NSURL *fileURL2 = [NSURL fileURLWithPath:[[NSBundle mainBundle]
pathForResource:[filename objectAtIndex:1] ofType:@"mp4"]];
moviePlayer= [[MPMoviePlayerController alloc] initWithContentURL:fileURL2];
[moviePlayer.view setFrame:CGRectMake(200, 50, 100,100)];
moviePlayer.shouldAutoplay = NO;
moviePlayer.repeatMode = MPMovieRepeatModeNone;
moviePlayer.initialPlaybackTime = -1.0;
moviePlayer.movieSourceType = MPMovieSourceTypeFile;
[moviePlayer prepareToPlay];
[self.view addSubview:moviePlayer.view];
}
You should use two instances. As Cristik pointed out, ARC targets the player for deallocation once the method ends, and you are using the same variable for both.
Since you asked for sample code, try this:
- (void)viewDidLoad
{
[super viewDidLoad];
MPMoviePlayerController *moviePlayer1;
MPMoviePlayerController *moviePlayer2;
//filename is a name array
NSURL *fileURL1 = [NSURL fileURLWithPath:[[NSBundle mainBundle]
pathForResource:[filename objectAtIndex:0] ofType:@"mp4"]];
moviePlayer1= [[MPMoviePlayerController alloc] initWithContentURL:fileURL1];
[moviePlayer1.view setFrame:CGRectMake(5, 50, 100,100)];
moviePlayer1.shouldAutoplay = NO;
moviePlayer1.repeatMode = MPMovieRepeatModeOne;
moviePlayer1.initialPlaybackTime = -1.0;
moviePlayer1.movieSourceType = MPMovieSourceTypeFile;
[moviePlayer1 prepareToPlay];
[self.view addSubview:moviePlayer1.view];
/* second video file*/
NSURL *fileURL2 = [NSURL fileURLWithPath:[[NSBundle mainBundle]
pathForResource:[filename objectAtIndex:1] ofType:@"mp4"]];
moviePlayer2= [[MPMoviePlayerController alloc] initWithContentURL:fileURL2];
[moviePlayer2.view setFrame:CGRectMake(200, 50, 100,100)];
moviePlayer2.shouldAutoplay = NO;
moviePlayer2.repeatMode = MPMovieRepeatModeNone;
moviePlayer2.initialPlaybackTime = -1.0;
moviePlayer2.movieSourceType = MPMovieSourceTypeFile;
[moviePlayer2 prepareToPlay];
[self.view addSubview:moviePlayer2.view];
}
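One caveat worth adding (my note, not part of the original answer): local MPMoviePlayerController variables can still be released once viewDidLoad returns, so holding the two players in strong properties is the safer pattern, roughly:
@property (strong, nonatomic) MPMoviePlayerController *moviePlayer1; // hypothetical property names
@property (strong, nonatomic) MPMoviePlayerController *moviePlayer2;
and then, inside viewDidLoad:
self.moviePlayer1 = [[MPMoviePlayerController alloc] initWithContentURL:fileURL1];
self.moviePlayer2 = [[MPMoviePlayerController alloc] initWithContentURL:fileURL2];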
So it has been brought to my attention that I should optimise the way I load and use sounds in a small game that I am developing for iOS.
I load a "boing" sound and play it every time I tap the screen, making the sprite jump (like mario).
I want to be able to play the sound every time, even if the sound is already playing from the previous jump...
Below are the 2 ways I'm currently using:
1st implementation:
//load the music from file
-(void)LoadMusic{
jumpSound = [[NSBundle mainBundle] pathForResource:#"boing" ofType:#"mp3"];
}
//call in viewDidLoad
- (void)viewDidLoad{
[self LoadMusic];
...
[super viewDidLoad];
}
//play sound when called
-(void)playSound{
jumpAffect = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:jumpSound] error:NULL];
[jumpAffect play];
}
//tap/touch to jump (& play sound)
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *) event{
[self playSound];
jumpUp = 16;
}
2nd implementation: this is similar, except I load the same file five times and cycle through to the next one (so the same sound effect can be played even if it's already playing from a previous jump).
int soundStage = 1;
//load the music from file
-(void)LoadMusic{
jumpSound = [[NSBundle mainBundle] pathForResource:#"Boing" ofType:#"mp3"];
jumpAffect = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:jumpSound] error:NULL];
jumpAffect.delegate = self;
jumpAffect2 = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:jumpSound] error:NULL];
jumpAffect2.delegate = self;
jumpAffect3 = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:jumpSound] error:NULL];
jumpAffect3.delegate = self;
jumpAffect4 = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:jumpSound] error:NULL];
jumpAffect4.delegate = self;
jumpAffect5 = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:jumpSound] error:NULL];
jumpAffect5.delegate = self;
}
//call in viewDidLoad
- (void)viewDidLoad{
[self LoadMusic];
...
[super viewDidLoad];
}
//tap/touch to jump (& play sound)
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *) event{
jumpUp = 16;
if(soundStage == 1){
[jumpAffect play];
soundStage = 2;
}
else if(soundStage == 2){
[jumpAffect2 play];
soundStage = 3;
}
else if(soundStage == 3){
[jumpAffect3 play];
soundStage = 4;
}
else if(soundStage == 4){
[jumpAffect4 play];
soundStage = 5;
}
else if(soundStage == 5){
[jumpAffect5 play];
soundStage = 1;
}
}
I'm wondering which way is better? I'm hoping to avoid memory leaks and to optimise it so the same sound can be played every time the screen is tapped. Thanks.
AVAudioPlayer plays multiple sounds simultaneously, one sound per audio player. So a better approach is to load and play your quick sound inside the touchesBegan method, without keeping a reference:
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *) event
{
NSString *jumpSound = [[NSBundle mainBundle] pathForResource:@"boing" ofType:@"mp3"];
AVAudioPlayer *jumpAffect = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:jumpSound] error:NULL];
[jumpAffect prepareToPlay];
[jumpAffect play];
jumpUp = 16;
}
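If allocating a player from the file on every tap turns out to be costly, a variation (a sketch, assuming the sound file is small and self.jumpSoundData is a hypothetical NSData property) is to cache the file data once and build each player from memory:
// load once, e.g. in viewDidLoad
NSString *path = [[NSBundle mainBundle] pathForResource:@"boing" ofType:@"mp3"];
self.jumpSoundData = [NSData dataWithContentsOfFile:path];
// in touchesBegan
AVAudioPlayer *jumpAffect = [[AVAudioPlayer alloc] initWithData:self.jumpSoundData error:NULL];
[jumpAffect prepareToPlay];
[jumpAffect play];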
I am playing a YouTube video with the following method. When the user taps the video it plays in the default iOS MPMoviePlayer. I need to know when the video starts playing and when the MPMoviePlayer Done button is pressed. Any kind of help would be appreciated.
particularVideo *videoInfo = (particularVideo *) responseObj;
self.videoTitleLabel.text = videoInfo.videoTitle;
//NSString *vidUrl = [videoInfo.videoURL stringByReplacingOccurrencesOfString:@" " withString:@"%20"];
CGSize scrSize = [[UIScreen mainScreen] bounds].size;//appDel.window.frame.size;
float imgSclToX = scrSize.width/16.0;
int imgWidth = (int) (scrSize.width -18);
int imgHeight = (int) ((float)(9.0)*imgSclToX);
// DebugLog(#"Yo-----> Window: %# Width: %i Height: %i", NSStringFromCGSize(scrSize), imgWidth, imgHeight);
NSString *htmlStr = #"";
// htmlStr = [htmlStr stringByAppendingFormat:#"<iframe width=\"%i\" height=\"%i\" src=\"%#\" frameborder=\"0\" allowfullscreen></iframe>", imgWidth, imgHeight, vidUrl];
NSString *tStr = videoInfo.videoURL;
tStr = [tStr substringFromIndex:([tStr rangeOfString:@"v="].location+2)];
htmlStr = [NSString stringWithFormat:@"<iframe type=\"application/x-shockwave-flash\" width=\"%i\" height=\"%i\" src=\"https://www.youtube.com/embed/%@\" frameborder=\"0\" allowfullscreen></iframe>", imgWidth, imgHeight, tStr];
NSString *embedHTML = [NSString stringWithFormat:@"<html><head></head><body>%@<p><b>Artist:</b> %@<br><b>Composer:</b>%@<p>%@</p></body></html>", htmlStr, videoInfo.videoArtist, videoInfo.videoCompositor, videoInfo.videoDescription];
[self.myWebview loadHTMLString: embedHTML baseURL: nil];
Try this; I have used it in my project.
It will tell you that enough data has been buffered for playback to continue uninterrupted.
-(void)movieLoadStateDidChange:(id)sender{
if (self.movieplayer.loadState & MPMovieLoadStatePlaythroughOK) {
NSLog(@"State is Playable OK");
NSLog(@"Enough data has been buffered for playback to continue uninterrupted.");
}
}
For this you need to use NSNotificationCenter:
- (void)viewDidAppear:(BOOL)animated {
NSLog(#"VIEW DID LOAD");
// Register to receive a notification that the movie is now in memory and ready to play
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(movieLoadStateDidChange:)
name:MPMoviePlayerLoadStateDidChangeNotification
object:nil];
self.movieplayer = [[MPMoviePlayerController alloc]initWithContentURL: [NSURL URLWithString:[self urlencode:self.strPlayUrl]]];
[[ self.movieplayer view] setFrame: CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height)];
[self.view addSubview: [ self.movieplayer view]];
[ self.movieplayer setShouldAutoplay:YES];
[ self.movieplayer prepareToPlay];
[ self.movieplayer play];
[self.view insertSubview:self.movieplayer.view belowSubview:self.indicator];
}
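A small follow-up worth noting (my addition, not part of the original answer): since the observer is registered in viewDidAppear:, it should be balanced with a removal so the controller isn't notified after it goes away, for example:
- (void)viewWillDisappear:(BOOL)animated {
[super viewWillDisappear:animated];
[[NSNotificationCenter defaultCenter] removeObserver:self name:MPMoviePlayerLoadStateDidChangeNotification object:nil];
}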
I have generated a MOV file from screenshots of the UI and recorded the sound, then combined the video and audio to produce a MOV movie.
I have seen quite a lot of MPMoviePlayerViewController samples, but it just shows me a black screen. I tried AVPlayer but I can't get it to work.
I'm new to playing movie files in iOS, please help.
Here is my code:
NSString *fileNamePath = mVideo;
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory,NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *oldappSettingsPath = [documentsDirectory stringByAppendingPathComponent:fileNamePath];
NSURL *path = [NSURL fileURLWithPath:oldappSettingsPath];
self.mPlayer = [[MPMoviePlayerController alloc] initWithContentURL:path];
self.mPlayer.controlStyle = MPMovieControlStyleFullscreen;
self.mPlayer.fullscreen = YES;
self.mPlayer.scalingMode = MPMovieScalingModeFill;
[[self.mPlayer view] setFrame: CGRectMake(0, 0, 480, 320)];
[self.view addSubview:[self.mPlayer view]];
[self.mPlayer prepareToPlay];
[self.mPlayer play];
I have found the fix for my MPMoviePlayerController:
NSURL *path = [NSURL fileURLWithPath:oldappSettingsPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:oldappSettingsPath]) {
NSLog(#"Exist");
self.mPlayer = [[MPMoviePlayerController alloc] initWithContentURL:path];
self.mPlayer.movieSourceType = MPMovieSourceTypeFile;
self.mPlayer.controlStyle = MPMovieControlStyleFullscreen;
self.mPlayer.fullscreen = YES;
self.mPlayer.scalingMode = MPMovieScalingModeFill;
[[self.mPlayer view] setFrame: CGRectMake(0, 0, 480, 320)];
[self.view addSubview:[self.mPlayer view]];
[self.mPlayer prepareToPlay];
[self.mPlayer play];
} else {
NSLog(#"Don't Exist");
}
I also added:
self.mPlayer.movieSourceType = MPMovieSourceTypeFile;
In my case the player showed a black screen because the file I was playing was not there, so I first added a check for whether the file exists.