How to set AVPlayer Sound Level meter in iOS? - ios

I'm using AVPlayer for my app using HTTP live streaming. Now I want to implement a level meter for that audio stream.
I found several examples using AVAudioPlayer, but I cannot find a solution for getting the required information off AVPlayer.
// Create a player item for the remote MP3 and a player driving it.
// NOTE: the original also called +playerWithURL: afterwards, which replaced
// the player-item-based player and discarded self.playerItem; keep only one.
NSURL *url = [NSURL URLWithString:@"http://www.stephaniequinn.com/Music/Allegro%20from%20Duet%20in%20C%20Major.mp3"];
self.playerItem = [AVPlayerItem playerItemWithURL:url];
self.player = [AVPlayer playerWithPlayerItem:self.playerItem];
[self.player play];

Please try this one
// Mute playback: prefer the -volume property (iOS 7+); otherwise fall back
// to applying a zero-volume AVAudioMix to the player item's audio tracks.
if ([mPlayer respondsToSelector:@selector(setVolume:)]) {
    mPlayer.volume = 0.0;
} else {
    NSArray *audioTracks = mPlayerItem.asset.tracks;
    // Build one zero-volume input-parameters entry per audio track.
    NSMutableArray *allAudioParams = [NSMutableArray array];
    for (AVAssetTrack *track in audioTracks) {
        AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
        [audioInputParams setVolume:0.0 atTime:kCMTimeZero];
        [audioInputParams setTrackID:[track trackID]];
        [allAudioParams addObject:audioInputParams];
    }
    AVMutableAudioMix *audioZeroMix = [AVMutableAudioMix audioMix];
    [audioZeroMix setInputParameters:allAudioParams];
    [mPlayerItem setAudioMix:audioZeroMix]; // Mute the player item
}

Related

iOS objective c How to load next coming video while playing current video without delay

In my application ai have loaded all video url's to AVPlayer and it has previous and next buttons here i have setup player
// Sets up the player for the first video URL, installs a periodic time
// observer that toggles the previous/next buttons, and observes
// end-of-item so playback can advance automatically.
// Fixes vs. original: @-literals restored; layer/UI work moved to the main
// queue (the load completion handler runs on an arbitrary queue); the
// periodic-observer block no longer captures self strongly through bare
// ivar access, which defeated the weakSelf dance and created a retain
// cycle (the player retains that block for its lifetime).
-(void)setUpMyNewPlayer
{
    [self addTimer];
    NSURL *url = [NSURL URLWithString:_videosArray[0]];
    _currentIndex = 0;
    videoPlayer = [[AVPlayer alloc] init];
    videoPlayer.automaticallyWaitsToMinimizeStalling = NO;
    AVAsset *asset = [AVAsset assetWithURL:url];
    __weak typeof(self) weakSelf = self;
    [asset loadValuesAsynchronouslyForKeys:@[@"playable"] completionHandler:^{
        // Hop to the main queue before touching layers/views.
        dispatch_async(dispatch_get_main_queue(), ^{
            __strong typeof(weakSelf) strongSelf = weakSelf;
            if (!strongSelf) return;
            AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:asset];
            // NOTE(review): videoPlayer is assumed to be an ivar of self —
            // confirm; access goes through strongSelf to keep the capture weak.
            strongSelf->videoPlayer = [AVPlayer playerWithPlayerItem:item];
            AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:strongSelf->videoPlayer];
            playerLayer.videoGravity = AVLayerVideoGravityResize;
            playerLayer.frame = CGRectMake(0, 0,
                                           strongSelf.playView.frame.size.width,
                                           strongSelf.playView.frame.size.height);
            [strongSelf.playView.layer addSublayer:playerLayer];
            [strongSelf->videoPlayer play];
            CMTime interval = CMTimeMakeWithSeconds(0.5, NSEC_PER_SEC);
            [strongSelf->videoPlayer addPeriodicTimeObserverForInterval:interval
                                                                  queue:dispatch_get_main_queue()
                                                             usingBlock:^(CMTime time) {
                // Capture only weakSelf here; the player retains this block.
                __strong typeof(weakSelf) inner = weakSelf;
                if (!inner) return;
                // NOTE(review): the button logic keys off _contentImages.count
                // while playback uses _videosArray — presumably they are kept
                // in sync; verify.
                NSUInteger index = inner->_currentIndex;
                NSUInteger count = inner->_contentImages.count;
                if (index == count - 1) {
                    inner.nextButton.hidden = YES;
                    inner.previousButton.hidden = NO;
                } else if (index == 0) {
                    inner.previousButton.hidden = YES;
                    inner.nextButton.hidden = (count <= 1);
                } else {
                    // Somewhere in the middle: both navigation buttons visible.
                    inner.nextButton.hidden = NO;
                    inner.previousButton.hidden = NO;
                }
            }];
            [[NSNotificationCenter defaultCenter] addObserver:strongSelf
                                                     selector:@selector(itemDidFinishPlaying1:)
                                                         name:AVPlayerItemDidPlayToEndTimeNotification
                                                       object:item];
        });
    }];
}
// AVPlayerItemDidPlayToEndTime handler: advance to the next video in
// _videosArray unless the item that just finished was the last one.
-(void)itemDidFinishPlaying1:(NSNotification *) notification {
    if (_currentIndex == _videosArray.count - 1) {
        return; // Final video finished; nothing left to queue.
    }
    _currentIndex = _currentIndex + 1;
    NSURL *nextURL = [NSURL URLWithString:_videosArray[_currentIndex]];
    AVAsset *nextAsset = [AVAsset assetWithURL:nextURL];
    AVPlayerItem *nextItem = [AVPlayerItem playerItemWithAsset:nextAsset];
    [videoPlayer replaceCurrentItemWithPlayerItem:nextItem];
    [self changePage:UIPageViewControllerNavigationDirectionForward];
    [self addTimer];
}
// (Re)starts the 0.1 s polling timer that hides the loading HUD once the
// player starts rendering. Invalidates any existing timer first — this
// method is called on every track change, and the original stacked a new
// live repeating timer on top of the old one each time (each retaining
// self until invalidated).
-(void)addTimer
{
    [myTimer invalidate];
    myTimer = [NSTimer scheduledTimerWithTimeInterval:0.1
                                               target:self
                                             selector:@selector(callAfterOneSecond1:)
                                             userInfo:nil
                                              repeats:YES];
}
// Timer callback: keep showing the loading HUD until the player is
// actually playing (non-zero rate, no error, ready to play), then remove
// the HUD and tear the timer down.
-(void) callAfterOneSecond1:(NSTimer*)t
{
    [[AppDelegate shared] showLoading];
    BOOL nowPlaying = (videoPlayer.rate != 0)
                   && (videoPlayer.error == nil)
                   && (videoPlayer.status == AVPlayerStatusReadyToPlay);
    if (!nowPlaying) {
        return;
    }
    [[AppDelegate shared] removeLoading];
    [myTimer invalidate];
    myTimer = nil;
}
#pragma mark - PreviousAction
// Steps back to the previous video, swaps it into the existing player,
// resumes playback and restarts the loading timer. No-op when already at
// the first video. (Fixed: the `pragma mark` line had lost its `#`.)
- (IBAction)previousButtonAction:(id)sender {
    if (_currentIndex == 0) {
        return;
    }
    _currentIndex = _currentIndex - 1;
    NSURL *url = [NSURL URLWithString:_videosArray[_currentIndex]];
    AVAsset *asset = [AVAsset assetWithURL:url];
    AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:asset];
    [videoPlayer replaceCurrentItemWithPlayerItem:item];
    [videoPlayer play];
    [self changePage:UIPageViewControllerNavigationDirectionReverse];
    [self addTimer];
}
#pragma mark - NextAction
// Steps forward to the next video, swaps it into the existing player,
// resumes playback and restarts the loading timer. No-op when already at
// the last video. (Fixed: the `pragma mark` line had lost its `#`.)
- (IBAction)nextButtonAction:(id)sender {
    if (_currentIndex == _videosArray.count - 1) {
        return;
    }
    _currentIndex = _currentIndex + 1;
    NSURL *url = [NSURL URLWithString:_videosArray[_currentIndex]];
    AVAsset *asset = [AVAsset assetWithURL:url];
    AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:asset];
    [videoPlayer replaceCurrentItemWithPlayerItem:item];
    [videoPlayer play];
    [self changePage:UIPageViewControllerNavigationDirectionForward];
    [self addTimer];
}
Here the player plays well, but with a small delay. How can I load the next video while the current video is playing, without any delay?
I'm a bit rusty, and the iOS documentation on the whole of AVFoundation is woefully inadequate if you want to do anything slightly complicated. It's been a while since I did this, but something like this should work (I'm assuming you have an NSArray*, pAssets, containing all your videos as AVAsset*):
// Stitch every asset in pAssets end-to-end into a single composition with
// one video track and one audio track, so the whole sequence plays as one
// seamless item.
AVMutableComposition* pComposition = [AVMutableComposition composition];
// Define the tracks in the composition.
AVMutableCompositionTrack* pCompositionVideoTrack = [pComposition addMutableTrackWithMediaType: AVMediaTypeVideo preferredTrackID: 1];
AVMutableCompositionTrack* pCompositionAudioTrack = [pComposition addMutableTrackWithMediaType: AVMediaTypeAudio preferredTrackID: 2];
CMTime time = kCMTimeZero;
for ( AVAsset* pAssetsAsset in pAssets )
{
    // Grab the first video and audio track of this asset.
    AVAssetTrack* pAssetsAssetVideoTrack = [pAssetsAsset tracksWithMediaType: AVMediaTypeVideo].firstObject;
    AVAssetTrack* pAssetsAssetAudioTrack = [pAssetsAsset tracksWithMediaType: AVMediaTypeAudio].firstObject;
    // Time range covering the entire asset.
    // (Fixed: the original read "timepAssetsAsset.duration" — a typo that
    // would not compile.)
    CMTimeRange timeRange = CMTimeRangeMake( kCMTimeZero, pAssetsAsset.duration );
    // Insert the entire video and audio into their respective tracks at "time".
    NSError* pVideoError = nil;
    NSError* pAudioError = nil;
    [pCompositionVideoTrack insertTimeRange: timeRange ofTrack: pAssetsAssetVideoTrack atTime: time error: &pVideoError];
    [pCompositionAudioTrack insertTimeRange: timeRange ofTrack: pAssetsAssetAudioTrack atTime: time error: &pAudioError];
    // Advance the insertion point past the asset we just appended.
    time = CMTimeAdd( time, pAssetsAsset.duration );
}
AVMutableComposition is derived from AVAsset, so you can use it as normal and drop it into an AVPlayer (via an AVPlayerItem).
Now you can seek to any point in the video. Store the start/finish points of each video and you can easily seek to it.
Edit: It's ultra simple to use AVPlayer. First you need to create an AVPlayerItem, and then you need to play it.
// Wrap the composition in a player item, then build a player from it.
AVPlayerItem* pPlayerItem = [AVPlayerItem playerItemWithAsset: pComposition];
AVPlayer* pPlayer = [AVPlayer playerWithPlayerItem: pPlayerItem];
Now you need to attach it to a view's layer. So from inside your ViewController do something like this:
// Attach the player to a visible layer inside the view controller's view.
AVPlayerLayer* pLayer = [AVPlayerLayer playerLayerWithPlayer: pPlayer];
[self.view.layer addSublayer: pLayer];
Use AVQueuePlayer instead of simple AVPlayer. At the end of current video it will preload next video.
See how to use AVQueuePlayer here: https://stackoverflow.com/a/22785665/1271424

Playing a video recorded with AVFoundation

I would like to use the same button to start and stop recording. I would like to use another button to play back the recording. Here is what I have:
// Toggles movie recording: starts a length-capped recording to a temp
// file, or stops the recording currently in flight.
// Fixes vs. original: the method's closing brace was missing; @-literal
// restored; the duration/disk limits are now configured BEFORE
// -startRecordingToOutputFileURL: (setting them afterwards cannot affect
// the recording already started) and on self.movieOutput — the original
// set them on self.movieFileOutput, presumably the same output under a
// second name; verify against the class's properties.
- (IBAction)recordVideo:(id)sender {
    if (!self.movieOutput.isRecording) {
        NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mp4"];
        // Remove any stale file left over from a previous recording.
        NSFileManager *manager = [[NSFileManager alloc] init];
        if ([manager fileExistsAtPath:outputPath]) {
            [manager removeItemAtPath:outputPath error:nil];
        }
        Float64 maximumVideoLength = 5;   // maximum length, in seconds
        int32_t preferredTimeScale = 30;  // frames per second
        self.movieOutput.maxRecordedDuration = CMTimeMakeWithSeconds(maximumVideoLength, preferredTimeScale);
        self.movieOutput.minFreeDiskSpaceLimit = 1024 * 1024;
        [self.movieOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputPath]
                                      recordingDelegate:self];
    } else {
        [self.movieOutput stopRecording];
    }
}
// AVCaptureFileOutputRecordingDelegate callback: the recording has ended
// (either stopped, hit maxRecordedDuration, or errored), so stop the
// capture session. (Fixed: @-literal restored in the NSLog format string.)
- (void) captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
       fromConnections:(NSArray *)connections error:(NSError *)error
{
    NSLog(@"Recording to file ended");
    [_captureSession stopRunning];
}
Then to play:
// Plays back the recording produced by -recordVideo:.
// Fixes vs. original: the method's closing brace was missing, and the URL
// was built from the literal string "outputPath" (not a real path, and
// not a file URL) — which is why nothing appeared. Rebuild the actual
// temp-directory path and wrap it in a file URL.
- (IBAction)playVideo:(id)sender {
    NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mp4"];
    NSURL *fileURL = [NSURL fileURLWithPath:outputPath];
    self.avPlayer = [AVPlayer playerWithURL:fileURL];
    AVPlayerLayer *movieLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
    self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
    movieLayer.frame = self.cameraView.bounds;
    movieLayer.videoGravity = AVLayerVideoGravityResize;
    [self.cameraView.layer addSublayer:movieLayer];
    [self.avPlayer play];
}
When I run and press the playback button I get no errors and I see no avplayer.
You are recording and saving the file in the temporary directory:
NSString *outputPath = [NSTemporaryDirectory()stringByAppendingPathComponent:#"output.mp4"];
and trying to play from the bundle path. Use the same path to play the recording as well.
First, check whether your video was recorded and saved properly. From your code, the video is saved to the temporary directory. Check whether the video actually exists at that path:
// Log the temp-directory path so it can be checked on disk. (Fixed: @-literals.)
NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mp4"];
NSLog(@"%@", outputPath);
In your code, you are trying to play the video from outputPath, which is not defined and initialized in your code. If you have defined outputPath as a property or variable, you need to initialize _outputPath with the same path where you saved the video:
// Keep the recording path so the playback code can reuse it. (Fixed: @-literal.)
NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mp4"];
_outputPath = outputPath;
To Play Video Try this,
// Play the saved recording if the file exists. (Fixed: the original
// misspelled the property as "_ouputPath", which does not match the
// "_outputPath" assigned earlier in this answer.)
if ([[NSFileManager defaultManager] fileExistsAtPath:_outputPath]) {
    AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:_outputPath]];
    _avPlayer = [[AVPlayer alloc] initWithPlayerItem:[[AVPlayerItem alloc] initWithAsset:asset]];
    AVPlayerLayer *movieLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
    movieLayer.frame = self.cameraView.bounds;
    [self.cameraView.layer addSublayer:movieLayer];
    [self.avPlayer play];
}
Note: a local file path must be wrapped in a file URL — `URLWithString:` on a plain path produces a URL with no scheme, which AVPlayer cannot play. If you built the URL like this:
NSURL *url = [NSURL URLWithString:filePath];
replace it with this:
NSURL *url = [NSURL fileURLWithPath:filePath];
 and then try.

How to mute audio using MPMoviePlayerController?

// Configure an MPMoviePlayerController for a bundled mp4: default controls,
// white background, sized to the full screen, paused until the user plays.
// (Fixed: @-literals restored for the resource name and type.)
MPMoviePlayerController * _moviePlayerController = [[MPMoviePlayerController alloc] init];
_moviePlayerController.controlStyle = MPMovieControlStyleDefault;
NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"test" ofType:@"mp4"]];
_moviePlayerController.contentURL = url;
_moviePlayerController.fullscreen = NO;
_moviePlayerController.backgroundView.backgroundColor = [UIColor whiteColor];
// Paint every subview white so no black bars show through.
for (UIView *aSubView in _moviePlayerController.view.subviews) {
    aSubView.backgroundColor = [UIColor whiteColor];
}
// Size the player view to the full screen and center it.
_moviePlayerController.view.frame = CGRectMake(0.0f, 0.0f,
                                               [UIScreen mainScreen].bounds.size.width,
                                               [UIScreen mainScreen].bounds.size.height);
// _moviePlayerController.view.transform = CGAffineTransformMakeRotation(M_PI_2);
_moviePlayerController.view.center = self.view.center;
UIView *playerView = _moviePlayerController.view;
playerView.frame = CGRectMake(0.0f, 0.0f,
                              [UIScreen mainScreen].bounds.size.width,
                              [UIScreen mainScreen].bounds.size.height);
[videoPlayerView addSubview:playerView];
// Prepare but do not autoplay; stay paused until the user starts playback.
_moviePlayerController.shouldAutoplay = NO;
[_moviePlayerController prepareToPlay];
[_moviePlayerController pause];
It's not possible to mute the audio using MPMoviePlayerController. If you want to mute the player, you should use the AVFoundation framework instead. Try this code:
// Build an AVPlayer whose item is muted via a zero-volume audio mix:
// one zero-volume input-parameters entry per audio track of the asset.
AVURLAsset * asset = [AVURLAsset URLAssetWithURL:[self localMovieURL] options:nil];
NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
// Mute all the audio tracks
NSMutableArray * allAudioParams = [NSMutableArray array];
for (AVAssetTrack *track in audioTracks) {
// Volume 0 from the very start of the timeline, keyed to this track's ID.
AVMutableAudioMixInputParameters *audioInputParams =[AVMutableAudioMixInputParameters audioMixInputParameters];
[audioInputParams setVolume:0.0 atTime:kCMTimeZero ];
[audioInputParams setTrackID:[track trackID]];
[allAudioParams addObject:audioInputParams];
}
AVMutableAudioMix * audioZeroMix = [AVMutableAudioMix audioMix];
[audioZeroMix setInputParameters:allAudioParams];
// Create a player item
AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:asset];
[playerItem setAudioMix:audioZeroMix]; // Mute the player item
// Create a new Player, and set the player to use the player item
// with the muted audio mix
AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
self.mPlayer = player;
[mPlayer play];

Can I mute/unmute an UIWebview in some way?

I have a UIWebView which generates sound, but I want to remove it. How can I mute the audio output in a UIWebView?
I've found a way to mute/unmute a single-player UIWebView. Any time the UIWebView starts a video, the AVPlayerItemBecameCurrentNotification notification is fired. So handle it and track the current AVPlayerItem to control playback.
// Subscribe to AVPlayerItemBecameCurrentNotification (an undocumented
// notification name, hence the string literal) so we can capture the
// AVPlayerItem a UIWebView starts playing. (Fixed: @selector/@-literal.)
-(void)viewDidLoad
{
    // … existing setup …
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerItemBecameCurrent:)
                                                 name:@"AVPlayerItemBecameCurrentNotification"
                                               object:nil];
}
// Notification handler: remember the most recent AVPlayerItem the web
// view started playing so -muteSound: can operate on it later.
-(void)playerItemBecameCurrent:(NSNotification*)notification
{
    id posted = notification.object;
    if ([posted isKindOfClass:[AVPlayerItem class]]) {
        self.currentItem = (AVPlayerItem *)posted;
    }
}
// Mutes (mute == YES) or restores (mute == NO) every audio track of the
// current item by applying an audio mix whose per-track volume is 0 or 1.
-(void)muteSound:(BOOL)mute
{
    float targetVolume = mute ? 0.0f : 1.0f;
    NSArray *tracks = [self.currentItem.asset tracksWithMediaType:AVMediaTypeAudio];
    NSMutableArray *inputParams = [NSMutableArray array];
    for (AVAssetTrack *audioTrack in tracks)
    {
        AVMutableAudioMixInputParameters *trackParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
        [trackParams setVolume:targetVolume atTime:kCMTimeZero];
        [trackParams setTrackID:audioTrack.trackID];
        [inputParams addObject:trackParams];
    }
    AVMutableAudioMix *mix = [AVMutableAudioMix audioMix];
    [mix setInputParameters:inputParams];
    self.currentItem.audioMix = mix;
}
This will be the easiest way.
// Crude stop: load an empty URL into the web view, tearing down whatever
// media it was playing. (Fixed: @-literal restored.)
-(void)stopAudio
{
    [webView loadRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@""]]];
}

Finding nearest zero-crossing in an AVURLAsset

I'm trying to find a way to fade in an audio track held locally without an audio glitch. I'm using AVPlayer and referencing mp3s from the iPodLibrary using AVURLAsset. The following method works most of the time but not all the time so I'm thinking I need to scan through the audio data looking for the nearest volume zero-crossing and do my fade from there. Any pointers would be much appreciated.
// Fade the current item's first audio track from silence up to `tovolume`
// over `duration` seconds starting at the current playback time, once the
// asset's tracks have loaded. (Fixed: @-literals for the "tracks" key.)
// NOTE(review): `tovolume` is assumed to be captured from the enclosing
// scope — verify; also note the block strongly captures self.
float duration = 0.5;
AVAsset *asset = [self.av_Player.currentItem asset];
NSArray *keys = [NSArray arrayWithObject:@"tracks"];
[asset loadValuesAsynchronouslyForKeys:keys completionHandler:^(void) {
    NSError *error = nil;
    NSTimeInterval now = [self currentPlaybackTime];
    CMTime mainFadeIn = CMTimeMakeWithSeconds(now, 6000);
    CMTime mainFadeDuration = CMTimeMakeWithSeconds(duration, 6000);
    CMTimeRange timerange = CMTimeRangeMake(mainFadeIn, mainFadeDuration);
    AVKeyValueStatus trackStatus = [asset statusOfValueForKey:@"tracks" error:&error];
    switch (trackStatus) {
        case AVKeyValueStatusLoaded:
            if (self.av_Player) {
                NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeAudio];
                // Ramp volume 0 -> tovolume across [now, now + duration].
                AVMutableAudioMixInputParameters *volumeMixInput = [AVMutableAudioMixInputParameters audioMixInputParameters];
                [volumeMixInput setVolumeRampFromStartVolume:0.0 toEndVolume:tovolume timeRange:timerange];
                [volumeMixInput setTrackID:[[tracks objectAtIndex:0] trackID]];
                AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
                [audioMix setInputParameters:[NSArray arrayWithObject:volumeMixInput]];
                [self.av_Player.currentItem setAudioMix:audioMix];
            }
            break;
        default:
            break;
    }
}];
No idea why I was getting glitches with the above method, but I assume it might have something to do with using blocks. Anyway, I used the code below and it now works smoothly for fade-ins and fade-outs — hope this helps others.
// Glitch-free fade: instead of a volume ramp, set three volume keyframes
// on every audio track — hold `fromvolume` just before and at `now`, then
// `tovolume` at `now + duration` — and apply the mix to the current item.
NSTimeInterval now = [self currentPlaybackTime];
AVPlayerItem *playerItem = self.av_Player.currentItem;
AVAsset *asset = playerItem.asset;
NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
NSMutableArray *allAudioParams = [NSMutableArray array];
for (AVAssetTrack *track in audioTracks) {
AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
// Anchor the current volume slightly before `now` so the fade starts flat.
[audioInputParams setVolume:fromvolume atTime:CMTimeMakeWithSeconds(now-0.1, 6000)];
[audioInputParams setVolume:fromvolume atTime:CMTimeMakeWithSeconds(now, 6000)];
[audioInputParams setVolume:tovolume atTime:CMTimeMakeWithSeconds(now+duration, 6000)];
[audioInputParams setTrackID:[track trackID]];
[allAudioParams addObject:audioInputParams];
}
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
[audioMix setInputParameters:allAudioParams];
[playerItem setAudioMix:audioMix];

Resources