Issue merging audio and video files in iOS

The error is: Terminating app due to uncaught exception 'NSRangeException', reason: '*** -[__NSArrayM objectAtIndex:]: index 0 beyond bounds for empty array'
It is thrown on this line: [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
My code:
- (void)getAndPlayAVideoTest
{
    NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
    NSString *videoOutputPath = [bundleDirectory stringByAppendingPathComponent:@"abc.mp3"];
    NSString *outputFilePath = [bundleDirectory stringByAppendingPathComponent:@"final_video.mp4"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    NSString *filePath = [bundleDirectory stringByAppendingPathComponent:@"abc.mov"];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:filePath];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        if (_assetExport.status == AVAssetExportSessionStatusCompleted) {
            // Write code here to continue
        }
        else {
            // Write failure handling here
        }
    }];
}

In that case, [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil]; fails to read the array because no sound/audio track is present in that particular file. This error occurs only when the multimedia file contains no audio track.

Try using
AVURLAsset* audioAsset = [AVURLAsset URLAssetWithURL:audio_inputFileUrl options:nil];
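Also note that the question's code appears to swap its inputs: the .mp3 path ends up in video_inputFileUrl and the .mov path in audio_inputFileUrl, which is worth double-checking. Independently of that, you can avoid the crash by guarding the track lookup before indexing into it. A minimal sketch, reusing the variable names from the question:

NSArray *audioTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
if (audioTracks.count > 0) {
    // Safe: the asset really does contain an audio track.
    [b_compositionAudioTrack insertTimeRange:audio_timeRange
                                     ofTrack:audioTracks.firstObject
                                      atTime:nextClipStartTime
                                       error:nil];
} else {
    // No audio track (or the file failed to load) -- skip instead of crashing.
    NSLog(@"No audio track found in %@", audio_inputFileUrl);
}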

Related

How do I add a watermark (text or image) to a video?

I'm doing some video manipulation in my app. Currently this code takes a user-generated video, adds a sound to it, and exports it at the same quality. This works well, but I can't figure out how to add a watermark to the video. I'm happy to do it with a UIImage or a text layer; I just want it to show the name of my app on the video, without losing quality. Does anyone know how I can augment this code so that it adds a watermark?
I am working in Objective-C, not Swift.
AVMutableComposition *mixComposition = [AVMutableComposition composition];
NSURL *audioPath = [[NSBundle mainBundle] URLForResource:@"sound" withExtension:@"mp3"];
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioPath options:nil];
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:self.videoUrl options:nil];
AVAssetTrack *assetVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].lastObject;

// add video
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];

// add the video's own audio
AVMutableCompositionTrack *videoSoundTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                          preferredTrackID:kCMPersistentTrackID_Invalid];
[videoSoundTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                         ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                          atTime:kCMTimeZero error:nil];

// add the extra sound
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:self.avPlayer.currentTime error:nil];

CGSize sizeOfVideo = [compositionVideoTrack naturalSize];

AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetPassthrough];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *savePath = [docsDir stringByAppendingPathComponent:@"video.mov"];
NSURL *savetUrl = [NSURL fileURLWithPath:savePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:savePath]) {
    [[NSFileManager defaultManager] removeItemAtPath:savePath error:nil];
    [[NSFileManager defaultManager] removeItemAtURL:savetUrl error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = savetUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    dispatch_async(dispatch_get_main_queue(), ^{
        [MBProgressHUD hideHUDForView:self.view animated:YES];
    });
    switch (_assetExport.status)
    {
        case AVAssetExportSessionStatusFailed:
        {
            NSLog(@"FAIL %@", _assetExport.error);
            break;
        }
        case AVAssetExportSessionStatusCompleted:
        {
            dispatch_async(dispatch_get_main_queue(), ^{
                // work with the video
            });
            break;
        }
        case AVAssetExportSessionStatusCancelled:
        {
            NSLog(@"CANCELED");
            break;
        }
        default:
            break;
    }
    NSLog(@"Export Status %ld -- %@", (long)_assetExport.status, _assetExport.outputURL);
}];
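No watermark answer was posted in this thread, but a common approach (my suggestion, not from the thread) is to attach an AVMutableVideoComposition using AVVideoCompositionCoreAnimationTool to composite a CATextLayer over the video. Note this cannot work with AVAssetExportPresetPassthrough, which ignores video compositions, so a re-encoding preset is needed. A minimal sketch, reusing mixComposition, compositionVideoTrack, and sizeOfVideo from the code above; the watermark string is a hypothetical placeholder:

// Build a text layer to act as the watermark.
CATextLayer *watermark = [CATextLayer layer];
watermark.string = @"MyAppName"; // hypothetical app name
watermark.fontSize = 36;
watermark.foregroundColor = [UIColor whiteColor].CGColor;
watermark.frame = CGRectMake(20, 20, sizeOfVideo.width - 40, 50);

// Parent layer holds the video layer plus the watermark on top.
CALayer *videoLayer = [CALayer layer];
videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
CALayer *parentLayer = [CALayer layer];
parentLayer.frame = videoLayer.frame;
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:watermark];

// A pass-through instruction covering the whole composition.
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration);
AVMutableVideoCompositionLayerInstruction *layerInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
instruction.layerInstructions = @[layerInstruction];

AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = sizeOfVideo;
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.instructions = @[instruction];
videoComposition.animationTool =
    [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                  inLayer:parentLayer];

// Then export with e.g. AVAssetExportPresetHighestQuality and:
// _assetExport.videoComposition = videoComposition;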

Insert audio clip in original audio file at given offset

I am developing an application that has a recording feature. I now need to edit a recording by inserting another clip at a specific time. I.e., I have a 20-second recording and want to replace the portion from the 10th to the 15th second with another 5-second audio file, so the exported file plays the first 10 seconds of my recording, then the replacement clip from the 10th to the 15th second, and then my recording again from the 15th to the 20th second.
Please help me find a workable solution.
Thanks in advance.
Here is my code:
- (void)mergeAndSaveWithVideoURLAtFirstOffset:(CMTime)offset1 andSecondOffset:(CMTime)offset2 {
    NSURL *video_url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Source" ofType:@"mp4"]];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    NSURL *source_audio_url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Source" ofType:@"mp3"]];
    NSURL *audio_url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Asteroid_Sound" ofType:@"mp3"]];
    audioAsset = [[AVURLAsset alloc] initWithURL:audio_url options:nil];
    source_audioAsset = [[AVURLAsset alloc] initWithURL:source_audio_url options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, source_audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[source_audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:offset1 error:nil];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:offset2 error:nil];
    videoAsset = [[AVURLAsset alloc] initWithURL:video_url options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"FinalVideo.mov"];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:_assetExport];
        });
    }];
}
You first have to trim the audio at the particular offset; this link may be helpful: iOS Audio Trimming.
Then merge your other audio clip in.
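Alternatively (my own suggestion, not from the answer above), the replacement can be done directly on the composition track without pre-trimming files: insert the whole recording, remove the range to be replaced, then insert the clip there. A minimal sketch, where recordingAsset and clipAsset are hypothetical AVURLAssets for the 20-second recording and the 5-second replacement:

AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *track =
    [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                             preferredTrackID:kCMPersistentTrackID_Invalid];

// 1. Lay down the whole 20 s recording.
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, recordingAsset.duration)
               ofTrack:[[recordingAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                atTime:kCMTimeZero
                 error:nil];

// 2. Cut out the 10 s - 15 s window (later material shifts earlier).
CMTimeRange cutRange = CMTimeRangeMake(CMTimeMake(10, 1), CMTimeMake(5, 1));
[track removeTimeRange:cutRange];

// 3. Insert the first 5 s of the clip at the 10 s mark
//    (material after 10 s shifts back out to 15 s).
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMake(5, 1))
               ofTrack:[[clipAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                atTime:CMTimeMake(10, 1)
                 error:nil];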

Is it possible to merge two video files to one file, one screen in iOS?

I'm new to video programming. I'm trying to practice it, but I'm having trouble merging two video files into one.
The merge I mean is as follows:
I have a first video like this
The second video is also like this
I want them merged like this
I didn't want to use two video players, because I want to send the merged video file to someone. I searched all day to solve this, but I couldn't find how.
I wrote code referencing this link, but it shows the first video only, not the merged result.
My Code:
NSURL *firstURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video1" ofType:@"mp4"]];
AVURLAsset *firstAsset = [[AVURLAsset alloc] initWithURL:firstURL options:nil];
NSURL *secondURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video2" ofType:@"mp4"]];
AVURLAsset *secondAsset = [[AVURLAsset alloc] initWithURL:secondURL options:nil];
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                    ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                     atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                      preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration)
                     ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                      atTime:kCMTimeZero error:nil];
[secondTrack setPreferredTransform:CGAffineTransformMakeScale(0.25f, 0.25f)];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"FinalVideo.mov"];
NSLog(@"%@", outputFilePath);
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
assetExport.outputFileType = @"com.apple.quicktime-movie";
assetExport.outputURL = outputFileUrl;
[assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    switch (assetExport.status) {
        case AVAssetExportSessionStatusFailed:
            NSLog(@"AVAssetExportSessionStatusFailed");
            break;
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"AVAssetExportSessionStatusCompleted");
            break;
        case AVAssetExportSessionStatusWaiting:
            NSLog(@"AVAssetExportSessionStatusWaiting");
            break;
        default:
            break;
    }
}];
What am I missing? I don't know how to approach solving this.
Appreciate any ideas.
Thanks.
Edit:
I made new code referencing a link matt wrote (thanks, matt), but when I tried to export it, only the first video was exported, not both. :(
My new code:
NSURL *originalVideoURL1 = [[NSBundle mainBundle] URLForResource:@"video1" withExtension:@"mov"];
NSURL *originalVideoURL2 = [[NSBundle mainBundle] URLForResource:@"video2" withExtension:@"mov"];
AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:originalVideoURL1 options:nil];
AVURLAsset *secondAsset = [AVURLAsset URLAssetWithURL:originalVideoURL2 options:nil];
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init]; //[AVMutableComposition composition];
NSError *error = nil;
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if (error) {
    NSLog(@"firstTrack error!!!. %@", error.localizedDescription);
}
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if (error) {
    NSLog(@"secondTrack error!!!. %@", error.localizedDescription);
}
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
AVMutableVideoCompositionLayerInstruction *firstLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.7, 0.7);
CGAffineTransform move = CGAffineTransformMakeTranslation(230, 230);
[firstLayerInstruction setTransform:CGAffineTransformConcat(scale, move) atTime:kCMTimeZero];
AVMutableVideoCompositionLayerInstruction *secondLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
CGAffineTransform secondScale = CGAffineTransformMakeScale(1.2, 1.5);
CGAffineTransform secondMove = CGAffineTransformMakeTranslation(0, 0);
[secondLayerInstruction setTransform:CGAffineTransformConcat(secondScale, secondMove) atTime:kCMTimeZero];
mainInstruction.layerInstructions = @[firstLayerInstruction, secondLayerInstruction];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = @[mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.renderSize = CGSizeMake(640, 480);
AVPlayerItem *newPlayerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
newPlayerItem.videoComposition = mainCompositionInst;
AVPlayer *player = [[AVPlayer alloc] initWithPlayerItem:newPlayerItem];
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
[playerLayer setFrame:self.view.bounds];
[self.view.layer addSublayer:playerLayer];
[player seekToTime:kCMTimeZero];
[player play]; // play is good!!
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *tempS2 = [documentsDirectory stringByAppendingPathComponent:@"FinalVideo.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:tempS2])
{
    [[NSFileManager defaultManager] removeItemAtPath:tempS2 error:nil];
}
NSURL *url = [[NSURL alloc] initFileURLWithPath:tempS2];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = url;
NSLog(@"%@", [exportSession supportedFileTypes]);
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (exportSession.status == AVAssetExportSessionStatusFailed) {
        NSLog(@"failed");
    }
    else {
        NSLog(@"AudioLocation : %@", tempS2);
    }
}];
How can I export my mixComposition and layer instructions together?
Please give me a few more ideas.
Thanks.
With reference to the code in your second edit: just as you've told the AVPlayerItem about your AVMutableVideoComposition, you need to tell the AVAssetExportSession as well:
exportSession.videoComposition = mainCompositionInst;
// exportAsynchronouslyWithCompletionHandler etc
N.B. make sure you choose the longer of the two track durations when setting your instruction duration:
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMaximum(firstAsset.duration, secondAsset.duration));
AVPlayer doesn't mind if you get this wrong, but AVAssetExportSession does and will return an AVErrorInvalidVideoComposition (-11841) error.
N.B. 2 Your AVPlayer isn't actually going out of scope, but it makes me nervous when I look at it. I'd assign it to a property if I were you.
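For instance (a minimal sketch, assuming a view controller class extension), a strong property keeps the player referenced for the whole playback:

// In the class extension -- holds the player for the lifetime of the controller.
@property (nonatomic, strong) AVPlayer *player;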

Merging 2 CAF audio files using AVFoundation in iOS 9

Hi, I am facing this issue in iOS 9. I am trying to merge two CAF files, and everything worked fine in iOS 8, but when I test the app on iOS 9 it does not work as expected: only one file plays when I play the final merged file. Any kind of help will be appreciated.
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack setPreferredVolume:1.0];
NSURL *url = [NSURL fileURLWithPath:[url3 path]];
AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack1 setPreferredVolume:1.0];
NSURL *url1 = [NSURL fileURLWithPath:[url12 path]];
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
AVAssetTrack *clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:kCMTimeZero error:nil];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition
                                                                         presetName:AVAssetExportPresetAppleM4A];
//NSLog(@"Output file path - %@", soundOneNew);
// configure export session output with all our parameters
exportSession.outputURL = [NSURL fileURLWithPath:soundOneNew]; // output path
exportSession.outputFileType = AVFileTypeAppleM4A;             // output file type
// perform the export
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusCompleted");
        NSData *newAudiofound = [NSData dataWithContentsOfURL:exportSession.outputURL];
        [newAudiofound writeToFile:[finalURLAudio path] atomically:YES];
        [self playAudio:exportSession.outputURL];
    } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
        // a failure may happen because of an event out of your control,
        // for example an interruption like an incoming phone call;
        // make sure to handle this case appropriately
        NSLog(@"AVAssetExportSessionStatusFailed");
    } else {
        NSLog(@"Export Session Status: %ld", (long)exportSession.status);
    }
}];
- (void)mergesound {
    NSURL *audio_url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"1" ofType:@"m4a"]];
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];
    CMTime currentTime = kCMTimeZero;
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack setPreferredVolume:1.0];
    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:audio_url options:nil];
    AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:audio_url options:nil];
    AVAssetTrack *clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    currentTime = CMTimeAdd(currentTime, avAsset.duration);
    // Append the second clip to the SAME track, right after the first one.
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:currentTime error:nil];
    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition
                                                                             presetName:AVAssetExportPresetAppleM4A];
    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"FinalVideo.m4a"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    exportSession.outputURL = [NSURL fileURLWithPath:outputFilePath]; // output path
    exportSession.outputFileType = AVFileTypeAppleM4A;                // output file type
    // perform the export
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status)
        {
            NSLog(@"AVAssetExportSessionStatusCompleted");
            NSData *newAudiofound = [NSData dataWithContentsOfURL:exportSession.outputURL];
            [newAudiofound writeToFile:outputFilePath atomically:YES];
            // [self playAudio:exportSession.outputURL];
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            // a failure may happen because of an event out of your control,
            // for example an interruption like an incoming phone call;
            // make sure to handle this case appropriately
            NSLog(@"AVAssetExportSessionStatusFailed");
        } else {
            NSLog(@"Export Session Status: %ld", (long)exportSession.status);
        }
    }];
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *path1 = [NSString stringWithFormat:@"%@/firstPart.caf", documentsDirectory];
NSString *path2 = [NSString stringWithFormat:@"%@/newRecording.caf", documentsDirectory];
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:path1] options:nil];
AVAsset *avAsset2 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:path2] options:nil];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
// `version` is assumed to hold the running iOS version,
// e.g. [[[UIDevice currentDevice] systemVersion] floatValue]
if (version < 9.0) {
    [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
}
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *assetTrack1;
AVAssetTrack *assetTrack2;
if ([avAsset1 tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    assetTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    assetTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
CMTime insertionPoint = kCMTimeZero;
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:assetTrack1 atTime:insertionPoint error:nil];
insertionPoint = CMTimeAdd(insertionPoint, avAsset1.duration);
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset2.duration) ofTrack:assetTrack2 atTime:insertionPoint error:nil];
NSURL *outPutUrl = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/mergedFilePart1.caf", documentsDirectory]];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
exportSession.outputURL = outPutUrl;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusCompleted");
        [self mergeAudioPart2];
    }
    else if (AVAssetExportSessionStatusFailed == exportSession.status)
    {
        NSLog(@"%ld", (long)exportSession.status);
        NSLog(@"AVAssetExportSessionStatusFailed");
    }
    else
    {
        NSLog(@"Export Session Status: %ld", (long)exportSession.status);
    }
}];
Replace your code with this.
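The key difference from the code in the question, as far as I can tell, is that both clips are appended sequentially into a single composition track via CMTimeAdd, rather than placed on two parallel tracks starting at kCMTimeZero; iOS 9's M4A export appears to keep only one of two overlapping audio tracks, which would explain why just one file was audible.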

How can I merge audio and video in the iPhone SDK?

I am creating a project in which I merge video and audio using the following code in Objective-C.
AVMutableComposition *mixComposition = [AVMutableComposition composition];
NSString *audio_inputFilePath = [libraryDirectory stringByAppendingPathComponent:@"SelectedSong.wav"];
NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
NSString *videoOutputPath = [libraryDirectory stringByAppendingPathComponent:@"Video.mp4"];
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// audio_timeRange and outputFileUrl are defined elsewhere in the asker's code
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = @"com.apple.quicktime-movie";
//_assetExport.outputFileType = AVFileTypeMPEG4;
//NSLog(@"supported file types = %@", [_assetExport supportedFileTypes]);
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
}];
but
[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
returns an empty array on the device, while in the simulator it works fine.
So please help me resolve this.
In that case,
[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
in the line [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:... atTime:kCMTimeZero error:nil]; cannot read the array, because no sound/audio track is present in that particular file. This error occurs only when the multimedia file contains no audio track. Since it works in the simulator but fails on the device, it is also worth verifying that SelectedSong.wav actually exists at that path on the device; an asset built from a missing file has no tracks at all.
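A quick sanity check before building the asset (a minimal sketch, reusing audio_inputFilePath from the question's code):

if (![[NSFileManager defaultManager] fileExistsAtPath:audio_inputFilePath]) {
    // The file never made it onto the device -- the asset will have no tracks.
    NSLog(@"Audio file missing at %@", audio_inputFilePath);
}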
