How to merge multiple videos using AVAsset in iOS?

I want to merge multiple videos into one, but I don't know how to do it, so please help me with this issue.
Below is my code, but it isn't working for me.
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
NSString *videoName = @"export.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    // your completion code here
}];
I have searched for this but no answer worked for me.

Check my code below; this works correctly:
-(void)MergeVideo
{
    AppDelegate *appdel = (AppDelegate *)[[UIApplication sharedApplication] delegate];
    NSLog(@"Array Video Paths :- %@", appdel.arrVideoPath);

    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    // Append each clip's video and audio end to end. Each clip is assumed to
    // contain at least one video track and one audio track.
    CMTime insertTime = kCMTimeZero;
    for (id object in appdel.arrVideoPath)
    {
        AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:object]];
        CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
        [videoTrack insertTimeRange:timeRange
                            ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                             atTime:insertTime
                              error:nil];
        [audioTrack insertTimeRange:timeRange
                            ofTrack:[[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                             atTime:insertTime
                              error:nil];
        insertTime = CMTimeAdd(insertTime, asset.duration);
    }

    NSString *documentsDirectory = [self applicationDocumentsDirectory];
    myDocumentPath = [documentsDirectory stringByAppendingPathComponent:@"merge_video.mp4"];
    urlVideoMain = [[NSURL alloc] initFileURLWithPath:myDocumentPath];
    // Remove any previous output at the same path, otherwise the export fails.
    if ([[NSFileManager defaultManager] fileExistsAtPath:myDocumentPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:myDocumentPath error:nil];
    }

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = urlVideoMain;
    exporter.outputFileType = @"com.apple.quicktime-movie";
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        switch ([exporter status])
        {
            case AVAssetExportSessionStatusFailed:
                break;
            case AVAssetExportSessionStatusCancelled:
                break;
            case AVAssetExportSessionStatusCompleted:
                break;
            default:
                break;
        }
    }];
}
- (NSString *)applicationDocumentsDirectory
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    return basePath;
}
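One caveat worth noting with this approach (an assumption based on common AVFoundation behaviour, not something stated in the original answer): the composition track does not inherit each clip's preferredTransform, so portrait clips may come out rotated. A minimal sketch, assuming every clip shares the first clip's orientation:
// Inside MergeVideo, after the loop: copy the first clip's transform onto the
// composition's video track so orientation metadata is preserved.
AVAsset *firstAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:appdel.arrVideoPath.firstObject]];
AVAssetTrack *firstVideoTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
videoTrack.preferredTransform = firstVideoTrack.preferredTransform;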

{
    AVURLAsset *video01;
    AVURLAsset *video02;
    CGFloat totalDuration = 0; // running length of the composed track, in seconds
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *composedTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    for (int i = 0; i < [arrVideoPath count]; i++) // arrVideoPath contains all video paths
    {
        if (i == 0)
        {
            video02 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:[arrVideoPath objectAtIndex:i]] options:nil];
            [composedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video02.duration) ofTrack:[[video02 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
        }
        else
        {
            video01 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:[arrVideoPath objectAtIndex:i - 1]] options:nil];
            video02 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:[arrVideoPath objectAtIndex:i]] options:nil];
            float duration1 = CMTimeGetSeconds([video01 duration]);
            totalDuration = totalDuration + duration1;
            // Use a fine-grained timescale; a timescale of 1 would round the
            // insertion point to whole seconds and misalign the clips.
            CMTime time1 = CMTimeMakeWithSeconds(totalDuration, 600);
            [composedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video02.duration) ofTrack:[[video02 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:time1 error:nil];
        }
    }
    // Export once, after the loop has inserted every clip.
    NSString *documentsDirectory = [self applicationDocumentsDirectory];
    myDocumentPath = [documentsDirectory stringByAppendingPathComponent:@"merge_video.mp4"]; // myDocumentPath is an NSString giving the path of the output (combined) video
    urlVideoMain = [[NSURL alloc] initFileURLWithPath:myDocumentPath]; // urlVideoMain is the URL of the output video
    if ([[NSFileManager defaultManager] fileExistsAtPath:myDocumentPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:myDocumentPath error:nil];
    } // removes any previous video at the same path, essential
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = urlVideoMain;
    exporter.outputFileType = @"com.apple.quicktime-movie";
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        switch ([exporter status]) {
            case AVAssetExportSessionStatusFailed:
                break;
            case AVAssetExportSessionStatusCancelled:
                break;
            case AVAssetExportSessionStatusCompleted:
                break;
            default:
                break;
        }
    }];
}
-(NSString *)applicationDocumentsDirectory
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    return basePath;
}

Related

How do I add a watermark (text or image) to a video?

I'm doing some video manipulation in my app. Currently this code takes a user-generated video, adds a sound to it, and then exports it at the same quality. This works well, but I can't figure out how to add a watermark to the video. I'm happy to do it with a UIImage or a text layer; I just want it to show the name of my app on the video without losing quality. Does anyone know how I can augment this code so that it adds a watermark?
I am working in Objective-C, not Swift.
AVMutableComposition *mixComposition = [AVMutableComposition composition];
NSURL *audioPath = [[NSBundle mainBundle] URLForResource:@"sound" withExtension:@"mp3"];
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioPath options:nil];
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:self.videoUrl options:nil];
AVAssetTrack *assetVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].lastObject;
// add video
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
// add the video's own audio
AVMutableCompositionTrack *videoSoundTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                          preferredTrackID:kCMPersistentTrackID_Invalid];
[videoSoundTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                         ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                          atTime:kCMTimeZero error:nil];
// add the extra sound
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:self.avPlayer.currentTime error:nil];
CGSize sizeOfVideo = [compositionVideoTrack naturalSize];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetPassthrough];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *savePath = [docsDir stringByAppendingPathComponent:@"video.mov"];
NSURL *savetUrl = [NSURL fileURLWithPath:savePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:savePath]) {
    [[NSFileManager defaultManager] removeItemAtPath:savePath error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = savetUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    dispatch_async(dispatch_get_main_queue(), ^{
        [MBProgressHUD hideHUDForView:self.view animated:YES];
    });
    switch (_assetExport.status)
    {
        case AVAssetExportSessionStatusFailed:
        {
            NSLog(@"FAIL %@", _assetExport.error);
            break;
        }
        case AVAssetExportSessionStatusCompleted:
        {
            dispatch_async(dispatch_get_main_queue(), ^{
                // work with the video
            });
            break;
        }
        case AVAssetExportSessionStatusCancelled:
        {
            NSLog(@"CANCELED");
            break;
        }
        default:
            break;
    }
    NSLog(@"Export Status %ld -- %@", (long)_assetExport.status, _assetExport.outputURL);
}];
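No answer with the actual watermark step was posted here, so here is a minimal sketch using AVVideoCompositionCoreAnimationTool. It reuses mixComposition, compositionVideoTrack, sizeOfVideo, and _assetExport from the code above; the watermark text "MyApp" is a placeholder. One caveat: a videoComposition is not applied with AVAssetExportPresetPassthrough, so a re-encoding preset such as AVAssetExportPresetHighestQuality is needed.
// Build a video composition that renders the frames through a Core Animation layer tree.
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = sizeOfVideo;
videoComposition.frameDuration = CMTimeMake(1, 30);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration);
instruction.layerInstructions = @[[AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack]];
videoComposition.instructions = @[instruction];
// A text layer carrying the app name, positioned near the bottom of the frame.
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = @"MyApp"; // placeholder watermark text
titleLayer.fontSize = 36;
titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.frame = CGRectMake(0, 20, sizeOfVideo.width, 50);
// Parent layer hosting the video frames plus the watermark on top.
CALayer *videoLayer = [CALayer layer];
videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
CALayer *parentLayer = [CALayer layer];
parentLayer.frame = videoLayer.frame;
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:titleLayer];
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
_assetExport.videoComposition = videoComposition;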

AVAssetExportSessionStatusFailed while merging audio files in iOS 9

I am using this code, which worked perfectly fine on versions before iOS 9, but on iOS 9 the export always ends in AVAssetExportSessionStatusFailed.
// documentsDirectory is the app's Documents directory path, obtained elsewhere.
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/firstPart.caf", documentsDirectory]] options:nil];
AVAsset *avAsset2 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/New-Recording.caf", documentsDirectory]] options:nil];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
[composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *assetTrack1;
AVAssetTrack *assetTrack2;
if ([avAsset1 tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    assetTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    assetTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
CMTime insertionPoint = kCMTimeZero;
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:assetTrack1 atTime:insertionPoint error:nil];
insertionPoint = CMTimeAdd(insertionPoint, avAsset1.duration);
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset2.duration) ofTrack:assetTrack2 atTime:insertionPoint error:nil];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
exportSession.outputURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/mergedFilePart1.caf", documentsDirectory]];
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusCompleted");
        [self mergeAudioPart2];
    } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusFailed");
    } else {
        NSLog(@"Export Session Status: %ld", (long)exportSession.status);
    }
}];
I searched all over the internet but didn't find any solution. Any help will be appreciated.
Your code should work, but without seeing your input files it's hard to tell. However, it can still be improved.
You've got an unused mutable track for some reason. You can delete it:
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
// This track is unused. Delete it!
// [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
When the export fails (AVAssetExportSessionStatusFailed == exportSession.status) check exportSession.error for more information. AVFoundation error messages often leave much to be desired, but you could be lucky.
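For example, a minimal sketch of that check:
// Log the underlying error when the export fails.
if (exportSession.status == AVAssetExportSessionStatusFailed) {
    NSLog(@"Export failed: %@", exportSession.error);
}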
You're exporting an m4a file, but the file suffix is .caf. Change it to .m4a (AVAssetExportSession doesn't appear to support exporting to AVFileTypeCoreAudioFormat).
Make sure to always remove the output URL before exporting; you've forgotten to do this:
NSURL *outputURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/mergedFilePart1.m4a", documentsDirectory]];
[[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
exportSession.outputURL = outputURL;
exportSession.outputFileType = AVFileTypeAppleM4A;
insertTimeRange:ofTrack:error: returns a success flag and an error! Consult them! They might point to a problem with your code!
NSError *error;
if (![track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:assetTrack1 atTime:insertionPoint error:&error]) {
    NSLog(@"ERROR 1: %@", error);
}
This condition is strange. You check the existence of audio tracks in one asset, then use the other as well:
if ([avAsset1 tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    assetTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    assetTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
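A safer version (a sketch; it simply guards both assets before touching their tracks) might be:
// Check both assets before grabbing their first audio tracks.
if ([avAsset1 tracksWithMediaType:AVMediaTypeAudio].count > 0 &&
    [avAsset2 tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    assetTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] firstObject];
    assetTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] firstObject];
}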
The issue was here:
// [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
This line was adding an extra track. With it removed, the code below works perfectly on iOS 9 as well.
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *path1 = [NSString stringWithFormat:@"%@/firstPart.caf", documentsDirectory];
NSString *path2 = [NSString stringWithFormat:@"%@/New-Recording.caf", documentsDirectory];
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:path1] options:nil];
AVAsset *avAsset2 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:path2] options:nil];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
// [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *assetTrack1;
AVAssetTrack *assetTrack2;
if ([avAsset1 tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    assetTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    assetTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
NSError *error;
CMTime insertionPoint = kCMTimeZero;
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:assetTrack1 atTime:insertionPoint error:&error];
insertionPoint = CMTimeAdd(insertionPoint, avAsset1.duration);
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset2.duration) ofTrack:assetTrack2 atTime:insertionPoint error:&error];
NSURL *outPutUrl = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/mergedFilePart1.caf", documentsDirectory]];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
exportSession.outputURL = outPutUrl;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusCompleted");
        [self mergeAudioPart2];
    } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
        NSLog(@"%ld", (long)exportSession.status);
        NSLog(@"AVAssetExportSessionStatusFailed");
    } else {
        NSLog(@"Export Session Status: %ld", (long)exportSession.status);
    }
}];

Merging 2 CAF audio files using AVFoundation in iOS 9

Hi, I am facing this issue in iOS 9. I am trying to merge two CAF files; everything worked fine in iOS 8, but on iOS 9 it does not work as expected: only one file plays when I play the final merged file. Any kind of help will be appreciated.
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack setPreferredVolume:1.0];
NSURL *url = [NSURL fileURLWithPath:[url3 path]];
AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack1 setPreferredVolume:1.0];
NSURL *url1 = [NSURL fileURLWithPath:[url12 path]];
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
AVAssetTrack *clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:kCMTimeZero error:nil];
AVAssetExportSession *exportSession = [AVAssetExportSession
                                       exportSessionWithAsset:composition
                                       presetName:AVAssetExportPresetAppleM4A];
//NSLog(@"Output file path - %@", soundOneNew);
// configure export session output with all our parameters
exportSession.outputURL = [NSURL fileURLWithPath:soundOneNew]; // output path
exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
// perform the export
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == exportSession.status) {
        NSData *newAudiofound = [NSData dataWithContentsOfURL:exportSession.outputURL];
        [newAudiofound writeToFile:[finalURLAudio path] atomically:YES];
        [self playAudio:exportSession.outputURL];
    } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
        // a failure may happen because of an event out of your control
        // for example, an interruption like a phone call coming in
        // make sure to handle this case appropriately
        NSLog(@"AVAssetExportSessionStatusFailed");
    } else {
        NSLog(@"Export Session Status: %ld", (long)exportSession.status);
    }
}];
-(void)mergesound
{
    NSURL *audio_url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"1" ofType:@"m4a"]];
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];
    CMTime currentTime = kCMTimeZero;
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack setPreferredVolume:1.0];
    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:audio_url options:nil];
    AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    // Insert the first clip at time zero on a single composition track...
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:audio_url options:nil];
    AVAssetTrack *clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    // ...then append the second clip after it, so they play back to back.
    currentTime = CMTimeAdd(currentTime, avAsset.duration);
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:currentTime error:nil];
    AVAssetExportSession *exportSession = [AVAssetExportSession
                                           exportSessionWithAsset:composition
                                           presetName:AVAssetExportPresetAppleM4A];
    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"FinalVideo.m4a"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    exportSession.outputURL = [NSURL fileURLWithPath:outputFilePath]; // output path
    exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
    // perform the export
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status)
        {
            NSData *newAudiofound = [NSData dataWithContentsOfURL:exportSession.outputURL];
            [newAudiofound writeToFile:outputFilePath atomically:YES];
            // [self playAudio:exportSession.outputURL];
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            // a failure may happen because of an event out of your control
            // for example, an interruption like a phone call coming in
            // make sure to handle this case appropriately
            NSLog(@"AVAssetExportSessionStatusFailed");
        } else {
            NSLog(@"Export Session Status: %ld", (long)exportSession.status);
        }
    }];
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *path1 = [NSString stringWithFormat:@"%@/firstPart.caf", documentsDirectory];
NSString *path2 = [NSString stringWithFormat:@"%@/newRecording.caf", documentsDirectory];
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:path1] options:nil];
AVAsset *avAsset2 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:path2] options:nil];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
// 'version' is assumed to hold the running iOS version (e.g. parsed from
// [[UIDevice currentDevice] systemVersion]); the extra track is only
// tolerated before iOS 9.
if (version < 9.0) {
    [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
}
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *assetTrack1;
AVAssetTrack *assetTrack2;
if ([avAsset1 tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    assetTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    assetTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
CMTime insertionPoint = kCMTimeZero;
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:assetTrack1 atTime:insertionPoint error:nil];
insertionPoint = CMTimeAdd(insertionPoint, avAsset1.duration);
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset2.duration) ofTrack:assetTrack2 atTime:insertionPoint error:nil];
NSURL *outPutUrl = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/mergedFilePart1.caf", documentsDirectory]];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
exportSession.outputURL = outPutUrl;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusCompleted");
        [self mergeAudioPart2];
    }
    else if (AVAssetExportSessionStatusFailed == exportSession.status)
    {
        NSLog(@"%ld", (long)exportSession.status);
        NSLog(@"AVAssetExportSessionStatusFailed");
    }
    else
    {
        NSLog(@"Export Session Status: %ld", (long)exportSession.status);
    }
}];
Replace your code with this.

AVMutableCompositionTrack setVolume not working

I'm having a problem setting the audio volume when mixing a recorded video with an audio file from the app's resources.
Here is my code:
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
NSString *resourcePath = [[NSBundle mainBundle] pathForResource:@"give-it-away" ofType:@"mp3"];
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:resourcePath] options:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES], AVURLAssetPreferPreciseDurationAndTimingKey, nil]];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[videoAsset.tracks objectAtIndex:0] atTime:kCMTimeZero error:nil];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTime) ofTrack:[audioAsset.tracks objectAtIndex:0] atTime:kCMTimeZero error:&videoError];
AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack];
[audioInputParams setVolume:0.3 atTime:kCMTimeZero];
[audioInputParams setTrackID:audioTrack.trackID];
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = [NSArray arrayWithObject:audioInputParams];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
exportSession.outputURL = [NSURL fileURLWithPath:finalVideoWithAudioPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.audioMix = audioMix;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch (exportSession.status)
    {
        case AVAssetExportSessionStatusFailed:
        {
            [self performSelectorOnMainThread:@selector(doPostExportFailed) withObject:nil waitUntilDone:NO];
            break;
        }
        case AVAssetExportSessionStatusCompleted:
        {
            [self performSelectorOnMainThread:@selector(doPostExportSuccess) withObject:nil waitUntilDone:YES];
            break;
        }
        default:
            break;
    }
}];
The export completes successfully, but the audio volume does not change.
What am I doing wrong?
Thanks
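One likely cause, for what it's worth: with AVAssetExportPresetPassthrough the audio is not re-encoded, so the audioMix is ignored. A re-encoding preset lets the volume setting take effect; roughly:
// Sketch: use a re-encoding preset so the audioMix (and its volume) is applied.
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                       presetName:AVAssetExportPresetHighestQuality];
exportSession.audioMix = audioMix;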
UPDATED (December 2018)
This code is working for me on iOS 12.1.2:
- (void)combineAudio:(NSString *)audioPath forRecord:(VideoRecord *)record isResource:(BOOL)isResource isSilent:(BOOL)isSilent keepCurrentAudio:(BOOL)keepCurrentAudio withCompletionHandler:(void (^)(AVAssetExportSession *exportSession, NSString *exportPath))handler {
    NSString *resourcePath = audioPath;
    if (isResource) {
        resourcePath = [[NSBundle mainBundle] pathForResource:resourcePath ofType:@"mp3"];
    }
    NSURL *url = [NSURL fileURLWithPath:resourcePath];
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL URLWithString:[NSString stringWithFormat:@"file://%@", record.videoPath]] options:nil];
    AVMutableComposition *composition = [self getComposition];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioOriginalTrack = nil;
    if (keepCurrentAudio && [videoAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) {
        compositionAudioOriginalTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioOriginalTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                                atTime:kCMTimeZero error:nil];
    }
    if (isResource) {
        CMTime videoDuration = videoAsset.duration;
        if (CMTimeCompare(videoDuration, audioAsset.duration) == -1) {
            // Audio is longer than the video: trim it to the video's duration.
            [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                           ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                            atTime:kCMTimeZero error:nil];
        } else if (CMTimeCompare(videoDuration, audioAsset.duration) == 1) {
            // Video is longer than the audio: loop the audio until it fills the video.
            CMTime currentTime = kCMTimeZero;
            while (YES) {
                CMTime audioDuration = audioAsset.duration;
                CMTime totalDuration = CMTimeAdd(currentTime, audioDuration);
                if (CMTimeCompare(totalDuration, videoDuration) == 1) {
                    // Last pass: insert only the remaining time, not the full clip.
                    audioDuration = CMTimeSubtract(videoDuration, currentTime);
                }
                [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioDuration)
                                               ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                                atTime:currentTime error:nil];
                currentTime = CMTimeAdd(currentTime, audioDuration);
                if (CMTimeCompare(currentTime, videoDuration) >= 0) {
                    break;
                }
            }
        }
    } else {
        NSArray<AVAssetTrack *> *aTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
        if (aTracks.count > 0) {
            [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                           ofTrack:[aTracks objectAtIndex:0]
                                            atTime:kCMTimeZero error:nil];
        }
    }
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    NSArray *tracks = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
    if (tracks.count == 0) {
        CLSNSLog(@"%@ - combineAudio - video tracks zero", NSStringFromClass([self class]));
        // TODO - Handle this error.
        return;
    }
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:[tracks objectAtIndex:0]
                                    atTime:kCMTimeZero error:nil];
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:[composition copy]
                                                                          presetName:AVAssetExportPresetHighestQuality];
    NSString *exportPath = [record.videoPath stringByReplacingOccurrencesOfString:@".mp4" withString:@"_audio_added.mp4"];
    if ([record.videoPath containsString:@".MOV"]) {
        exportPath = [record.videoPath stringByReplacingOccurrencesOfString:@".MOV" withString:@"_audio_added.mp4"];
    }
    if ([record.videoPath containsString:@".mov"]) {
        exportPath = [record.videoPath stringByReplacingOccurrencesOfString:@".mov" withString:@"_audio_added.mp4"];
    }
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    }
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    float volume = .5f;
    if (keepCurrentAudio) {
        volume = .6f;
    }
    AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioTrack];
    [audioInputParams setVolumeRampFromStartVolume:(isSilent ? .0f : volume) toEndVolume:(isSilent ? .0f : volume) timeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)];
    [audioInputParams setTrackID:compositionAudioTrack.trackID];
    NSArray *inputParams = [NSArray arrayWithObject:audioInputParams];
    AVMutableAudioMixInputParameters *audioOriginalInputParams = nil;
    if (keepCurrentAudio) {
        // Duck the video's original audio under the added track.
        audioOriginalInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioOriginalTrack];
        [audioOriginalInputParams setVolumeRampFromStartVolume:(isSilent ? .0f : .06f) toEndVolume:(isSilent ? .0f : .06f) timeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)];
        [audioOriginalInputParams setTrackID:compositionAudioOriginalTrack.trackID];
        inputParams = [NSArray arrayWithObjects:audioInputParams, audioOriginalInputParams, nil];
    }
    audioMix.inputParameters = inputParams;
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.audioMix = audioMix;
    _assetExport.outputURL = exportUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^{
        handler(_assetExport, exportPath);
    }];
}
I call this function like this (sound is the name of the resource file):
[[FBVideoEditor shared] combineAudio:sound forRecord:self.record isResource:YES isSilent:NO keepCurrentAudio:NO withCompletionHandler:^(AVAssetExportSession *exportSession, NSString *exportPath) {
    switch (exportSession.status)
    {
        case AVAssetExportSessionStatusCancelled:
            CLSNSLog(@"AVAssetExportSessionStatusCancelled");
            break;
        case AVAssetExportSessionStatusExporting:
            CLSNSLog(@"AVAssetExportSessionStatusExporting");
            break;
        case AVAssetExportSessionStatusUnknown:
            CLSNSLog(@"AVAssetExportSessionStatusUnknown");
            break;
        case AVAssetExportSessionStatusWaiting:
            CLSNSLog(@"AVAssetExportSessionStatusWaiting");
            break;
        case AVAssetExportSessionStatusFailed:
        {
            CLSNSLog(@"Export Failed with error message: %@", exportSession.error.userInfo);
            break;
        }
        case AVAssetExportSessionStatusCompleted:
        {
            // Success
            break;
        }
    }
}];
The VideoRecord class contains all the data my videos need, but in this case only the video path is used, so you could replace it with a simple NSString.
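For illustration only, a minimal stand-in (the class name and videoPath property come from the code above; everything else is hypothetical) could be as small as:
// Hypothetical minimal stand-in for VideoRecord; only videoPath is used above.
@interface VideoRecord : NSObject
@property (nonatomic, copy) NSString *videoPath;
@end

@implementation VideoRecord
@end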
Hope it helps.

How to merge audio and video using AVMutableCompositionTrack

In my application I need to merge audio and video, and then play the merged file in the media player. How can I merge audio and video in iOS? Is there any source code for this? Please suggest some ideas.
Thanks in advance
Use this:
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioUrl options:nil];
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
NSString *videoName = @"export.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    // your completion code here
}];
You can merge videos by creating a mutable composition:
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVURLAsset *video1 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path1] options:nil];
NSArray *pathComponents = [NSArray arrayWithObjects:
                           [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject], @"MyAudio.m4a", nil];
NSURL *outputFileURL = [NSURL fileURLWithPathComponents:pathComponents];
AVAsset *audioAsset = [AVAsset assetWithURL:outputFileURL];
// Create a mutable composition track of audio type
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video1.duration)
                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *composedTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[composedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video1.duration)
                       ofTrack:[[video1 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                        atTime:kCMTimeZero error:nil];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
// Note: set exporter.outputURL and exporter.outputFileType before exporting.
[exporter exportAsynchronouslyWithCompletionHandler:^{
    switch (exporter.status) {
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Failed to export video");
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"export cancelled");
            break;
        default:
            break;
    }
}];
For video merging, visit this tutorial: http://iosbucket.blogspot.in/2015/04/mp4-conversion-and-video-merging-in-ios.html
You can also find a sample project there for merging videos.
For merging audio and video files, visit this tutorial: http://www.raywenderlich.com/13418/how-to-play-record-edit-videos-in-ios
It's a bit late to answer, but it may help someone in the future. This version repeats the audio if the video's duration is longer than the audio's.
+ (void)mergeVideoWithAudio:(NSURL *)videoUrl audioUrl:(NSURL *)audioUrl success:(void (^)(NSURL *url))success failure:(void (^)(NSError *error))failure {
    AVMutableComposition *mixComposition = [AVMutableComposition new];
    NSMutableArray<AVMutableCompositionTrack *> *mutableCompositionVideoTrack = [NSMutableArray new];
    NSMutableArray<AVMutableCompositionTrack *> *mutableCompositionAudioTrack = [NSMutableArray new];
    AVMutableVideoCompositionInstruction *totalVideoCompositionInstruction = [AVMutableVideoCompositionInstruction new];
    AVAsset *aVideoAsset = [AVAsset assetWithURL:videoUrl];
    AVAsset *aAudioAsset = [AVAsset assetWithURL:audioUrl];
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    if (videoTrack && audioTrack) {
        [mutableCompositionVideoTrack addObject:videoTrack];
        [mutableCompositionAudioTrack addObject:audioTrack];
        AVAssetTrack *aVideoAssetTrack = [aVideoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
        AVAssetTrack *aAudioAssetTrack = [aAudioAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
        if (aVideoAssetTrack && aAudioAssetTrack) {
            [mutableCompositionVideoTrack.firstObject insertTimeRange:CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration) ofTrack:aVideoAssetTrack atTime:kCMTimeZero error:nil];
            CMTime videoDuration = aVideoAsset.duration;
            if (CMTimeCompare(videoDuration, aAudioAsset.duration) == -1) {
                // Audio is longer: trim it to the video's duration.
                [mutableCompositionAudioTrack.firstObject insertTimeRange:CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration) ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:nil];
            } else if (CMTimeCompare(videoDuration, aAudioAsset.duration) == 1) {
                // Video is longer: repeat the audio until it fills the video.
                CMTime currentDuration = kCMTimeZero;
                while (CMTimeCompare(currentDuration, videoDuration) == -1) {
                    CMTime restTime = CMTimeSubtract(videoDuration, currentDuration);
                    CMTime maxTime = CMTimeMinimum(aAudioAsset.duration, restTime);
                    [mutableCompositionAudioTrack.firstObject insertTimeRange:CMTimeRangeMake(kCMTimeZero, maxTime) ofTrack:aAudioAssetTrack atTime:currentDuration error:nil];
                    currentDuration = CMTimeAdd(currentDuration, aAudioAsset.duration);
                }
            }
            videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform;
            totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration);
        }
    }
    NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
    }
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;
    // try to export the file and handle the status cases
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusFailed:
                failure(exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                failure(exportSession.error);
                break;
            default:
                success(outputURL);
                break;
        }
    }];
}
