Video cracked while merging videos using AVMutableComposition - iOS

I'm merging videos using AVMutableComposition with the code below:
- (void)MergeAndSave_internal{
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderScale = 1.0;
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    NSLog(@"%@", videoPathArray);
    float time = 0;
    CMTime startTime = kCMTimeZero;
    for (int i = 0; i < videoPathArray.count; i++) {
        AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[videoPathArray objectAtIndex:i]] options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];
        NSError *error = nil;
        BOOL ok = NO;
        AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        AVAssetTrack *sourceAudioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
        CGSize size = CGSizeMake(fabsf(temp.width), fabsf(temp.height));
        CGAffineTransform transform = sourceVideoTrack.preferredTransform;
        videoComposition.renderSize = sourceVideoTrack.naturalSize;
        if (size.width > size.height) {
            [layerInstruction setTransform:transform atTime:CMTimeMakeWithSeconds(time, 30)];
        } else {
            float s = size.width / size.height;
            CGAffineTransform newe = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s, s));
            float x = (size.height - size.width * s) / 2;
            CGAffineTransform newer = CGAffineTransformConcat(newe, CGAffineTransformMakeTranslation(x, 0));
            [layerInstruction setTransform:newer atTime:CMTimeMakeWithSeconds(time, 30)];
        }
        if (i == 0) {
            [compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
        }
        ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]) ofTrack:sourceVideoTrack atTime:startTime error:&error];
        ok = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]) ofTrack:sourceAudioTrack atTime:startTime error:nil];
        if (!ok) {
            [radialView4 setHidden:YES];
            NSLog(@"Export failed: %@", [[self.exportSession error] localizedDescription]);
            UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Something Went Wrong :(" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles:nil];
            [alert show];
            break;
        }
        startTime = CMTimeAdd(startTime, [sourceAsset duration]);
    }
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    instruction.timeRange = compositionVideoTrack.timeRange;
    videoComposition.instructions = [NSArray arrayWithObject:instruction];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
                            [NSString stringWithFormat:@"RampMergedVideo.mov"]];
    unlink([myPathDocs UTF8String]);
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                      presetName:AVAssetExportPreset1280x720];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            switch ([exporter status]) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"Export failed: %@", [exporter error]);
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Export canceled");
                    break;
                case AVAssetExportSessionStatusCompleted:
                    NSLog(@"Export successful");
                    break;
                default:
                    break;
            }
            if (exporter.status != AVAssetExportSessionStatusCompleted) {
                NSLog(@"Retry export");
            }
        });
    }];
}
But the video looks cracked when it is saved to disk and played in QuickTime Player. I think the problem is in the CGAffineTransform. Can anyone please advise?
Here's the cracked screen in the middle of the video (screenshot omitted).

You haven't set the videoComposition on the AVAssetExportSession. Try adding exporter.videoComposition = videoComposition;. I haven't tried this myself, but it should work.
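For clarity, here is where that assignment would go in the asker's MergeAndSave_internal (a minimal sketch reusing the question's variables; untested, as the answerer says):

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                      presetName:AVAssetExportPreset1280x720];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    // Without this line the exporter ignores the instruction/layer-instruction
    // setup entirely and just renders the raw composition tracks.
    exporter.videoComposition = videoComposition;
    [exporter exportAsynchronouslyWithCompletionHandler:^{ /* ... as above ... */ }];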

Related

Video Could Not Be Composed when using AVAssetExportPresetHighestQuality

I am trying to create an app that splices together a number of videos. The issue seems to be that when I combine instructions with AVAssetExportPresetHighestQuality, I get an error stating that
Export failed -> Reason: The video could not be composed., User Info:
{
    NSLocalizedDescription = "Operation Stopped";
    NSLocalizedFailureReason = "The video could not be composed.";
    NSUnderlyingError = "Error Domain=NSOSStatusErrorDomain Code=-17390 \"(null)\"";
}
If I change it to AVAssetExportPresetPassthrough it works OK, but the instructions are ignored. Does anyone know what the issue might be with the following code? I'm nearly there, but this issue is holding me up.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime insertTime = kCMTimeZero;
NSMutableArray *arrayInstructions = [[NSMutableArray alloc] init];
int i = 0;
for (NSMutableDictionary *dict in self.arraySelectedAssets) {
    AVAsset *asset = [dict objectForKey:@"avasset"];
    //[self orientationForTrack:asset];
    AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVAssetTrack *audioAssetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoAssetTrack atTime:insertTime error:nil];
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioAssetTrack atTime:insertTime error:nil];
    AVMutableVideoCompositionInstruction *firstVideoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    // Set the time range of the instruction to span the duration of this video track.
    firstVideoCompositionInstruction.timeRange = CMTimeRangeMake(insertTime, videoAssetTrack.timeRange.duration);
    AVMutableVideoCompositionLayerInstruction *firstVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]];
    CGAffineTransform translateToCenter = CGAffineTransformMakeTranslation(0, -1334);
    CGAffineTransform rotateBy90Degrees = CGAffineTransformMakeRotation(M_PI_2);
    CGAffineTransform shrinkWidth = CGAffineTransformMakeScale(0.1, 0.1); // needed because Apple does a "stretch" by default - really, we should find and undo Apple's stretch - I suspect it'll be a CALayer defaultTransform, or UIView property causing this
    CGAffineTransform finalTransform = CGAffineTransformConcat(shrinkWidth, CGAffineTransformConcat(translateToCenter, rotateBy90Degrees));
    [firstVideoLayerInstruction setTransform:finalTransform atTime:kCMTimeZero];
    firstVideoCompositionInstruction.layerInstructions = @[firstVideoLayerInstruction];
    [arrayInstructions addObject:firstVideoCompositionInstruction];
    insertTime = CMTimeAdd(insertTime, videoAssetTrack.timeRange.duration);
    i = i + 1;
}
AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
mutableVideoComposition.instructions = arrayInstructions;
mutableVideoComposition.renderSize = CGSizeMake(1334, 750);
mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
                        [NSString stringWithFormat:@"mergeVideo-%d.mov", arc4random() % 1000]];
self.combinedVideoURL = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter progress timer
self.timerExporter = [NSTimer scheduledTimerWithTimeInterval:0.01f
                                                      target:self
                                                    selector:@selector(exporterProgress)
                                                    userInfo:nil
                                                     repeats:YES];
// 6 - Create exporter
self.exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                 presetName:AVAssetExportPresetHighestQuality];
self.exporter.outputURL = self.combinedVideoURL;
self.exporter.outputFileType = AVFileTypeQuickTimeMovie;
self.exporter.shouldOptimizeForNetworkUse = YES;
self.exporter.videoComposition = mutableVideoComposition;
[self.exporter exportAsynchronouslyWithCompletionHandler:^{
    [self.timerExporter invalidate];
    switch (self.exporter.status) {
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Export failed -> Reason: %@, User Info: %@",
                  self.exporter.error.localizedFailureReason,
                  self.exporter.error.userInfo.description);
            [self showError:self.exporter.error.localizedFailureReason];
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Export cancelled");
            break;
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"Export finished");
            dispatch_async(dispatch_get_main_queue(), ^{
                self.labelProgressText.text = [NSString stringWithFormat:@"%@ (100%%)", NSLocalizedString(@"Combining The Videos", nil)];
                [self applyTheFilter];
            });
            break;
        default:
            break;
    }
}];
This is not the answer you're looking for, I'm afraid. I had the same problem transforming and exporting a single video - AVAssetExportPresetHighestQuality would work for some assets and not for others.
My guess at the time was that the assets that didn't work weren't of a high enough size/framerate/quality to render using AVAssetExportPresetHighestQuality.
As you did, I ended up using AVAssetExportPresetPassthrough. In your case the end result will presumably be that all the assets you're splicing together will be rendered in their original format.
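One way to probe this up front (my suggestion, not part of the original answer): AVAssetExportSession can report whether a given preset can export a given asset at all, which helps separate preset incompatibility from a broken set of instructions:

    // Ask AVFoundation whether HighestQuality can export this composition at all.
    // Note: this checks the asset/preset pairing only; it may not account for the
    // videoComposition, so a YES here points back at the instructions themselves.
    [AVAssetExportSession determineCompatibilityOfExportPreset:AVAssetExportPresetHighestQuality
                                                     withAsset:mixComposition
                                                outputFileType:AVFileTypeQuickTimeMovie
                                             completionHandler:^(BOOL compatible) {
        NSLog(@"AVAssetExportPresetHighestQuality compatible: %d", compatible);
    }];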

How to merge multiple videos using AVAsset in iOS?

I want to merge multiple videos into one,
but I don't know how to do this, so please help me with this issue.
Below is my code, but it's not working for me.
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
NSString *videoName = @"export.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
 ^(void) {
     // your completion code here
 }];
I have searched for this but no answer worked for me.
Check my code below; this works correctly...
-(void)MergeVideo
{
    AppDelegate *appdel = (AppDelegate*)[[UIApplication sharedApplication] delegate];
    NSLog(@"Array Video Paths :- %@", appdel.arrVideoPath);
    CGFloat totalDuration;
    totalDuration = 0;
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime insertTime = kCMTimeZero;
    for (id object in appdel.arrVideoPath)
    {
        AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:object]];
        CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
        [videoTrack insertTimeRange:timeRange
                            ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                             atTime:insertTime
                              error:nil];
        [audioTrack insertTimeRange:timeRange
                            ofTrack:[[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                             atTime:insertTime
                              error:nil];
        insertTime = CMTimeAdd(insertTime, asset.duration);
    }
    NSString *documentsDirectory = [self applicationDocumentsDirectory];
    myDocumentPath = [documentsDirectory stringByAppendingPathComponent:@"merge_video.mp4"];
    urlVideoMain = [[NSURL alloc] initFileURLWithPath:myDocumentPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:myDocumentPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:myDocumentPath error:nil];
    }
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = urlVideoMain;
    exporter.outputFileType = @"com.apple.quicktime-movie";
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        switch ([exporter status])
        {
            case AVAssetExportSessionStatusFailed:
                break;
            case AVAssetExportSessionStatusCancelled:
                break;
            case AVAssetExportSessionStatusCompleted:
                break;
            default:
                break;
        }
    }];
}
- (NSString*) applicationDocumentsDirectory
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    return basePath;
}
{
    AVURLAsset *video01;
    AVURLAsset *video02;
    CGFloat totalDuration;
    totalDuration = 0; // initialization, keep it 0
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *composedTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    for (int i = 0; i < [arrVideoPath count]; i++) // arrVideoPath contains all video paths
    {
        if (i == 0)
        {
            video02 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:[arrVideoPath objectAtIndex:i]] options:nil];
            [composedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video02.duration) ofTrack:[[video02 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
        }
        else
        {
            video01 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:[arrVideoPath objectAtIndex:i-1]] options:nil];
            video02 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:[arrVideoPath objectAtIndex:i]] options:nil];
            float duration1 = CMTimeGetSeconds([video01 duration]);
            totalDuration = totalDuration + duration1;
            CMTime time1 = CMTimeMakeWithSeconds(totalDuration, 1);
            [composedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video02.duration) ofTrack:[[video02 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:time1 error:nil];
        }
    }
    NSString *documentsDirectory = [self applicationDocumentsDirectory];
    myDocumentPath = [documentsDirectory stringByAppendingPathComponent:@"merge_video.mp4"]; // myDocumentPath is an NSString giving the path of the output (combined) video
    urlVideoMain = [[NSURL alloc] initFileURLWithPath:myDocumentPath]; // urlVideoMain is the URL of the output video
    if ([[NSFileManager defaultManager] fileExistsAtPath:myDocumentPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:myDocumentPath error:nil];
    } // removes any previous video at the same path, essential
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = urlVideoMain;
    exporter.outputFileType = @"com.apple.quicktime-movie";
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        switch ([exporter status]) {
            case AVAssetExportSessionStatusFailed:
                break;
            case AVAssetExportSessionStatusCancelled:
                break;
            case AVAssetExportSessionStatusCompleted:
                break;
            default:
                break;
        }
    }];
}
-(NSString*) applicationDocumentsDirectory
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    return basePath;
}
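A side note, not part of either answer: the second snippet accumulates the insertion point as float seconds with a timescale of 1, which rounds every clip boundary to whole seconds and can leave gaps or overlaps between clips. The CMTime arithmetic used in the first answer avoids that:

    // Accumulate the insertion point in native CMTime units rather than
    // converting through float seconds with a timescale of 1.
    CMTime insertTime = kCMTimeZero;
    // ... after each pair of insertTimeRange:ofTrack:atTime:error: calls ...
    insertTime = CMTimeAdd(insertTime, asset.duration);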

Build AVMutableComposition from AVURLAssets in loop

I'm working on an app that will need to concatenate a group of videos recorded from the camera. Ultimately I'll have an array of URLs to work with, but I can't figure out how to get two movie assets to concatenate properly. Here's some standalone code:
- (void)buildComposition {
    NSString *path1 = [[NSBundle mainBundle] pathForResource:@"IMG_1049" ofType:@"MOV"];
    NSString *path2 = [[NSBundle mainBundle] pathForResource:@"IMG_1431" ofType:@"MOV"];
    NSURL *url1 = [NSURL fileURLWithPath:path1];
    NSURL *url2 = [NSURL fileURLWithPath:path2];
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableVideoCompositionInstruction *compositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    NSMutableArray *layerInstructions = [NSMutableArray array];
    CGSize renderSize = CGSizeZero;
    NSUInteger count = 0;
    for (NSURL *url in @[url1, url2]) {
        NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey: @(YES) };
        AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:options];
        CMTimeRange editRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(1.0, 600));
        NSError *error = nil;
        CMTime insertionTime = composition.duration;
        NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
        AVAssetTrack *videoTrack = videoTracks.firstObject;
        AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [videoCompositionTrack insertTimeRange:editRange ofTrack:videoTrack atTime:insertionTime error:&error];
        if (count == 0) {
            AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
            CGAffineTransform scale = CGAffineTransformMakeScale(0.6, 0.6);
            [layerInstruction setTransform:CGAffineTransformConcat(videoTrack.preferredTransform, scale) atTime:kCMTimeZero];
            [layerInstructions addObject:layerInstruction];
        }
        else {
            AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
            CGAffineTransform scale = CGAffineTransformMakeScale(0.9, 0.9);
            [layerInstruction setTransform:CGAffineTransformConcat(videoTrack.preferredTransform, scale) atTime:kCMTimeZero];
            [layerInstructions addObject:layerInstruction];
        }
        // set the render size (CGRectMakeWithCGSize and CGSizeUnion appear to be the poster's own helpers, not CoreGraphics functions)
        CGRect transformed = CGRectApplyAffineTransform(CGRectMakeWithCGSize(videoTrack.naturalSize), videoTrack.preferredTransform);
        renderSize = CGSizeUnion(renderSize, transformed.size);
        NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
        AVAssetTrack *audioTrack = audioTracks.firstObject;
        AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [audioCompositionTrack insertTimeRange:editRange ofTrack:audioTrack atTime:insertionTime error:&error];
        ++count;
    }
    // set the composition instructions
    compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);
    compositionInstruction.layerInstructions = layerInstructions;
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoCompositionWithPropertiesOfAsset:composition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.instructions = @[compositionInstruction];
    videoComposition.renderSize = renderSize;
    // export the composition
    NSTimeInterval time = [NSDate timeIntervalSinceReferenceDate];
    NSString *filename = [[NSString stringWithFormat:@"video-export-%f", time] stringByAppendingPathExtension:@"mov"];
    NSString *pathTo = [NSHomeDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"Documents/%@", filename]];
    NSURL *fileUrl = [NSURL fileURLWithPath:pathTo];
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
    assetExport.videoComposition = videoComposition;
    assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    assetExport.shouldOptimizeForNetworkUse = YES;
    assetExport.outputURL = fileUrl;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        switch (assetExport.status) {
            case AVAssetExportSessionStatusFailed:
                NSLog(@"\n\nFailed: %@\n\n", assetExport.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"\n\nCancelled: %@\n\n", assetExport.error);
                break;
            default:
                NSLog(@"\n\nExported: %@\n\n", fileUrl);
                break;
        }
    }];
}
What I expect to happen is the first video plays for 1 second at 60% scale, and then the second video plays for 1 second at 90% scale.
What actually happens is the first video plays at both 60% and 90% at the start of the video. After 1 second, the video goes black but the audio plays correctly.
Any ideas? Thanks!
Figured it out for anyone who is curious. In my layer instructions, I was mistakenly building them using the AVURLAsset's videoTrack, not the AVMutableComposition's compositionTrack!
This line:
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
Should be:
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
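In context (a sketch against the question's loop, with editRange, insertionTime and scale as defined there), the corrected instruction setup reads:

    AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [videoCompositionTrack insertTimeRange:editRange ofTrack:videoTrack atTime:insertionTime error:&error];
    // The layer instruction must reference a track that actually exists in the
    // composition being exported; keyed to the source asset's track it never
    // matches, which is presumably why those segments rendered black.
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
    [layerInstruction setTransform:CGAffineTransformConcat(videoTrack.preferredTransform, scale) atTime:kCMTimeZero];
    [layerInstructions addObject:layerInstruction];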

Merging audio and video file in ios

I am trying to merge an audio and a video file; below is my code:
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:_BAckgroungMusicFileURL options:nil];
AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:_AppDel._RecordedVideoPath] options:nil];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 24);
videoComposition.renderScale = 1.0;
AVMutableCompositionTrack *compositionCommentaryTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                  preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                             preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
AVAssetTrack *sourceVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI/2);
CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform, 320, 0);
[compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
[layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComposition.instructions = [NSArray arrayWithObject:instruction];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                       presetName:AVAssetExportPresetPassthrough];
NSDate *_TodayDate = [NSDate dateWithTimeIntervalSinceNow:0];
_CalenderDate = [_TodayDate description];
_CombinedVideoPath = [NSString stringWithFormat:@"%@/%@.mov", DOCUMENTS_FOLDER, _CalenderDate];
NSURL *exportUrl = [NSURL fileURLWithPath:_CombinedVideoPath];
_assetExport.outputFileType = @"com.apple.quicktime-movie";
NSLog(@"file type %@", _assetExport.outputFileType);
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
 ^(void) {
     switch (_assetExport.status)
     {
         case AVAssetExportSessionStatusCompleted:
             NSLog(@"AVAssetExportSessionStatusCompleted");
             [self SaveVideo];
             break;
         case AVAssetExportSessionStatusFailed:
             NSLog(@"Export Failed");
             NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
             [self mergingCompleted];
             break;
         case AVAssetExportSessionStatusCancelled:
             [self mergingCompleted];
             NSLog(@"Export Session Status: %d", _assetExport.status);
             NSLog(@"Export Failed");
             NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
             break;
     }
 }];
Here the video to be merged is a recorded video. My problem is that when I merge this audio and video, the resulting video contains the recorded video and the new audio, but not the recorded video's own audio. How can I merge the video, its own audio, and another audio track together?
Any help will be appreciated.
Thanks in advance
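No fix is recorded here, but one plausible approach (a sketch, not a verified answer): an AVMutableComposition can hold more than one audio track, so the recorded video's own audio can be inserted into a second audio track alongside the background music, reusing the question's composition, videoAsset and audioAsset:

    // Keep the background-music track from the question as-is, then add a
    // second audio track carrying the recorded video's own soundtrack.
    NSArray *videoAudioTracks = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
    if (videoAudioTracks.count > 0) {
        AVMutableCompositionTrack *originalAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [originalAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                    ofTrack:[videoAudioTracks objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];
    }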

How to mix audio with video in ios?

I'm trying to mix one audio file with a video file, but I got an error: "Export failed: The operation could not be completed".
Please correct this code if there is an error.
My Code:
-(void)CompileFilesToMakeMovie
{
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    NSString *str = [[[NSBundle mainBundle] resourcePath] stringByAppendingPathComponent:@"bgsong.mp3"];
    NSString *audio_inputFilePath = str;
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
    NSString *video_inputFileName = @"movie.mp4";
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                         NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *video_inputFilePath = [documentsDirectory stringByAppendingPathComponent:video_inputFileName];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
    NSString *outputFileName = @"outputFile.mp4";
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:outputFileName];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    CMTime nextClipStartTime = kCMTimeZero;
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void) {
         BOOL _success = false;
         switch ([_assetExport status]) {
             case AVAssetExportSessionStatusCompleted:
                 _success = true;
                 NSLog(@"Export Completed");
                 break;
             case AVAssetExportSessionStatusWaiting:
                 NSLog(@"Export Waiting");
                 break;
             case AVAssetExportSessionStatusExporting:
                 NSLog(@"Export Exporting");
                 break;
             case AVAssetExportSessionStatusFailed:
             {
                 NSError *error = [_assetExport error];
                 NSLog(@"Export failed: %@", [error localizedDescription]);
                 break;
             }
             case AVAssetExportSessionStatusCancelled:
                 NSLog(@"Export canceled");
                 break;
             default:
                 break;
         }
         if (_success == true) {
             ALAssetsLibrary *assetLibrary = [[ALAssetsLibrary alloc] init];
             [assetLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileUrl completionBlock:^(NSURL *assetURL, NSError *error){
                 NSError *removeError = nil;
                 [[NSFileManager defaultManager] removeItemAtURL:outputFileUrl error:&removeError];
             }];
         }
     }];
}
Thanks in Advance
Please try this one...
AVMutableComposition *mixComposition = [AVMutableComposition composition];
// audio file
NSMutableArray *loTempArr = [[[Database sharedDBDetails] getAllUserDetails:kaudioTable] mutableCopy];
TempFile *lotemp1 = [[TempFile alloc] init];
TempFile *loTemp2 = [[TempFile alloc] init];
loTemp2 = [mallVideoArray objectAtIndex:self.slectedVideoIndex];
for (int i = 0; i < [loTempArr count]; i++)
{
    lotemp1 = [loTempArr objectAtIndex:i];
    if (loTemp2.mTemp_Key == [lotemp1.mTemp_videorefID intValue])
    {
        //NSLog(@"%@", lotemp1.mTemp_AudioName);
        NSString *filepath = [kDocument_Path stringByAppendingString:[NSString stringWithFormat:@"/audioFolder/%@", lotemp1.mTemp_AudioName]];
        NSURL *SongURL = [NSURL fileURLWithPath:filepath];
        self.audioAsset = [[AVURLAsset alloc] initWithURL:SongURL options:nil];
        CMTime time2 = CMTimeMake([lotemp1.mTemp_timeinvideo doubleValue]*600, 600);
        AVMutableCompositionTrack *compositionCommentaryTrack2 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionCommentaryTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeSubtract(self.videoAsset.duration, time2))
                                             ofTrack:[[self.audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:time2 error:nil];
    }
}
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration)
                               ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGAffineTransform transform = CGAffineTransformIdentity;
transform = videoAssetTrack.preferredTransform;
[videolayerInstruction setTransform:transform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:self.videoAsset.duration];
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction, nil];
// 3.3 - Add instructions
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
float renderWidth, renderHeight;
renderWidth = self.movieController.view.frame.size.width;
renderHeight = self.movieController.view.frame.size.height;
CGSize size;
if (flipActionFlag == 4 || flipActionFlag == 5)
{
    size = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
}
else
{
    size = videoAssetTrack.naturalSize;
}
//NSLog(@"%@", NSStringFromCGSize(size));
mainCompositionInst.renderSize = size; // or CGSizeMake(renderWidth, renderHeight)
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// 4 - Get path (loTemp is defined elsewhere in the poster's code)
TempFile *mnewtemp = [[TempFile alloc] init];
mnewtemp.mTemp_videoName = [NSString stringWithFormat:@"Video_%d.m4v", loTemp.mTemp_Key+1];
[[Database sharedDBDetails] insertNewRowWithData:mnewtemp forTable:kvideoTable];
NSString *myPathDocs = [kDocument_Path stringByAppendingPathComponent:
                        [NSString stringWithFormat:@"Video/Video_%d.m4v", loTemp.mTemp_Key+1]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                  presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie; // i.e. @"com.apple.quicktime-movie"
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
    int exportStatus = exporter.status;
    NSLog(@"exportStatus = %d", exportStatus);
    switch (exportStatus)
    {
        case AVAssetExportSessionStatusFailed: { NSError *exportError = exporter.error; NSLog(@"AVAssetExportSessionStatusFailed: %@", exportError); break; }
        case AVAssetExportSessionStatusCompleted: { NSLog(@"AVAssetExportSessionStatusCompleted--"); break; }
        case AVAssetExportSessionStatusUnknown: { NSLog(@"AVAssetExportSessionStatusUnknown"); break; }
        case AVAssetExportSessionStatusExporting: { NSLog(@"AVAssetExportSessionStatusExporting"); break; }
        case AVAssetExportSessionStatusCancelled: { NSLog(@"AVAssetExportSessionStatusCancelled"); break; }
        case AVAssetExportSessionStatusWaiting: { NSLog(@"AVAssetExportSessionStatusWaiting"); break; }
        default: { NSLog(@"didn't get export status"); break; }
    }
    dispatch_async(dispatch_get_main_queue(), ^
    {
        [self exportDidFinish:exporter];
    });
}];
}
- (void)exportDidFinish:(AVAssetExportSession*)session
{
    [losaveView removeFromSuperview];
    if (session.status == AVAssetExportSessionStatusCompleted)
    {
        NSURL *outputURL = session.outputURL;
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
            [library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (error)
                    {
                        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed"
                                                                       delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
                        [alert show];
                    }
                    else
                    {
                        self.mallVideoArray = [[[Database sharedDBDetails] getAllUserDetails:kvideoTable] mutableCopy];
                        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album"
                                                                       delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
                        [alert show];
                    }
                });
            }];
        }
    }
}
I got the solution to my question.
- (IBAction)MergeAndSave:(id)sender
{
    NSString *str = [[[NSBundle mainBundle] resourcePath] stringByAppendingPathComponent:@"bgsong.mp3"];
    NSString *audio_inputFilePath = str;
    NSURL *SongURL = [NSURL fileURLWithPath:audio_inputFilePath];
    audioAsset = [AVAsset assetWithURL:SongURL];
    NSString *video_inputFileName = @"movie.mp4";
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                         NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *video_inputFilePath = [documentsDirectory stringByAppendingPathComponent:video_inputFileName];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
    firstAsset = [AVAsset assetWithURL:video_inputFileUrl];
    secondAsset = [AVAsset assetWithURL:video_inputFileUrl];
    if (firstAsset != nil && secondAsset != nil) {
        // Create an AVMutableComposition object. This object will hold our multiple AVMutableCompositionTracks.
        AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
        // VIDEO TRACK
        AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
        AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:firstAsset.duration error:nil];
        // AUDIO TRACK
        if (audioAsset != nil) {
            AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
        }
        AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration));
        // FIXING ORIENTATION //
        AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
        [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
        MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, nil];
        AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
        MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
        MainCompositionInst.frameDuration = CMTimeMake(1, 30);
        MainCompositionInst.renderSize = CGSizeMake(self.view.frame.size.width, self.view.frame.size.height);
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo.mov"]];
        NSURL *url = [NSURL fileURLWithPath:myPathDocs];
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
        exporter.outputURL = url;
        exporter.outputFileType = AVFileTypeQuickTimeMovie;
        exporter.videoComposition = MainCompositionInst;
        exporter.shouldOptimizeForNetworkUse = YES;
        [exporter exportAsynchronouslyWithCompletionHandler:^
        {
            dispatch_async(dispatch_get_main_queue(), ^{
                [self playVideo];
                //[self performSelector:@selector(playVideo) withObject:nil afterDelay:2.0];
                //[self exportDidFinish:exporter];
            });
        }];
    }
}
- (void)exportDidFinish:(AVAssetExportSession*)session
{
    if (session.status == AVAssetExportSessionStatusCompleted) {
        NSURL *outputURL = session.outputURL;
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
            [library writeVideoAtPathToSavedPhotosAlbum:outputURL
                                        completionBlock:^(NSURL *assetURL, NSError *error){
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (error) {
                        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles:nil];
                        [alert show];
                    } else {
                        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album" delegate:self cancelButtonTitle:@"Ok" otherButtonTitles:nil];
                        [alert show];
                    }
                });
            }];
        }
    }
    audioAsset = nil;
    firstAsset = nil;
    secondAsset = nil;
}
