I want to export a video file from a composition with two videos (each with audio) and one audio track. It works fine on the iPhone 5s and later, but it fails on an iPhone 5c (iOS 9.2.1). The error is returned here:
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
[self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
} else {
NSLog(#"Export error: %#", _assetExport.error);
[self performSelectorOnMainThread:#selector(videoHasFailed) withObject:nil waitUntilDone:YES];
}
}
];
The log that it printed:
Export error: Error Domain=AVFoundationErrorDomain Code=-11820 "Cannot Complete Export" UserInfo={NSLocalizedRecoverySuggestion=Try exporting again., NSLocalizedDescription=Cannot Complete Export}
As stated, it works fine on my iPhone 5s, 6, and 6s; only my iPhone 5c returns this error. Hopefully someone has experience with this.
The full code for creating the tracks and composition:
- (void) generateVideoWithInputPath:(NSString*)inputVideo andAudioFileName:(NSString*)audioFileName andVolume:(float)volume {
NSString* introVideoPath = [[NSBundle mainBundle] pathForResource:@"IntroVideo" ofType:@"mp4"];
NSURL* introVideoUrl = [NSURL fileURLWithPath:introVideoPath];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:inputVideo];
self.outputAssetURL = NULL;
self.outputFilePath = finalVideoPath;
NSURL* outputFileUrl = [NSURL fileURLWithPath:self.outputFilePath];
unlink([self.outputFilePath UTF8String]); // remove existing result
// Create composition
AVMutableComposition* mixComposition = [AVMutableComposition composition];
// Create Asset for introVideo
AVURLAsset* introVideoAsset = [[AVURLAsset alloc] initWithURL:introVideoUrl options:nil];
// Create time ranges
CMTime introStartTime = kCMTimeZero;
CMTime introEndTime = introVideoAsset.duration;
CMTimeRange introVideo_timeRange = CMTimeRangeMake(introStartTime, introEndTime);
//add VideoTrack of introVideo to composition
NSArray* introVideoAssetTracks = [introVideoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack* introVideoAssetTrack = ([introVideoAssetTracks count] > 0 ? [introVideoAssetTracks objectAtIndex:0] : nil);
AVMutableCompositionTrack* b_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionVideoTrack insertTimeRange:introVideo_timeRange ofTrack:introVideoAssetTrack atTime:introStartTime error:nil];
// Add AudioTrack of introVideo to composition
NSArray* audioAssetTracksIntro = [introVideoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrackIntro = ([audioAssetTracksIntro count] > 0 ? [audioAssetTracksIntro objectAtIndex:0] : nil);
AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionAudioTrack insertTimeRange:introVideo_timeRange ofTrack:audioAssetTrackIntro atTime:introStartTime error:nil];
// Create Asset for inputVideo
CMTime nextClipStartTime = introEndTime;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
// Create time ranges
CMTime videoStartTime = kCMTimeZero;
CMTime videoEndTime = videoAsset.duration;
if (CMTIME_IS_INVALID(videoEndTime)) {
NSLog(#"videoEndTime is invalid");
}
CMTimeRange mainVideo_timeRange = CMTimeRangeMake(videoStartTime, videoEndTime);
// Add VideoTrack of inputVideo to composition
NSArray* videoAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack* videoAssetTrack2 = ([videoAssetTracks2 count] > 0 ? [videoAssetTracks2 objectAtIndex:0] : nil);
// CMTime audioDurationFix = CMTimeAdd(videoAsset.duration, CMTimeMakeWithSeconds(-1.0f, 1));
// CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioDurationFix);
AVMutableCompositionTrack* a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:mainVideo_timeRange ofTrack:videoAssetTrack2 atTime:nextClipStartTime error:nil];
// Add AudioTrack of inputVideo to composition
NSArray* audioAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrack2 = ([audioAssetTracks2 count] > 0 ? [audioAssetTracks2 objectAtIndex:0] : nil);
//AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack2 atTime:nextClipStartTime error:nil];
AVMutableAudioMix* audioMix = NULL;
if (audioFileName) {
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audioFileName];
// Create Asset for audio (song)
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
// Add Audio of song to composition
NSArray* audioAssetTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrack = ([audioAssetTracks count] > 0 ? [audioAssetTracks objectAtIndex:0] : nil);
AVMutableCompositionTrack* b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];
// Set Volume of song
NSArray *tracksToDuck = [mixComposition tracksWithMediaType:AVMediaTypeAudio];
NSMutableArray *trackMixArray = [NSMutableArray array];
// for (int i = 0; i < [tracksToDuck count]; i++) {
AVAssetTrack *leTrack = [tracksToDuck objectAtIndex:0];
AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack];
[trackMix setVolume:1 atTime:kCMTimeZero];
[trackMixArray addObject:trackMix];
AVAssetTrack *leTrack2 = [tracksToDuck objectAtIndex:1];
AVMutableAudioMixInputParameters *trackMix2 = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack2];
[trackMix2 setVolume:volume atTime:kCMTimeZero];
[trackMixArray addObject:trackMix2];
// }
audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = trackMixArray;
}
// Export composition to videoFile
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie; //@"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
_assetExport.videoComposition = [self getVideoComposition:videoAsset intro:introVideoAsset composition:mixComposition];
// Set song volume audio
if (audioMix != NULL) {
_assetExport.audioMix = audioMix;
}
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
[self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
} else {
NSLog(#"Export error: %#", _assetExport.error);
[self performSelectorOnMainThread:#selector(videoHasFailed) withObject:nil waitUntilDone:YES];
}
}
];
}
-(AVMutableVideoComposition *) getVideoComposition:(AVAsset *)asset intro:(AVAsset *)intro composition:( AVMutableComposition*)composition{
AVMutableCompositionTrack *compositionIntroTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *audioTracksArray = [intro tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *introTrack;
if (audioTracksArray.count > 0) {
introTrack = [audioTracksArray objectAtIndex:0];
[compositionIntroTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, intro.duration) ofTrack:introTrack atTime:kCMTimeZero error:nil];
}
NSArray *videoTracksArray = [asset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *videoTrack;
if (videoTracksArray.count > 0) {
videoTrack = [videoTracksArray objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:intro.duration error:nil];
}
AVMutableVideoCompositionLayerInstruction *firstLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionIntroTrack];
AVMutableVideoCompositionLayerInstruction *secondLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CGSize videoSize;
if (videoTrack && introTrack) {
CGSize trackDimensions = [videoTrack naturalSize];
videoSize = CGSizeMake(0, 0);
// turn around for portrait
if (trackDimensions.height>trackDimensions.width) {
videoSize = CGSizeMake(trackDimensions.width, trackDimensions.height);
} else {
videoSize = CGSizeMake(trackDimensions.height, trackDimensions.width);
}
CGAffineTransform transform = videoTrack.preferredTransform;
CGAffineTransform scale = CGAffineTransformMakeScale((videoSize.width/introTrack.naturalSize.width),(videoSize.height/introTrack.naturalSize.height));
[firstLayerInst setTransform:scale atTime:kCMTimeZero];
[secondLayerInst setTransform:transform atTime:kCMTimeZero];
} else {
videoSize = [[FilteringClass sharedFilteringClass] getVideoSize];
}
CMTime totalTime = CMTimeAdd(asset.duration, intro.duration);
NSLog(#"Total videotime: %lld", totalTime.value);
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
inst.layerInstructions = [NSArray arrayWithObjects:firstLayerInst, secondLayerInst, nil];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObject:inst];
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderScale = 1.0;
return videoComposition;
}
In my opinion you are hitting the decoder limit set by AVFoundation. In iOS 5 the decoder limit is 4, and in iOS 6 it is 16, so try exporting a smaller video. If that works, the problem is with your video file: it probably exceeds the decoder limit.
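If the 5c really is hitting a decoder limit, note that the composition above ends up with four video tracks containing the same two clips twice (two tracks added in generateVideoWithInputPath:... and two more inside getVideoComposition:...), which may double the decoding work; reusing the already-created composition tracks would avoid that. You can also ask AVFoundation up front whether a preset can handle the composition on the current device. A minimal sketch, reusing the mixComposition name from the question (the fallback preset choice is my own assumption, not the asker's code):
// Check which presets this device can actually use for this composition.
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:mixComposition];
NSString *preset = [compatiblePresets containsObject:AVAssetExportPresetHighestQuality]
                       ? AVAssetExportPresetHighestQuality
                       : AVAssetExportPresetMediumQuality;
[AVAssetExportSession determineCompatibilityOfExportPreset:preset
                                                 withAsset:mixComposition
                                            outputFileType:AVFileTypeQuickTimeMovie
                                         completionHandler:^(BOOL compatible) {
    if (!compatible) {
        NSLog(@"Preset %@ cannot export this composition on this device", preset);
        return;
    }
    // Create the AVAssetExportSession with `preset` and export as in the question.
}];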
Related
When I blend two videos with AVAssetExportSession on iOS 9, it works perfectly. But when I blend with AVAssetExportSession on iOS 10, it does not work. Please help me if anyone knows the reason. Thank you.
Actually, the code works on the iPhone 6s and earlier, but not on the iPhone 7.
For example:
-(void) blendVideoOverVideo:(NSURL*)mainVideoUrl andBlendVideoUrl:(NSURL*)liveEffectUrl
{
AVURLAsset *mainVideoUrlAsset =[AVURLAsset URLAssetWithURL:mainVideoUrl options:nil];
// AVPlayerItem* mainVideoPlayerItem =[[AVPlayerItem alloc]initWithAsset:mainVideoUrlAsset];
AVAssetTrack* mainVideoTrack =[[mainVideoUrlAsset tracksWithMediaType:AVMediaTypeVideo]firstObject];
CGSize mainVideoSize = [mainVideoTrack naturalSize];
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:mainVideoUrl options:nil];
if(mainVideoUrl!=nil)
{
if([[audioAsset tracksWithMediaType:AVMediaTypeAudio] count])
{
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainVideoUrlAsset.duration )
ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero
error:nil];
}
}
AVMutableCompositionTrack *mainVideoConpositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[mainVideoConpositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainVideoUrlAsset.duration) ofTrack:mainVideoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *mainVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mainVideoConpositionTrack];
// Second track
AVURLAsset *blendVideoUrlAsset =[AVURLAsset URLAssetWithURL:liveEffectUrl options:nil];
// AVPlayerItem* blendVideoPlayerItem =[[AVPlayerItem alloc]initWithAsset:blendVideoUrlAsset];
AVAssetTrack* blendVideoTrack =[[blendVideoUrlAsset tracksWithMediaType:AVMediaTypeVideo]firstObject];
CGSize blendVideoSize = [blendVideoTrack naturalSize];
AVMutableCompositionTrack *blendVideoConpositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime oldTime=CMTimeMakeWithSeconds(CMTimeGetSeconds(blendVideoUrlAsset.duration), blendVideoUrlAsset.duration.timescale);
// CMTime timeNew=CMTimeMakeWithSeconds(CMTimeGetSeconds(blendVideoUrlAsset.duration)/2, blendVideoUrlAsset.duration.timescale);
[blendVideoConpositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, oldTime) ofTrack:blendVideoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *blendVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:blendVideoConpositionTrack];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mainVideoUrlAsset.duration);
CGAffineTransform Scale = CGAffineTransformMakeScale(1.0f,1.0f);
CGAffineTransform Move = CGAffineTransformMakeTranslation(0,0);
[mainVideoLayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
[blendVideoLayerInstruction setOpacity:0.5 atTime:kCMTimeZero];
// [blendVideoLayerInstruction setOpacity:0.0 atTime:timeNew];
CGFloat cropOffX = 1.0;
CGFloat cropOffY = 1.0;
if(blendVideoSize.height>mainVideoSize.height)
{
cropOffY = mainVideoSize.height/blendVideoSize.height;
}else{
cropOffY = mainVideoSize.height/blendVideoSize.height;
}
if(blendVideoSize.width>mainVideoSize.width)
{
cropOffX = mainVideoSize.width/blendVideoSize.width;
}
Scale = CGAffineTransformMakeScale(cropOffX,cropOffY);
Move = CGAffineTransformMakeTranslation(0.1, 0.1);
[blendVideoLayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:blendVideoLayerInstruction,mainVideoLayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = mainVideoSize;
NSString *fullName= [NSString stringWithFormat:@"video%d.mov",arc4random() % 1000];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:fullName];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exporter.outputURL=url;
CMTime start;
CMTime duration;
NSLog(#"Main Video dura %f blend dura - %f, ",CMTimeGetSeconds(mainVideoUrlAsset.duration),CMTimeGetSeconds(blendVideoUrlAsset.duration));
if(CMTimeGetSeconds(blendVideoUrlAsset.duration)>CMTimeGetSeconds(mainVideoUrlAsset.duration))
{
start = CMTimeMakeWithSeconds(0.0, blendVideoUrlAsset.duration.timescale);
duration = CMTimeMakeWithSeconds(CMTimeGetSeconds(mainVideoUrlAsset.duration), blendVideoUrlAsset.duration.timescale);
}
else if(CMTimeGetSeconds(mainVideoUrlAsset.duration)>CMTimeGetSeconds(blendVideoUrlAsset.duration))
{
start = CMTimeMakeWithSeconds(0.0, mainVideoUrlAsset.duration.timescale);
duration = CMTimeMakeWithSeconds(CMTimeGetSeconds(mainVideoUrlAsset.duration), mainVideoUrlAsset.duration.timescale);
}
CMTimeRange range = CMTimeRangeMake(start, duration);
exporter.timeRange = range;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
__weak typeof(self) weakSelf = self;
[weakSelf createMBCircularProgress:exporter];
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[weakSelf exportDidFinish:exporter];
});
}];
}
This code runs on iOS 9 and even iOS 10 on the iPhone 6s, 6, 5, etc., but it will not run on the iPhone 7 simulator.
The solution is to use the latest Xcode 8.1 beta.
It's a bug.
It's fixed in Xcode 8.1 beta.
With the Xcode 8.1 beta, [AVAssetExportSession allExportPresets] on the iPhone 7 simulator now returns:
AVAssetExportPreset1920x1080,
AVAssetExportPresetLowQuality,
AVAssetExportPresetAppleM4A,
AVAssetExportPreset640x480,
AVAssetExportPreset3840x2160,
AVAssetExportPresetHighestQuality,
AVAssetExportPreset1280x720,
AVAssetExportPresetMediumQuality,
AVAssetExportPreset960x540
With Xcode 8.0, [AVAssetExportSession allExportPresets] on the iPhone 7 simulator returns an empty array.
AVAssetExportSession can be nil, so check for nil before working with it.
https://developer.apple.com/library/content/qa/qa1730/_index.html
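For example, a minimal sketch of that nil check (reusing the mixComposition and url names from the code above):
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                  presetName:AVAssetExportPresetMediumQuality];
// On the iPhone 7 simulator with Xcode 8.0 this comes back nil because no presets are available.
if (exporter == nil) {
    NSLog(@"Could not create an export session for this preset on this device/simulator");
    return;
}
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;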
I'm working on an app that will need to concatenate a group of videos recorded from the camera. Ultimately I'll have an array of URLs to work with, but I can't figure out how to get two movie assets to concatenate properly. Here's some standalone code:
- (void)buildComposition {
NSString *path1 = [[NSBundle mainBundle] pathForResource:@"IMG_1049" ofType:@"MOV"];
NSString *path2 = [[NSBundle mainBundle] pathForResource:@"IMG_1431" ofType:@"MOV"];
NSURL *url1 = [NSURL fileURLWithPath:path1];
NSURL *url2 = [NSURL fileURLWithPath:path2];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableVideoCompositionInstruction *compositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
NSMutableArray *layerInstructions = [NSMutableArray array];
CGSize renderSize = CGSizeZero;
NSUInteger count = 0;
for (NSURL *url in @[url1, url2]) {
NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey: @(YES) };
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:options];
CMTimeRange editRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(1.0, 600));
NSError *error = nil;
CMTime insertionTime = composition.duration;
NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *videoTrack = videoTracks.firstObject;
AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoCompositionTrack insertTimeRange:editRange ofTrack:videoTrack atTime:insertionTime error:&error];
if (count == 0) {
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.6, 0.6);
[layerInstruction setTransform:CGAffineTransformConcat(videoTrack.preferredTransform, scale) atTime:kCMTimeZero];
[layerInstructions addObject:layerInstruction];
}
else {
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.9, 0.9);
[layerInstruction setTransform:CGAffineTransformConcat(videoTrack.preferredTransform, scale) atTime:kCMTimeZero];
[layerInstructions addObject:layerInstruction];
}
// set the render size
CGRect transformed = CGRectApplyAffineTransform(CGRectMakeWithCGSize(videoTrack.naturalSize), videoTrack.preferredTransform);
renderSize = CGSizeUnion(renderSize, transformed.size);
NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *audioTrack = audioTracks.firstObject;
AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioCompositionTrack insertTimeRange:editRange ofTrack:audioTrack atTime:insertionTime error:&error];
++count;
}
// set the composition instructions
compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);
compositionInstruction.layerInstructions = layerInstructions;
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoCompositionWithPropertiesOfAsset:composition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.instructions = @[compositionInstruction];
videoComposition.renderSize = renderSize;
// export the composition
NSTimeInterval time = [NSDate timeIntervalSinceReferenceDate];
NSString *filename = [[NSString stringWithFormat:@"video-export-%f", time] stringByAppendingPathExtension:@"mov"];
NSString *pathTo = [NSHomeDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"Documents/%@", filename]];
NSURL *fileUrl = [NSURL fileURLWithPath:pathTo];
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
assetExport.videoComposition = videoComposition;
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.outputURL = fileUrl;
[assetExport exportAsynchronouslyWithCompletionHandler:^{
switch (assetExport.status) {
case AVAssetExportSessionStatusFailed:
NSLog(#"\n\nFailed: %#\n\n", assetExport.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"\n\nCancelled: %#\n\n", assetExport.error);
break;
default:
NSLog(#"\n\nExported: %#\n\n", fileUrl);
break;
}
}];
}
What I expect to happen is the first video plays for 1 second at 60% scale, and then the second video plays for 1 second at 90% scale.
What actually happens is the first video plays at both 60% and 90% at the start of the video. After 1 second, the video goes black but the audio plays correctly.
Any ideas? Thanks!
Figured it out for anyone who is curious. In my layer instructions, I was mistakenly building them using the AVURLAsset's videoTrack, not the AVMutableComposition's compositionTrack!
This line:
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
Should be:
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
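For reference, a condensed sketch of the corrected part of the loop, using the same variable names as the question's code:
// Build the layer instruction from the composition track the clip was just inserted into,
// not from the source asset's track.
AVMutableVideoCompositionLayerInstruction *layerInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
CGFloat factor = (count == 0) ? 0.6 : 0.9;
[layerInstruction setTransform:CGAffineTransformConcat(videoTrack.preferredTransform,
                                                       CGAffineTransformMakeScale(factor, factor))
                        atTime:kCMTimeZero];
[layerInstructions addObject:layerInstruction];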
I have composed multiple videos and I want to remove the last 0.5 second of all the tracks in the composition. I believe removeTimeRange can be used in this situation.
The documentation for this method reads:
Removes a specified timeRange from all tracks of the composition
But I am not able to figure out the range that should be given to achieve this. My composition code is the following:
AVAsset *asset0 = [self currentAsset:0];
AVAsset *asset1 = [self currentAsset:1];
AVAsset *asset2 = [self currentAsset:2];
AVAsset *asset3 = [self currentAsset:3];
AVAsset *asset4 = [self currentAsset:4];
NSArray *assets = @[asset0, asset1, asset2, asset3, asset4];
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
NSMutableArray *instructions = [NSMutableArray new];
CGSize size = CGSizeZero;
CMTime time = kCMTimeZero;
for (AVAsset *asset in assets)
{
AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
AVAssetTrack *audioAssetTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
NSError *error;
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration)
ofTrack:assetTrack
atTime:time
error:&error];
if (error) {
NSLog(#"Error - %#", error.debugDescription);
}
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration)
ofTrack:audioAssetTrack
atTime:time
error:&error];
if (error) {
NSLog(#"Error - %#", error.debugDescription);
}
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(time, assetTrack.timeRange.duration);
videoCompositionInstruction.layerInstructions = @[[AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack]];
[instructions addObject:videoCompositionInstruction];
time = CMTimeAdd(time, assetTrack.timeRange.duration);
if (CGSizeEqualToSize(size, CGSizeZero)) {
size = assetTrack.naturalSize;
}
}
AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
mutableVideoComposition.instructions = instructions;
mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
mutableVideoComposition.renderSize = size;
pi = [AVPlayerItem playerItemWithAsset:mutableComposition];
pi.videoComposition = mutableVideoComposition;
player = [AVPlayer playerWithPlayerItem:[[CameraEngine engine] pi]];
player.volume = 0.75;
playerLayer = [AVPlayerLayer playerLayerWithPlayer: player];
playerLayer.frame = self.bounds;
[self.layer addSublayer: playerLayer];
[playerLayer setNeedsDisplay];
[player play];
I want to remove 0.5 seconds of video from all 5 tracks after composition because I get blank frames in between when the track changes.
All the tracks themselves are fine (no black frame at the end).
I have tried removing frames directly from the AVMutableCompositionTrack, but the blank frames still appear after composition.
So how do I produce this time range?
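One way to build that range (a sketch, not taken from the thread): record each clip's end time while inserting, then remove 0.5 s before each boundary. removeTimeRange: removes the range from every track and closes the gap, shifting later media earlier, so the removals should run back to front; the video composition instruction time ranges then need to be rebuilt to match the new timings.
// Collect boundary times while building the composition, e.g. right after
// time = CMTimeAdd(time, assetTrack.timeRange.duration);
//     [clipEndTimes addObject:[NSValue valueWithCMTime:time]];
NSMutableArray *clipEndTimes = [NSMutableArray array];

CMTime trim = CMTimeMakeWithSeconds(0.5, 600);
for (NSValue *endValue in [clipEndTimes reverseObjectEnumerator]) {
    CMTime clipEnd = [endValue CMTimeValue];
    // The last 0.5 s of this clip, expressed in composition time.
    CMTimeRange lastHalfSecond = CMTimeRangeMake(CMTimeSubtract(clipEnd, trim), trim);
    [mutableComposition removeTimeRange:lastHalfSecond];
}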
I'm having a problem setting the audio volume when mixing a recorded video with an audio file from the app's resources.
Here is my code:
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
NSString *resourcePath = [[NSBundle mainBundle] pathForResource:@"give-it-away" ofType:@"mp3"];
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:resourcePath] options:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES],AVURLAssetPreferPreciseDurationAndTimingKey, nil]];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[videoAsset.tracks objectAtIndex:0] atTime:kCMTimeZero error:nil];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTime) ofTrack:[audioAsset.tracks objectAtIndex:0] atTime:kCMTimeZero error:&videoError];
AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack] ;
[audioInputParams setVolume:0.3 atTime:kCMTimeZero];
[audioInputParams setTrackID:audioTrack.trackID];
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = [NSArray arrayWithObject:audioInputParams];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
exportSession.outputURL = [NSURL fileURLWithPath:finalVideoWithAudioPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.audioMix = audioMix;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusFailed:
{
[self performSelectorOnMainThread:@selector(doPostExportFailed) withObject:nil waitUntilDone:NO];
break;
}
case AVAssetExportSessionStatusCompleted:
{
[self performSelectorOnMainThread:@selector(doPostExportSuccess) withObject:nil waitUntilDone:YES];
break;
}
};
}];
The export completes successfully, but the audio volume does not change.
What am I doing wrong?
Thanks
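One thing worth checking (my own assumption, but consistent with the working code below): AVAssetExportPresetPassthrough copies the source samples without re-encoding, so the audioMix is never applied. A preset that re-encodes lets the volume change take effect, for example:
AVAssetExportSession *exportSession =
    [[AVAssetExportSession alloc] initWithAsset:composition
                                     presetName:AVAssetExportPresetHighestQuality]; // not Passthrough
exportSession.outputURL = [NSURL fileURLWithPath:finalVideoWithAudioPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.audioMix = audioMix;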
UPDATED (December 2018)
This code is working for me on iOS 12.1.2:
- (void) combineAudio:(NSString*)audioPath forRecord:(VideoRecord*)record isResource:(BOOL)isResource isSilent:(BOOL)isSilent keepCurrentAudio:(BOOL)keepCurrentAudio withCompletionHandler:(void (^)(AVAssetExportSession* exportSession, NSString* exportPath))handler {
NSString *resourcePath = audioPath;
if (isResource) {
resourcePath = [[NSBundle mainBundle] pathForResource:resourcePath ofType:@"mp3"];
}
NSURL *url = [NSURL fileURLWithPath:resourcePath];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:url options:nil];
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:[NSURL URLWithString:[NSString stringWithFormat:@"file://%@",record.videoPath]] options:nil];
AVMutableComposition *composition = [self getComposition];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioOriginalTrack = nil;
if (keepCurrentAudio && [videoAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) {
compositionAudioOriginalTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioOriginalTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
}
if (isResource) {
CMTime videoDuration = videoAsset.duration;
if(CMTimeCompare(videoDuration, audioAsset.duration) == -1){
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
} else if(CMTimeCompare(videoDuration, audioAsset.duration) == 1) {
CMTime currentTime = kCMTimeZero;
while(YES){
CMTime audioDuration = audioAsset.duration;
CMTime totalDuration = CMTimeAdd(currentTime,audioDuration);
if(CMTimeCompare(totalDuration, videoDuration)==1){
audioDuration = CMTimeSubtract(videoDuration, currentTime); // clip the final repetition to the time remaining before the video ends
}
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioDuration)
ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:currentTime error:nil];
currentTime = CMTimeAdd(currentTime, audioDuration);
if(CMTimeCompare(currentTime, videoDuration) == 1 || CMTimeCompare(currentTime, videoDuration) == 0){
break;
}
}
}
} else {
NSArray<AVAssetTrack *>* aTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
if (aTracks.count > 0) {
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[aTracks objectAtIndex:0]
atTime:kCMTimeZero error:nil];
}
}
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *tracks = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
if (tracks.count == 0) {
CLSNSLog(#"%# - combineAudio - video tracks zero", NSStringFromClass([self class]));
// TODO - Handle this error.
return;
}
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[tracks objectAtIndex:0]
atTime:kCMTimeZero error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:[composition copy]
presetName:AVAssetExportPresetHighestQuality];
NSString *exportPath = [record.videoPath stringByReplacingOccurrencesOfString:@".mp4" withString:@"_audio_added.mp4"];
if ([record.videoPath containsString:@".MOV"]) {
exportPath = [record.videoPath stringByReplacingOccurrencesOfString:@".MOV" withString:@"_audio_added.mp4"];
}
if ([record.videoPath containsString:@".mov"]) {
exportPath = [record.videoPath stringByReplacingOccurrencesOfString:@".mov" withString:@"_audio_added.mp4"];
}
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
float volume = .5f;
if (keepCurrentAudio) {
volume = .6f;
}
AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioTrack] ;
[audioInputParams setVolumeRampFromStartVolume:(isSilent ? .0f : volume) toEndVolume:(isSilent ? .0f : volume) timeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)];
[audioInputParams setTrackID:compositionAudioTrack.trackID];
NSArray *inputParams = [NSArray arrayWithObject:audioInputParams];
AVMutableAudioMixInputParameters *audioOriginalInputParams = nil;
if (keepCurrentAudio) {
audioOriginalInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioOriginalTrack] ;
// Parameters for the video's original audio track, kept very low under the added music.
[audioOriginalInputParams setVolumeRampFromStartVolume:(isSilent ? .0f : .06f) toEndVolume:(isSilent ? .0f : .06f) timeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)];
[audioOriginalInputParams setTrackID:compositionAudioOriginalTrack.trackID];
inputParams = [NSArray arrayWithObjects:audioInputParams, audioOriginalInputParams, nil];
}
audioMix.inputParameters = inputParams;
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.audioMix = audioMix;
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
handler(_assetExport, exportPath);
}];
}
I call this function like this (sound is the name of the resource file):
[[FBVideoEditor shared] combineAudio:sound forRecord:self.record isResource:YES isSilent:NO withCompletionHandler:^(AVAssetExportSession *exportSession, NSString *exportPath) {
switch (exportSession.status)
{
case AVAssetExportSessionStatusCancelled:
CLSNSLog(#"AVAssetExportSessionStatusCancelled");
break;
case AVAssetExportSessionStatusExporting:
CLSNSLog(#"AVAssetExportSessionStatusExporting");
break;
case AVAssetExportSessionStatusUnknown:
CLSNSLog(#"AVAssetExportSessionStatusUnknown");
break;
case AVAssetExportSessionStatusWaiting:
CLSNSLog(#"AVAssetExportSessionStatusWaiting");
break;
case AVAssetExportSessionStatusFailed:
{
CLSNSLog(#"Export Failed with error messsage: %#", exportSession.error.userInfo );
break;
}
case AVAssetExportSessionStatusCompleted:
{
// Success
}
};
}];
The VideoRecord class contains all the necessary data for my videos. In this case it only uses the video path, so you can replace it with a simple NSString.
Hope it helps.
When I try to merge two videos, one recorded with the front camera and the other with the back camera, there is a problem with the orientation of one of the videos. I use this code:
-(void) combineMultibleVideoSegments
{
NSError *error;
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.renderScale = 1.0;
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CMTime currentTime = kCMTimeZero;
CGFloat videoTrackY = 0.0;
CGFloat videoTrackWidth=0.0;
for (int i = 0; i<videosPathsArray.count; i++)
{
NSURL *videoURL = [NSURL fileURLWithPath:[videosPathsArray objectAtIndex:i]];
AVURLAsset* sourceAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
NSLog(#"path = %#" , [videosPathsArray objectAtIndex:i]);
NSError *error = nil;
BOOL ok = NO;
AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *sourceAudioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
videoTrackY = sourceVideoTrack.naturalSize.height;
videoTrackWidth = sourceVideoTrack.naturalSize.width;
if ([[VideosReocrdCamera objectAtIndex:i] isEqual:@"Yes"])
{
CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI/2);
CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);
[compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
[layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];
}
else
{
CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(0.0);
CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);
[compositionVideoTrack setPreferredTransform:rotationTransform];
[layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];
}
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]) ofTrack:sourceAudioTrack atTime:currentTime error:&error];
ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]) ofTrack:sourceVideoTrack atTime:currentTime error:&error];
currentTime = CMTimeAdd(currentTime, [sourceAsset duration]);
if (!ok) {
// Deal with the error.
NSLog(#"something went wrong");
}
NSLog(#"\n source asset duration is %f \n source vid track timerange is %f %f \n composition duration is %f \n composition vid track time range is %f %f",CMTimeGetSeconds([sourceAsset duration]), CMTimeGetSeconds(sourceVideoTrack.timeRange.start),CMTimeGetSeconds(sourceVideoTrack.timeRange.duration),CMTimeGetSeconds([composition duration]), CMTimeGetSeconds(compositionVideoTrack.timeRange.start),CMTimeGetSeconds(compositionVideoTrack.timeRange.duration));
}
So can anyone tell me what I can do to fix this problem?
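One common approach (a sketch, assuming the sourceVideoTrack, layerInstruction, and currentTime variables from the loop above) is to derive each clip's transform from its own preferredTransform instead of hard-coding a 90° rotation and a 320-point translation, so front- and back-camera clips each come out upright:
// Apply the source track's own orientation, then shift the rotated frame back to the origin.
CGAffineTransform t = sourceVideoTrack.preferredTransform;
CGRect sourceRect = CGRectMake(0, 0,
                               sourceVideoTrack.naturalSize.width,
                               sourceVideoTrack.naturalSize.height);
CGRect rotatedRect = CGRectApplyAffineTransform(sourceRect, t);
t = CGAffineTransformConcat(t, CGAffineTransformMakeTranslation(-rotatedRect.origin.x,
                                                                -rotatedRect.origin.y));
[layerInstruction setTransform:t atTime:currentTime]; // set per clip, at the time the clip starts
The videoComposition's renderSize then has to match the rotated dimensions (rotatedRect.size), otherwise portrait clips get cropped.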