I have composed multiple videos and I want to remove the last 0.5 seconds of all the tracks in the composition. I believe removeTimeRange: can be used in this situation.
The documentation for this method reads:
Removes a specified timeRange from all tracks of the composition.
But I am not able to figure out the range that should be given to achieve this. My composition code is the following:
AVAsset *asset0 = [self currentAsset:0];
AVAsset *asset1 = [self currentAsset:1];
AVAsset *asset2 = [self currentAsset:2];
AVAsset *asset3 = [self currentAsset:3];
AVAsset *asset4 = [self currentAsset:4];
NSArray *assets = @[asset0, asset1, asset2, asset3, asset4];
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
NSMutableArray *instructions = [NSMutableArray new];
CGSize size = CGSizeZero;
CMTime time = kCMTimeZero;
for (AVAsset *asset in assets)
{
AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
AVAssetTrack *audioAssetTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
NSError *error;
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration)
ofTrack:assetTrack
atTime:time
error:&error];
if (error) {
NSLog(#"Error - %#", error.debugDescription);
}
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration)
ofTrack:audioAssetTrack
atTime:time
error:&error];
if (error) {
NSLog(#"Error - %#", error.debugDescription);
}
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(time, assetTrack.timeRange.duration);
videoCompositionInstruction.layerInstructions = @[[AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack]];
[instructions addObject:videoCompositionInstruction];
time = CMTimeAdd(time, assetTrack.timeRange.duration);
if (CGSizeEqualToSize(size, CGSizeZero)) {
size = assetTrack.naturalSize;
}
}
AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
mutableVideoComposition.instructions = instructions;
mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
mutableVideoComposition.renderSize = size;
pi = [AVPlayerItem playerItemWithAsset:mutableComposition];
pi.videoComposition = mutableVideoComposition;
player = [AVPlayer playerWithPlayerItem:[[CameraEngine engine] pi]];
player.volume = 0.75;
playerLayer = [AVPlayerLayer playerLayerWithPlayer: player];
playerLayer.frame = self.bounds;
[self.layer addSublayer: playerLayer];
[playerLayer setNeedsDisplay];
[player play];
I want to remove 0.5 seconds of video from all 5 tracks after composition, because I get blank frames in between when the track changes.
All the tracks themselves are fine (no black frames at the end).
I have tried removing frames directly from the AVMutableCompositionTrack, but again blank frames appear after composition.
So how should I produce this time range?
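What I have in mind is something like the sketch below (untested; segmentEnds is a hypothetical array I would fill with [NSValue valueWithCMTime:time] at the end of each loop iteration above, so it holds every segment's end time in the composition):
// Untested sketch: trim the last 0.5 s of every inserted segment.
// removeTimeRange: shifts everything after the removed range, so walking the
// segment boundaries backwards keeps the earlier boundaries valid.
CMTime trim = CMTimeMakeWithSeconds(0.5, 600);
for (NSValue *boundary in [segmentEnds reverseObjectEnumerator]) {
    CMTime segmentEnd = [boundary CMTimeValue];
    CMTimeRange rangeToRemove = CMTimeRangeMake(CMTimeSubtract(segmentEnd, trim), trim);
    [mutableComposition removeTimeRange:rangeToRemove];
}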
Related
I am using the method below to add a watermark image to a video, but the problem is that when I record the video and add the watermark to it, the video is rotated by 90 degrees.
+(void)createWatermarkForVideo:(NSURL*)videoURL watermark:(UIImage*)watermarkImage stickerContainerView:(UIView*)containerView completionAction:(VideoMergeCompletionBlock)completion{
AVURLAsset *audioAsset, *videoAsset;
//Create AVMutableComposition Object which will hold our multiple AVMutableCompositionTrack or we can say it will hold our video and audio files.
AVMutableComposition* mixComposition = [AVMutableComposition composition];
//Now first load your audio file using AVURLAsset. Make sure you give the correct path of your videos.
audioAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
//Now we are creating the first AVMutableCompositionTrack containing our audio and add it to our AVMutableComposition object.
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
//Now we will load video file.
videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
//Now we are creating the second AVMutableCompositionTrack containing our video and add it to our AVMutableComposition object.
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *aVideoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo][0];
AVAssetTrack *aAudioAssetTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio][0];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration);
[a_compositionVideoTrack setPreferredTransform:aVideoAssetTrack.preferredTransform];
@try{
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:aVideoAssetTrack atTime:kCMTimeZero error:nil];
if(CMTimeGetSeconds(audioAsset.duration) == CMTimeGetSeconds(videoAsset.duration)){
@try{
[b_compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration) ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:nil];
}
@catch(NSException *exception){
}
}
else{
@try{
[b_compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration) ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:nil];
}
@catch(NSException *exception){
}
}
}
@catch(NSException *exception){
}
// create the layer with the watermark image
CALayer* aLayer = [CALayer layer];
aLayer.contents = (id)watermarkImage.CGImage;
CGSize videoSize = [aVideoAssetTrack naturalSize];
CGFloat videoScale = videoSize.width/containerView.frame.size.width;
aLayer.frame = CGRectMake(0, 0, containerView.frame.size.width * videoScale, containerView.frame.size.height * videoScale);
aLayer.opacity = 0.9;
//sorts the layer in proper order
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
printf("Video Size %f %f",videoSize.width,videoSize.height);
//create the composition and add the instructions to insert the layer:
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
/// instruction
AVMutableVideoCompositionInstruction* instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack* mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
AVAssetTrack *assetVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].lastObject;
// Apply the original transform.
if (assetVideoTrack && a_compositionVideoTrack) {
[a_compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
}
AVAssetExportSession *_assetExport;
// export video
_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
_assetExport.videoComposition = videoComp;
NSLog (#"created exporter. supportedFileTypes: %#", _assetExport.supportedFileTypes);
NSString* videoName = #"NewWatermarkedVideo.mov";
NSString* exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL* exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
dispatch_async(dispatch_get_main_queue(), ^{
completion(_assetExport);
});
}
];
}
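One direction I have seen suggested for the 90 degree rotation (untested with this exact code) is to apply the source track's preferredTransform through the layer instruction and swap the render size when the clip was recorded in portrait. A rough sketch, using aVideoAssetTrack, layerInstruction and videoComp from the method above, placed where the layer instruction is configured:
// Rough sketch: carry the capture orientation into the render.
CGAffineTransform t = aVideoAssetTrack.preferredTransform;
[layerInstruction setTransform:t atTime:kCMTimeZero];
// A 90 degree rotation in the transform means the rendered frame is portrait,
// so the render size has to be swapped accordingly.
BOOL isPortrait = (t.a == 0 && t.d == 0 && (t.b == 1.0 || t.b == -1.0));
CGSize naturalSize = aVideoAssetTrack.naturalSize;
videoComp.renderSize = isPortrait ? CGSizeMake(naturalSize.height, naturalSize.width) : naturalSize;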
I am using UIImagePickerController to record a video, and I am using AVPlayer to play a video picked from the library, adding an AVPlayerLayer to cameraOverlayView so the video is visible while recording.
But I need to export a video that merges the 2 videos (one is the recorded video and one is the library video). The resulting video should look the same as the view while I record (including both videos).
Please help me find a way to do that.
Finally, I found the solution. It is simpler than I thought; AVFoundation provides everything needed for my requirements.
//Load video using AVURLAsset
AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource: @"file1" ofType: @"mp4"]] options:nil];
AVURLAsset *secondAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource: @"file2" ofType: @"mp4"]] options:nil];
//Create AVMutableComposition Object. This object will hold our multiple AVMutableCompositionTrack.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//Here we are creating the first AVMutableCompositionTrack. See how we are adding a new track to our AVMutableComposition.
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
//Now we set the length of the firstTrack equal to the length of the firstAsset and add the firstAsset to our newly created track at kCMTimeZero so the video plays from the start of the track.
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//Now we repeat the same process for the 2nd track as we did above for the first track. Note that the new track also starts at kCMTimeZero, meaning both tracks will play simultaneously.
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//Create instruction
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, secondAsset.duration);
//Create layer instruction for first video
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform Scale = CGAffineTransformMakeScale(0.7f,0.7f);
CGAffineTransform Move = CGAffineTransformMakeTranslation(200,120);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
//Create layer instruction for second video
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
CGAffineTransform SecondScale = CGAffineTransformMakeScale(1.2f,1.2f);
CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0,0);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale,SecondMove) atTime:kCMTimeZero];
//Add the layer instruction to the composition instruction
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];
//Add composition instruction to video composition
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(640, 480);
And if you want to play the video composition
AVPlayerItem * newPlayerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
newPlayerItem.videoComposition = MainCompositionInst;
self.mPlayer = [[AVPlayer alloc] initWithPlayerItem:newPlayerItem];
[self.mPlayer addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew context:nil];
AVPlayerLayer *layer = [AVPlayerLayer playerLayerWithPlayer:self.mPlayer];
self.mPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
layer.frame = CGRectMake(0, 0, 640, 480);
[self.view.layer addSublayer: layer];
[self.mPlayer play];
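Note that the status observer added above should also be removed once you are done with the player, otherwise KVO will complain when the observer is deallocated. A minimal sketch, assuming the same mPlayer property:
// When tearing the player down (e.g. in dealloc or when dismissing the view)
[self.mPlayer removeObserver:self forKeyPath:@"status"];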
And if you want to export the video composition to the documents directory
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString* path = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"outputFile_%@.mp4",@"Main"]];
NSURL* outputFileUrl = [NSURL fileURLWithPath:path];
exportSession = [[AVAssetExportSession alloc]initWithAsset:mixComposition presetName:AVAssetExportPreset640x480];
exportSession.videoComposition = MainCompositionInst;
exportSession.outputFileType = @"public.mpeg-4";
exportSession.outputURL = outputFileUrl;
NSLog(@"duration = %f", CMTimeGetSeconds(mixComposition.duration));
exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration);
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch(exportSession.status){
case AVAssetExportSessionStatusExporting:
NSLog(#"Exporting...");
break;
case AVAssetExportSessionStatusCompleted:
NSLog(#"Export completed, wohooo!! \n Check %#", path2);
break;
case AVAssetExportSessionStatusWaiting:
NSLog(#"Waiting...");
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Cancelled");
break;
case AVAssetExportSessionStatusUnknown:
NSLog(#"Unknown");
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Failed with error: %#, try to save on %#", exportSession.error, path2);
break;
}
}];
Finally, if you want to track the progress
//Add an NSTimer to periodically check the progress of the AVAssetExportSession
exportProgressBarTimer = [NSTimer scheduledTimerWithTimeInterval:.1 target:self selector:@selector(updateExportDisplay) userInfo:nil repeats:YES];
And show the progress
- (void)updateExportDisplay {
NSLog(#"Exporting: %f", exportSession.progress);
if (exportSession.progress > .99) {
[exportProgressBarTimer invalidate];
}
}
I want to export a video file from a composition with two videos (with audio) and one audio track. It works fine on an iPhone 5s and later, but it fails on an iPhone 5c (iOS 9.2.1). The error is returned from this:
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
[self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
} else {
NSLog(@"Export error: %@", _assetExport.error);
[self performSelectorOnMainThread:@selector(videoHasFailed) withObject:nil waitUntilDone:YES];
}
}
];
The log that it printed:
Export error: Error Domain=AVFoundationErrorDomain Code=-11820 "Cannot Complete Export" UserInfo={NSLocalizedRecoverySuggestion=Try exporting again., NSLocalizedDescription=Cannot Complete Export}
As stated, it works very well on my iPhone 5s, 6 and 6s, but on my iPhone 5c it returns this error. Hopefully someone has experience with this.
The full code for creating the tracks and composition:
- (void) generateVideoWithInputPath:(NSString*)inputVideo andAudioFileName:(NSString*)audioFileName andVolume:(float)volume {
NSString* introVideoPath = [[NSBundle mainBundle] pathForResource:@"IntroVideo" ofType:@"mp4"];
NSURL* introVideoUrl = [NSURL fileURLWithPath:introVideoPath];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:inputVideo];
self.outputAssetURL = NULL;
self.outputFilePath = finalVideoPath;
NSURL* outputFileUrl = [NSURL fileURLWithPath:self.outputFilePath];
unlink([self.outputFilePath UTF8String]); // remove existing result
// Create composition
AVMutableComposition* mixComposition = [AVMutableComposition composition];
// Create Asset for introVideo
AVURLAsset* introVideoAsset = [[AVURLAsset alloc] initWithURL:introVideoUrl options:nil];
// Create time ranges
CMTime introStartTime = kCMTimeZero;
CMTime introEndTime = introVideoAsset.duration;
CMTimeRange introVideo_timeRange = CMTimeRangeMake(introStartTime, introEndTime);
//add VideoTrack of introVideo to composition
NSArray* introVideoAssetTracks = [introVideoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack* introVideoAssetTrack = ([introVideoAssetTracks count] > 0 ? [introVideoAssetTracks objectAtIndex:0] : nil);
AVMutableCompositionTrack* b_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionVideoTrack insertTimeRange:introVideo_timeRange ofTrack:introVideoAssetTrack atTime:introStartTime error:nil];
// Add AudioTrack of introVideo to composition
NSArray* audioAssetTracksIntro = [introVideoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrackIntro = ([audioAssetTracksIntro count] > 0 ? [audioAssetTracksIntro objectAtIndex:0] : nil);
AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionAudioTrack insertTimeRange:introVideo_timeRange ofTrack:audioAssetTrackIntro atTime:introStartTime error:nil];
// Create Asset for inputVideo
CMTime nextClipStartTime = introEndTime;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
// Create time ranges
CMTime videoStartTime = kCMTimeZero;
CMTime videoEndTime = videoAsset.duration;
if (CMTIME_IS_INVALID(videoEndTime)) {
NSLog(#"videoEndTime is invalid");
}
CMTimeRange mainVideo_timeRange = CMTimeRangeMake(videoStartTime, videoEndTime);
// Add VideoTrack of inputVideo to composition
NSArray* videoAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack* videoAssetTrack2 = ([videoAssetTracks2 count] > 0 ? [videoAssetTracks2 objectAtIndex:0] : nil);
// CMTime audioDurationFix = CMTimeAdd(videoAsset.duration, CMTimeMakeWithSeconds(-1.0f, 1));
// CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioDurationFix);
AVMutableCompositionTrack* a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:mainVideo_timeRange ofTrack:videoAssetTrack2 atTime:nextClipStartTime error:nil];
// Add AudioTrack of inputVideo to composition
NSArray* audioAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrack2 = ([audioAssetTracks2 count] > 0 ? [audioAssetTracks2 objectAtIndex:0] : nil);
//AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack2 atTime:nextClipStartTime error:nil];
AVMutableAudioMix* audioMix = NULL;
if (audioFileName) {
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audioFileName];
// Create Asset for audio (song)
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
// Add Audio of song to composition
NSArray* audioAssetTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrack = ([audioAssetTracks count] > 0 ? [audioAssetTracks objectAtIndex:0] : nil);
AVMutableCompositionTrack* b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];
// Set Volume of song
NSArray *tracksToDuck = [mixComposition tracksWithMediaType:AVMediaTypeAudio];
NSMutableArray *trackMixArray = [NSMutableArray array];
// for (int i = 0; i < [tracksToDuck count]; i++) {
AVAssetTrack *leTrack = [tracksToDuck objectAtIndex:0];
AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack];
[trackMix setVolume:1 atTime:kCMTimeZero];
[trackMixArray addObject:trackMix];
AVAssetTrack *leTrack2 = [tracksToDuck objectAtIndex:1];
AVMutableAudioMixInputParameters *trackMix2 = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack2];
[trackMix2 setVolume:volume atTime:kCMTimeZero];
[trackMixArray addObject:trackMix2];
// }
audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = trackMixArray;
}
// Export composition to videoFile
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie; //@"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
_assetExport.videoComposition = [self getVideoComposition:videoAsset intro:introVideoAsset composition:mixComposition];
// Set song volume audio
if (audioMix != NULL) {
_assetExport.audioMix = audioMix;
}
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
[self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
} else {
NSLog(@"Export error: %@", _assetExport.error);
[self performSelectorOnMainThread:@selector(videoHasFailed) withObject:nil waitUntilDone:YES];
}
}
];
}
-(AVMutableVideoComposition *) getVideoComposition:(AVAsset *)asset intro:(AVAsset *)intro composition:( AVMutableComposition*)composition{
AVMutableCompositionTrack *compositionIntroTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *audioTracksArray = [intro tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *introTrack;
if (audioTracksArray.count > 0) {
introTrack = [audioTracksArray objectAtIndex:0];
[compositionIntroTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, intro.duration) ofTrack:introTrack atTime:kCMTimeZero error:nil];
}
NSArray *videoTracksArray = [asset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *videoTrack;
if (videoTracksArray.count > 0) {
videoTrack = [videoTracksArray objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:intro.duration error:nil];
}
AVMutableVideoCompositionLayerInstruction *firstLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionIntroTrack];
AVMutableVideoCompositionLayerInstruction *secondLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CGSize videoSize;
if (videoTrack && introTrack) {
CGSize trackDimensions = [videoTrack naturalSize];
videoSize = CGSizeMake(0, 0);
// turn around for portrait
if (trackDimensions.height>trackDimensions.width) {
videoSize = CGSizeMake(trackDimensions.width, trackDimensions.height);
} else {
videoSize = CGSizeMake(trackDimensions.height, trackDimensions.width);
}
CGAffineTransform transform = videoTrack.preferredTransform;
CGAffineTransform scale = CGAffineTransformMakeScale((videoSize.width/introTrack.naturalSize.width),(videoSize.height/introTrack.naturalSize.height));
[firstLayerInst setTransform:scale atTime:kCMTimeZero];
[secondLayerInst setTransform:transform atTime:kCMTimeZero];
} else {
videoSize = [[FilteringClass sharedFilteringClass] getVideoSize];
}
CMTime totalTime = CMTimeAdd(asset.duration, intro.duration);
NSLog(#"Total videotime: %lld", totalTime.value);
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
inst.layerInstructions = [NSArray arrayWithObjects:firstLayerInst, secondLayerInst, nil];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObject:inst];
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderScale = 1.0;
return videoComposition;
}
In my opinion you are hitting the decoder limit set in AVFoundation. In iOS 5 the decoder limit is 4 and in iOS 6 it is 16, so try exporting a smaller composition; if that works, the problem is with your video file, which might exceed the decoder limit.
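As a quick diagnostic (just a sketch, not part of your pipeline), you can log how many composition tracks the export session would have to feed through decoders at once and compare that with the limits mentioned above:
// Count the composition tracks the export has to decode simultaneously.
NSUInteger videoTrackCount = [mixComposition tracksWithMediaType:AVMediaTypeVideo].count;
NSUInteger audioTrackCount = [mixComposition tracksWithMediaType:AVMediaTypeAudio].count;
NSLog(@"Composition holds %lu video and %lu audio tracks",
      (unsigned long)videoTrackCount, (unsigned long)audioTrackCount);
// If this total goes beyond the decoder limit described above, older devices
// such as the iPhone 5c may fail the export even though newer ones succeed.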
I am merging multiple videos and playing the result in AVPlayer. The video starts fine, but after some time the video freezes and only the audio keeps playing. I have a UISlider which lets me go forward or backward in the video using seekToTime:.
The weird part is that after the video freezes, if I use the slider to go forward or backward, the video starts playing perfectly fine again. I have tried my app on multiple devices.
So basically I have to use the seekToTime: method to give the video a nudge every time it freezes.
My code for merging the videos and playing them back is the following:
AVAsset *asset0 = [self currentAsset:0];
AVAsset *asset1 = [self currentAsset:1];
AVAsset *asset2 = [self currentAsset:2];
AVAsset *asset3 = [self currentAsset:3];
AVAsset *asset4 = [self currentAsset:4];
NSArray *assets = @[asset0, asset1, asset2, asset3, asset4];
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
NSMutableArray *instructions = [NSMutableArray new];
CGSize size = CGSizeZero;
CMTime time = kCMTimeZero;
for (AVAsset *asset in assets)
{
AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
AVAssetTrack *audioAssetTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
NSError *error;
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration)
ofTrack:assetTrack
atTime:time
error:&error];
if (error) {
NSLog(#"Error - %#", error.debugDescription);
}
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration)
ofTrack:audioAssetTrack
atTime:time
error:&error];
if (error) {
NSLog(#"Error - %#", error.debugDescription);
}
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(time, assetTrack.timeRange.duration);
videoCompositionInstruction.layerInstructions = @[[AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack]];
[instructions addObject:videoCompositionInstruction];
time = CMTimeAdd(time, assetTrack.timeRange.duration);
if (CGSizeEqualToSize(size, CGSizeZero)) {
size = assetTrack.naturalSize;
}
}
AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
mutableVideoComposition.instructions = instructions;
mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
mutableVideoComposition.renderSize = size;
pi = [AVPlayerItem playerItemWithAsset:mutableComposition];
pi.videoComposition = mutableVideoComposition;
player = [AVPlayer playerWithPlayerItem:[[CameraEngine engine] pi]];
player.volume = 0.75;
playerLayer = [AVPlayerLayer playerLayerWithPlayer: player];
playerLayer.frame = self.bounds;
[self.layer addSublayer: playerLayer];
[playerLayer setNeedsDisplay];
[player play];
UPDATE: I found this link which describes a similar problem, but I do not fully understand his solution.
Why do you call [AVPlayer playerWithPlayerItem:[[CameraEngine engine] pi]] instead of [AVPlayer playerWithPlayerItem:pi]? You construct your composition and assign it to pi, but pi is not used anywhere. Try using [AVPlayer playerWithPlayerItem:pi] instead.
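In other words, the only change for that first suggestion is:
// Use the player item that actually carries your composition and videoComposition.
player = [AVPlayer playerWithPlayerItem:pi];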
If this does not help, try changing how you generate your videoComposition. Try to do the following:
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mutableComposition.timeRange.duration);
videoCompositionInstruction.layerInstructions = @[[AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:[mutableComposition tracksWithMediaType:AVMediaTypeVideo].firstObject]];
AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
mutableVideoComposition.instructions = @[videoCompositionInstruction];
mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
mutableVideoComposition.renderSize = size;
after your for loop. Hope this helps!
When I try to merge two videos, one recorded with the front camera and the other with the back camera, there is a problem with the orientation of one of the videos. I use this code:
-(void) combineMultibleVideoSegments
{
NSError *error;
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.renderScale = 1.0;
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CMTime currentTime = kCMTimeZero;
CGFloat videoTrackY = 0.0;
CGFloat videoTrackWidth=0.0;
for (int i = 0; i<videosPathsArray.count; i++)
{
NSURL *videoURL = [NSURL fileURLWithPath:[videosPathsArray objectAtIndex:i]];
AVURLAsset* sourceAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
NSLog(#"path = %#" , [videosPathsArray objectAtIndex:i]);
NSError *error = nil;
BOOL ok = NO;
AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *sourceAudioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
videoTrackY = sourceVideoTrack.naturalSize.height;
videoTrackWidth = sourceVideoTrack.naturalSize.width;
if ([[VideosReocrdCamera objectAtIndex:i] isEqual:@"Yes"])
{
CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI/2);
CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);
[compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
[layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];
}
else
{
CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(0.0);
CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);
[compositionVideoTrack setPreferredTransform:rotationTransform];
[layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];
}
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]) ofTrack:sourceAudioTrack atTime:currentTime error:&error];
ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]) ofTrack:sourceVideoTrack atTime:currentTime error:&error];
currentTime = CMTimeAdd(currentTime, [sourceAsset duration]);
if (!ok) {
// Deal with the error.
NSLog(#"something went wrong");
}
NSLog(#"\n source asset duration is %f \n source vid track timerange is %f %f \n composition duration is %f \n composition vid track time range is %f %f",CMTimeGetSeconds([sourceAsset duration]), CMTimeGetSeconds(sourceVideoTrack.timeRange.start),CMTimeGetSeconds(sourceVideoTrack.timeRange.duration),CMTimeGetSeconds([composition duration]), CMTimeGetSeconds(compositionVideoTrack.timeRange.start),CMTimeGetSeconds(compositionVideoTrack.timeRange.duration));
}
So can anyone tell me what I can do to fix this problem?
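One direction I am considering (not yet verified) is to take each segment's transform from that segment's own source track and set it on the layer instruction at the segment's start time, instead of overwriting the composition track's preferredTransform inside the loop, and then attach the layer instruction to an instruction that spans the whole composition. Roughly, using the names from the code above (the render size choice is an assumption):
// Inside the for loop, replacing the if/else transform block: apply the
// orientation recorded with each clip at that clip's start time, so front and
// back camera segments each keep their own rotation.
CGAffineTransform segmentTransform = sourceVideoTrack.preferredTransform;
[layerInstruction setTransform:segmentTransform atTime:currentTime];

// After the loop, hook the layer instruction up to the video composition:
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];
videoComposition.renderSize = CGSizeMake(videoTrackWidth, videoTrackY); // assumes the last clip's natural size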