I created a video from an array of images.
The video is created successfully, and I then add audio to that created video file.
I create an AVMutableComposition object, add the video and audio by creating AVAssetTracks, and finally export everything into a single video file with AVAssetExportSession.
Suppose the first video (without audio) is vdo.mp4 and the final one (after adding audio) is final.mp4; my final.mp4 ends up smaller in size and lower in resolution than vdo.mp4.
Here is my code, which combines the two files:
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
NSError * error = nil;
AVMutableComposition * composition = [AVMutableComposition composition];
NSURL *url = [NSURL fileURLWithPath:filePath];
AVURLAsset * videoAsset = [AVURLAsset URLAssetWithURL:url options:nil];
AVAssetTrack * videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID: kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,videoAsset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:&error];
CMTime audioStartTime = kCMTimeZero;
for ( NSInteger i = 0;i< [mArrAudioFileNames count];i++ )
{
NSString *audioFileName = nil;
NSString *docsDir = nil;
if ( [mArrAudioFileNames objectAtIndex:i] != [NSNull null]) {
audioFileName = [mArrAudioFileNames objectAtIndex:i];
docsDir = [[self dataFolderPathForAudio] stringByAppendingPathComponent:audioFileName];
}else{
//audioFileName = @" ";
docsDir = [[NSBundle mainBundle] pathForResource:@"sample" ofType:@"mp3"];
}
// NSString *docsDir = [[self dataFolderPathForAudio] stringByAppendingPathComponent:audioFileName];
AVURLAsset * urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:docsDir] options:nil];
AVAssetTrack * audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,urlAsset.duration) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
Float64 duration = CMTimeGetSeconds(urlAsset.duration);
audioStartTime = CMTimeAdd(audioStartTime, CMTimeMake((int) ((duration * kRecordingFPS) + 0.5), kRecordingFPS));
}
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
//assetExport.videoComposition = compositionVideoTrack;
assetExport.outputFileType = AVFileTypeQuickTimeMovie; // @"com.apple.quicktime-movie"
assetExport.outputURL = [NSURL fileURLWithPath:outFilePath];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
NSLog(@"Export Complete");
[self performSelectorOnMainThread:@selector(creatingVideoDone:)
withObject:outFilePath waitUntilDone:NO];
[assetExport release];
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export Failed");
NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
// Notify the delegate so it can move to the error view
if ( mDelegate != nil && [mDelegate respondsToSelector:@selector(errorAlert:)])
{
[self performSelectorOnMainThread:@selector(errorOccured:)
withObject:[assetExport.error localizedDescription]
waitUntilDone:NO];
}
[assetExport release];
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export Cancelled");
NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
// Notify the delegate so it can move to the error view
if ( mDelegate != nil && [mDelegate respondsToSelector:@selector(errorAlert:)])
{
[self performSelectorOnMainThread:@selector(errorOccured:)
withObject:[assetExport.error localizedDescription]
waitUntilDone:NO];
}
[assetExport release];
break;
}
}];
Any help is appreciated.
Thanks.
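One hedged observation on the size and resolution drop: AVAssetExportPresetMediumQuality re-encodes the video at a reduced bitrate and resolution, which by itself would explain final.mp4 coming out smaller than vdo.mp4. A minimal sketch of an alternative, assuming the composition built above:
// Sketch only, not a confirmed fix: AVAssetExportPresetPassthrough copies the
// source samples without re-encoding, so the video keeps its original dimensions.
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
If passthrough is not compatible with the composition, AVAssetExportPresetHighestQuality still re-encodes, but at the highest available quality.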
How do I save slideshow created in camera roll?
(NSString *))handler;
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVURLAsset* videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
NSError * error = nil;
for (int i=0;i<[arrayOfSounds count];i++)
{
NSString *pathString = [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"%@", [arrayOfSounds objectAtIndex:i]] ofType:@"mp3"];
NSLog(@"pathString = %@",pathString);
AVURLAsset * urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:pathString] options:nil];
AVAssetTrack * audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
CMTime audioDuration = videoAsset.duration;
audioDurationSeconds = CMTimeGetSeconds(audioDuration);
int startDur= [[arrayOfTime objectAtIndex:i] intValue];
NSLog(#"startDur = %d",startDur);
CMTime audioStartTime = CMTimeMake(i,1);
CMTime presentTime = CMTimeMake(1, 1);
CMTime EndTime = CMTimeMake(audioDurationSeconds, 1);
CMTime audioEndTime = CMTimeAdd(presentTime, EndTime);
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(presentTime,audioEndTime) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
}
NSString* movPath = [NSString stringWithFormat:@"%@/%@",documentsDirectory,@"Export.mov"];
//AVURLAsset* videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:movPath] options:nil];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSError* vidError;
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:([videoAsset tracksWithMediaType:AVMediaTypeVideo].count >0)? [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]:nil atTime:kCMTimeZero error:&vidError];
NSLog(#"Vid error = %#",vidError);
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
NSString* videoName = @"exportFinal.mov";
NSString *exportPath = [NSString stringWithFormat:@"%@/%@",documentsDirectory,videoName];
NSLog(@"exportPath = %@",exportPath);
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie";
NSLog(@"file type %@",_assetExport.outputFileType);
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (_assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
//export complete
NSLog(@"Export Complete");
//[self uploadToYouTube];
[self cleanUpProcess];
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export Failed");
NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
//export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export Cancelled");
NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
//export cancelled
break;
}
}];
}
- (void)cleanUpProcess{
NSError* error;
NSArray *files = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectory error:&error];
for(NSString* myFiles in files){
if([[myFiles pathExtension] isEqualToString:@"mov"]){
continue;
}
NSString* filePath = [NSString stringWithFormat:@"%@/%@",documentsDirectory,myFiles];
if(![[NSFileManager defaultManager] removeItemAtPath:filePath error:&error]){
NSLog(@"Error Deleting");
}
}
}
It is saved at this path:
"Users/devendrasingh/Library/Developer/CoreSimulator/Devices/394AD1C9-E950-422E-BDA1-083CDCEE83F6/data/Containers/Data/Application/98BEC162-C686-4BC1-B698-567B45D65F27/Documents/exportFinal.mov"
It is not being saved to the camera roll.
Try this:
[[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error)
NSLog(#"%#", error);
[[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
}];
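For context, a hedged sketch of where that call would sit: the export writes into the app's Documents directory, which the Photos app never reads, so the file has to be handed to the asset library explicitly once the export completes. The method name below is hypothetical; it assumes AssetsLibrary.framework is linked:
#import <AssetsLibrary/AssetsLibrary.h>
// Call this from the AVAssetExportSessionStatusCompleted branch of the export
// completion handler, once the file exists on disk.
- (void)saveExportedVideoToCameraRoll:(NSURL *)outputFileURL
{
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"Saving to camera roll failed: %@", error);
        }
        // Optionally remove the Documents copy once it is in the library.
        [[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
    }];
}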
I am really stuck on this. I have merged videos (mp4 files bundled in Xcode, and mov files from the user's photo library) and audio (mp3 files bundled in Xcode). On the simulator it works fine and plays both video and audio, but on the device it doesn't show the video and only plays the sound. Here is the code for merging:
AVMutableComposition* mixComposition = [AVMutableComposition composition];
//Audio
//Now first load your audio file using AVURLAsset. Make sure you give the correct path of your videos.
NSURL *audio_url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:_video.musicFileName ofType:@"mp3"]];
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_url options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
//Now we are creating the first AVMutableCompositionTrack containing our audio and add it to our AVMutableComposition object.
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//Video
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSString *fistPartMovieFilename = @"firstpart";
NSString *lastPartMovieFilename = @"lastpart";
//[_video.selectedVideosFileName insertObject:fistPartMovieFilename atIndex:0];
//[_video.selectedVideosFileName addObject:lastPartMovieFilename];
NSMutableArray *timeRanges = [NSMutableArray arrayWithCapacity:_video.selectedVideosFileName.count];
NSMutableArray *tracks = [NSMutableArray arrayWithCapacity:_video.selectedVideosFileName.count];
NSLog(#"_video.selectedVideosFileName %#",_video.selectedVideosFileName);
for (int i=0; i<[_video.selectedVideosFileName count]; i++)
{
AVURLAsset *assetClip;
NSString *fileName = [_video.selectedVideosFileName objectAtIndex:i];
if ([[_video.selectedVideosFileName objectAtIndex:i] isKindOfClass:[NSURL class]])
{
assetClip = [AVAsset assetWithURL:[_video.selectedVideosFileName objectAtIndex:i]];
}
else
{
assetClip = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:[NSString stringWithString:fileName] ofType:@"mp4"]] options:nil];
}
AVAssetTrack *clipVideoTrack = [[assetClip tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[timeRanges addObject:[NSValue valueWithCMTimeRange:CMTimeRangeMake(kCMTimeZero, assetClip.duration)]];
[tracks addObject:clipVideoTrack];
NSLog(#"timeRanges %#",timeRanges);
NSLog(#"tracks %#",tracks);
NSLog(#"assetClip %#",assetClip);
NSLog(#"clipVideoTrack %#",clipVideoTrack);
}
NSError *error = nil;
BOOL success = [compositionVideoTrack insertTimeRanges:timeRanges
ofTracks:tracks
atTime:kCMTimeZero
error:&error];
if (!success || error) {
NSLog(@"video error %@", error);
}
The error I get when running on the device is: video error
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo=0x1704707c0
{NSUnderlyingError=0x174247050 "The operation couldn’t be completed. (OSStatus error -12780.)", NSLocalizedFailureReason=An unknown error occurred (-12780), NSLocalizedDescription=The operation could not be completed
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"Zagoori-%d.mov",arc4random() % 1000]];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
//Now create an AVAssetExportSession object that will save your final video at specified path.
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetLowQuality];
NSLog(#"supportedFileTypes %#", [_assetExport supportedFileTypes]);
_assetExport.outputFileType = #"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
switch ([_assetExport status]) {
case AVAssetExportSessionStatusExporting:
NSLog(@"Export in progress ");
break; // without this break, an in-progress status falls through and cancels the export
case AVAssetExportSessionStatusFailed:
NSLog(@"Export failed (%ld): %@", (long)[[_assetExport error] code], [[_assetExport error] localizedFailureReason]);
[_assetExport cancelExport];
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export canceled");
break;
case AVAssetExportSessionStatusCompleted:
NSLog(@"Export done");
[self exportDidFinish:_assetExport];
// NSLog (@"finished writing %f", [dtStartDate timeIntervalSinceNow]);
break;
case AVAssetExportSessionStatusWaiting:
NSLog (@"AVAssetExportSessionStatusWaiting");
break;
case AVAssetExportSessionStatusUnknown:
NSLog (@"AVAssetExportSessionStatusUnknown");
break;
}
});
}];
-(void)loadMoviePlayer:(NSURL*)moviePath {
_moviePlayerController = [[MPMoviePlayerController alloc]
initWithContentURL:moviePath];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(introMovieFinished:)
name:MPMoviePlayerPlaybackDidFinishNotification
object:_moviePlayerController];
_moviePlayerController.view.hidden = NO;
_moviePlayerController.view.frame = CGRectMake(0, 0, _videoView.frame.size.width,
_videoView.frame.size.height);
_moviePlayerController.view.backgroundColor = [UIColor clearColor];
_moviePlayerController.scalingMode = MPMovieScalingModeAspectFit;
_moviePlayerController.fullscreen = NO;
[_moviePlayerController prepareToPlay];
[_moviePlayerController readyForDisplay];
[_moviePlayerController setControlStyle:MPMovieControlStyleDefault];
_moviePlayerController.shouldAutoplay = YES;
[_videoView addSubview:_moviePlayerController.view];
[_videoView setHidden:NO];
}
I am really clueless about what the problem could be. Help please! :)
So I have found the problem: it is caused by the AVURLAsset instances.
Since AVURLAsset *assetClip is declared inside the for loop, it is released when the loop ends, and the tracks I extracted from it are no longer valid either.
I kept my assetClips in an array and that solved the problem.
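A minimal sketch of that fix, assuming the loop shape from the question (the array name retainedAssets is illustrative, and the NSURL/NSString branching is omitted for brevity):
// Keep each AVURLAsset alive for as long as its tracks are used; an asset
// created inside the loop is otherwise released when its iteration ends.
NSMutableArray *retainedAssets = [NSMutableArray array];
NSMutableArray *timeRanges = [NSMutableArray array];
NSMutableArray *tracks = [NSMutableArray array];
for (NSString *fileName in _video.selectedVideosFileName) {
    NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:fileName ofType:@"mp4"]];
    AVURLAsset *assetClip = [AVURLAsset URLAssetWithURL:url options:nil];
    [retainedAssets addObject:assetClip]; // the crucial line
    AVAssetTrack *clipVideoTrack = [[assetClip tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [timeRanges addObject:[NSValue valueWithCMTimeRange:CMTimeRangeMake(kCMTimeZero, assetClip.duration)]];
    [tracks addObject:clipVideoTrack];
}
// retainedAssets must stay in scope until after insertTimeRanges:ofTracks:atTime:error: runs.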
I'm trying to mix one audio file with a video file, but I get an error: "Export failed: The operation could not be completed".
Please correct this code if there is an error.
My code:
-(void)CompileFilesToMakeMovie
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *str=[[[NSBundle mainBundle] resourcePath] stringByAppendingPathComponent:@"bgsong.mp3"];
NSString* audio_inputFilePath =str;
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
NSString* video_inputFileName = @"movie.mp4";
NSArray *paths = NSSearchPathForDirectoriesInDomains( NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *video_inputFilePath = [documentsDirectory stringByAppendingPathComponent:video_inputFileName];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
NSString* outputFileName = @"outputFile.mp4";
NSString* outputFilePath = [documentsDirectory stringByAppendingPathComponent:outputFileName];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
BOOL _success = false;
switch ([_assetExport status]) {
case AVAssetExportSessionStatusCompleted:
_success = true;
NSLog(#"Export Completed");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(#"Export Waiting");
break;
case AVAssetExportSessionStatusExporting:
NSLog(#"Export Exporting");
break;
case AVAssetExportSessionStatusFailed:
{
NSError *error = [_assetExport error];
NSLog(#"Export failed: %#", [error localizedDescription]);
break;
}
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
default:
break;
}
if (_success == true) {
ALAssetsLibrary *assetLibrary = [[ALAssetsLibrary alloc] init];
[assetLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileUrl completionBlock:^(NSURL *assetURL, NSError *error){
NSError *removeError = nil;
[[NSFileManager defaultManager] removeItemAtURL:outputFileUrl error:&removeError];
}];
}
}
];
}
Thanks in advance.
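One hedged debugging note before the answers: every insertTimeRange: call above passes error:nil, so a failing insertion stays silent and only surfaces later as the generic export failure. Capturing the error narrows things down, for example for the video track (names taken from the code above):
NSError *insertError = nil;
BOOL inserted = [a_compositionVideoTrack insertTimeRange:video_timeRange
    ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
    atTime:nextClipStartTime
    error:&insertError];
if (!inserted) {
    NSLog(@"Video insert failed: %@", insertError); // pinpoints which insertion breaks
}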
Please try this one:
AVMutableComposition* mixComposition = [AVMutableComposition composition];
//audio File
NSMutableArray *loTempArr = [[[Database sharedDBDetails] getAllUserDetails:kaudioTable] mutableCopy];
TempFile *lotemp1 = nil;
TempFile *loTemp2 = [mallVideoArray objectAtIndex:self.slectedVideoIndex];
for (int i = 0; i < [loTempArr count]; i++)
{
lotemp1 = [loTempArr objectAtIndex:i];
if (loTemp2.mTemp_Key == [lotemp1.mTemp_videorefID intValue])
{
//NSLog(#"%#",lotemp1.mTemp_AudioName);
NSString *filepath = [kDocument_Path stringByAppendingString:[NSString stringWithFormat:#"/audioFolder/%#",lotemp1.mTemp_AudioName]];
NSURL *SongURL = [NSURL fileURLWithPath:filepath];
self.audioAsset = [[AVURLAsset alloc] initWithURL:SongURL options:nil];
CMTime time2=CMTimeMake([lotemp1.mTemp_timeinvideo doubleValue]*600, 600);
AVMutableCompositionTrack *compositionCommentaryTrack2 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeSubtract(self.videoAsset.duration, time2))
ofTrack:[[self.audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:time2 error:nil];
}
}
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration)
ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGAffineTransform transform = CGAffineTransformIdentity;//
transform = videoAssetTrack.preferredTransform;
[videolayerInstruction setTransform:transform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:self.videoAsset.duration];
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
// 3.3 - Add instructions
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
float renderWidth, renderHeight;
renderWidth = self.movieController.view.frame.size.width;
renderHeight = self.movieController.view.frame.size.height;
CGSize size;
if(flipActionFlag == 4 || flipActionFlag == 5)
{
size = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
}
else
{
size = videoAssetTrack.naturalSize;
}
//NSLog(#"%#",NSStringFromCGSize(size));
mainCompositionInst.renderSize = size;//size//CGSizeMake(renderWidth, renderHeight)
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// 4 - Get path
TempFile *mnewtemp = [[TempFile alloc] init];
mnewtemp.mTemp_videoName = [NSString stringWithFormat:@"Video_%d.m4v",loTemp.mTemp_Key+1];
[[Database sharedDBDetails] insertNewRowWithData:mnewtemp forTable:kvideoTable];
NSString *myPathDocs = [kDocument_Path stringByAppendingPathComponent:
[NSString stringWithFormat:#"Video/Video_%d.m4v",loTemp.mTemp_Key+1]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie; // @"com.apple.quicktime-movie"
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
int exportStatus = exporter.status;
NSLog(#"exportStatus = %d",exportStatus);
switch (exportStatus)
{
case AVAssetExportSessionStatusFailed: { NSError *exportError = exporter.error; NSLog(@"AVAssetExportSessionStatusFailed: %@", exportError); break; }
case AVAssetExportSessionStatusCompleted: { NSLog(@"AVAssetExportSessionStatusCompleted--"); break; }
case AVAssetExportSessionStatusUnknown: { NSLog(@"AVAssetExportSessionStatusUnknown"); break; }
case AVAssetExportSessionStatusExporting: { NSLog(@"AVAssetExportSessionStatusExporting"); break; }
case AVAssetExportSessionStatusCancelled: { NSLog(@"AVAssetExportSessionStatusCancelled"); break; }
case AVAssetExportSessionStatusWaiting: { NSLog(@"AVAssetExportSessionStatusWaiting"); break; }
default: { NSLog(@"didn't get export status"); break; }
}
dispatch_async(dispatch_get_main_queue(), ^
{
[self exportDidFinish:exporter];
});
}];
}}
- (void)exportDidFinish:(AVAssetExportSession*)session
{
[losaveView removeFromSuperview];
if (session.status == AVAssetExportSessionStatusCompleted)
{
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
if (error)
{
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed"
delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
}
else
{
self.mallVideoArray = [[[Database sharedDBDetails] getAllUserDetails:kvideoTable] mutableCopy];
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album"
delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
}
});
}];
}
}
}
I found the solution to my question.
- (IBAction)MergeAndSave:(id)sender
{
NSString *str=[[[NSBundle mainBundle] resourcePath] stringByAppendingPathComponent:@"bgsong.mp3"];
NSString* audio_inputFilePath =str;
NSURL *SongURL =[NSURL fileURLWithPath:audio_inputFilePath];
audioAsset = [AVAsset assetWithURL:SongURL];
NSString* video_inputFileName = @"movie.mp4";
NSArray *paths = NSSearchPathForDirectoriesInDomains( NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *video_inputFilePath = [documentsDirectory stringByAppendingPathComponent:video_inputFileName];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
firstAsset = [AVAsset assetWithURL:video_inputFileUrl];
secondAsset = [AVAsset assetWithURL:video_inputFileUrl];
if(firstAsset !=nil && secondAsset!=nil){
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//VIDEO TRACK
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:firstAsset.duration error:nil];
//AUDIO TRACK
if(audioAsset!=nil){
AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
}
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration));
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(self.view.frame.size.width, self.view.frame.size.height);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"mergeVideo.mov"];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self playVideo];
//[self performSelector:@selector(playVideo) withObject:nil afterDelay:2.0];
// [self exportDidFinish:exporter];
});
}];
}
}
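A hedged note on one design choice above: renderSize is taken from the view's frame, so the exported movie is rendered at screen dimensions rather than source dimensions. If the source resolution should be preserved, the render size could instead come from the track itself, for example:
// Sketch: size the video composition from the first asset's video track.
AVAssetTrack *videoTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
MainCompositionInst.renderSize = videoTrack.naturalSize;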
- (void)exportDidFinish:(AVAssetExportSession*)session
{
if(session.status == AVAssetExportSessionStatusCompleted){
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL
completionBlock:^(NSURL *assetURL, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles:nil];
[alert show];
}else{
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album" delegate:self cancelButtonTitle:@"Ok" otherButtonTitles:nil];
[alert show];
}
});
}];
}
}
audioAsset = nil;
firstAsset = nil;
secondAsset = nil;
}
I've created a method that trims and exports videos based on a given time range. It also rotates the video to landscape.
For some reason, though, the AVAssetExportSession fails when attempting to process a video that was previously trimmed using UIVideoEditorController.
Has anyone encountered this issue before?
I get this error:
AVAssetExportSessionStatusFailed: Error Domain=AVFoundationErrorDomain Code=-11841 "The operation couldn’t be completed. (AVFoundationErrorDomain error -11841.)"
For this method:
- (void) trimVideoWithRange: (CMTimeRange)range fromInputURL: (NSURL *)inputURL withCompletionHandler:(void (^)(BOOL success, NSURL *outputURL))handler;
{
AVAsset *asset = [AVURLAsset assetWithURL:inputURL];
AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
AVAssetTrack *audioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
NSLog(#"%#, %#, %#", asset, videoTrack, audioTrack);
NSError *error;
// Create a video composition
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
error = nil;
[videoCompositionTrack insertTimeRange:videoTrack.timeRange ofTrack:videoTrack atTime:CMTimeMakeWithSeconds(0, NSEC_PER_SEC) error:&error];
NSLog(#"videoCompositionTrack timeRange: %lld, %lld", videoCompositionTrack.timeRange.start.value, videoCompositionTrack.timeRange.duration.value);
if(error)
NSLog(#"videoCompositionTrack error: %#", error);
AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
error = nil;
[audioCompositionTrack insertTimeRange:audioTrack.timeRange ofTrack:audioTrack atTime:CMTimeMakeWithSeconds(0, NSEC_PER_SEC) error:&error];
NSLog(#"audioCompositionTrack timeRange: %lld, %lld", audioCompositionTrack.timeRange.start.value, audioCompositionTrack.timeRange.duration.value);
if(error)
NSLog(#"audioCompositionTrack error: %#", error);
// Rotate video if needed
CGAffineTransform rotationTransform = videoTrack.preferredTransform;
// Create video composition
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderScale = 1.0;
videoComposition.renderSize = videoTrack.naturalSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
// Apply the transform which may have been changed
AVMutableVideoCompositionLayerInstruction *instruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[instruction setTransform:rotationTransform atTime:kCMTimeZero];
// Set the time range and layer instructions for the video composition
AVMutableVideoCompositionInstruction *videoTrackInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoTrackInstruction.layerInstructions = [NSArray arrayWithObject:instruction];
videoTrackInstruction.timeRange = range;
videoComposition.instructions = @[videoTrackInstruction];
// Check so that we can proceed with our desired output preset
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:composition];
if (![compatiblePresets containsObject:AVAssetExportPreset960x540])
{
// Nope.
if(handler)
handler(NO, nil);
return;
}
// Create export session with composition
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPreset960x540];
// Configure export session
exportSession.outputURL = [NSURL fileURLWithPath:pathToTemporaryOutput];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.videoComposition = videoComposition;
exportSession.shouldOptimizeForNetworkUse = YES;
// Export async
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
{
dispatch_async(dispatch_get_main_queue(), ^{
// Everything OK. Execute completion block with URL to rendered video
if(handler)
handler(exportSession.status == AVAssetExportSessionStatusCompleted, [NSURL fileURLWithPath:pathToTemporaryOutput]);
});
}
break;
case AVAssetExportSessionStatusFailed:
{
NSError *exportError = exportSession.error;
NSLog(#"AVAssetExportSessionStatusFailed: %#", exportError.description);
dispatch_async(dispatch_get_main_queue(), ^{
// No go. Execute handler with fail.
if(handler)
handler(NO, nil);
});
}
break;
}
}];
}
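For reference, error -11841 is AVErrorInvalidVideoComposition. A hedged guess at two adjustments that often clear it in code shaped like the above, though neither is a confirmed fix for this exact case: build the layer instruction from the composition's track rather than the source asset's track, and make the instruction's time range cover the composition from time zero.
// Sketch only: layer instruction on the composition track, instruction time
// range spanning the whole composition (names from the method above).
AVMutableVideoCompositionLayerInstruction *instruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
[instruction setTransform:rotationTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *videoTrackInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoTrackInstruction.layerInstructions = @[instruction];
videoTrackInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);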
This works for me. Here exportSession is an AVAssetExportSession:
NSURL *videoFileUrl = [NSURL fileURLWithPath:self.originalVideoPath];
AVAsset *anAsset = [[AVURLAsset alloc] initWithURL:videoFileUrl options:nil];
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:anAsset];
if ([compatiblePresets containsObject:AVAssetExportPresetMediumQuality]) {
self.exportSession = [[AVAssetExportSession alloc]
initWithAsset:anAsset presetName:AVAssetExportPresetPassthrough];
// Implementation continues.
NSURL *furl = [NSURL fileURLWithPath:self.tmpVideoPath];
self.exportSession.outputURL = furl;
// Provide outputFileType according to the video format extension
self.exportSession.outputFileType = AVFileTypeQuickTimeMovie;
CMTime start = CMTimeMakeWithSeconds(self.startTime, anAsset.duration.timescale);
CMTime duration = CMTimeMakeWithSeconds(self.stopTime-self.startTime, anAsset.duration.timescale);
CMTimeRange range = CMTimeRangeMake(start, duration);
self.exportSession.timeRange = range;
self.btnTrim.hidden = YES;
self.myActivityIndicator.hidden = NO;
[self.myActivityIndicator startAnimating];
[self.exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([self.exportSession status]) {
case AVAssetExportSessionStatusFailed:
NSLog(@"Export failed: %@", [[self.exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export canceled");
break;
default:
NSLog(@"Trimming Completed");
dispatch_async(dispatch_get_main_queue(), ^{
[self.myActivityIndicator stopAnimating];
self.myActivityIndicator.hidden = YES;
});
break;
}
}];
}
I have a weird problem. In my app I am combining multiple audio and video files using the code below. The resulting video works fine once I download it from the device to a computer and play it with QuickTime, but whenever I try to play the newly composed video using either UIWebView or AVPlayer, I can only see the first of the merged video files.
Furthermore, when I tried to use MPMoviePlayerController to play it, it hangs on "Loading".
I can hear the audio for the whole composition. To make it clear, I have two arrays:
1. audioPieces with paths to audio files [song1, song2, song3];
2. moviePieces with paths to video files [movie1, movie2, movie3];
After merging those files I can see only movie1, but I can hear song1 + song2 + song3.
P.S. The songs and movies have different lengths (less than 0.2 s difference).
Any help will be appreciated.
Thank you in advance,
Janusz
-(void)putFilesTogether{
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack =[[AVMutableCompositionTrack alloc]init];
AVMutableCompositionTrack *audioCompositionTrack =[[AVMutableCompositionTrack alloc]init];
NSLog(#" movie %# audio %# ", moviePieces, audioPieces);
NSError * error;
for(int i=0;i<moviePieces.count;i++)
{
NSFileManager * fm = [NSFileManager defaultManager];
NSString * movieFilePath;
NSString * audioFilePath;
movieFilePath = [moviePieces objectAtIndex:i];
audioFilePath = [audioPieces objectAtIndex:i];
if(![fm fileExistsAtPath:movieFilePath]){
NSLog(@"Movie doesn't exist %@ ",movieFilePath);
}
else{
NSLog(@"Movie exists %@ ",movieFilePath);
}
if(![fm fileExistsAtPath:audioFilePath]){
NSLog(@"Audio doesn't exist %@ ",audioFilePath);
}
else{
NSLog(@"Audio exists %@ ",audioFilePath);
}
NSURL *videoUrl = [NSURL fileURLWithPath:movieFilePath];
NSURL *audioUrl = [NSURL fileURLWithPath:audioFilePath];
AVURLAsset *videoasset = [[AVURLAsset alloc]initWithURL:videoUrl options:nil];
AVAssetTrack *videoAssetTrack= [[videoasset tracksWithMediaType:AVMediaTypeVideo] lastObject];
AVURLAsset *audioasset = [[AVURLAsset alloc]initWithURL:audioUrl options:nil];
AVAssetTrack *audioAssetTrack= [[audioasset tracksWithMediaType:AVMediaTypeAudio] lastObject];
videoCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime tempTime = mixComposition.duration;
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioasset.duration) ofTrack:audioAssetTrack atTime:tempTime error:&error];
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoasset.duration) ofTrack:videoAssetTrack atTime:tempTime error:&error];
if(error)
{
NSLog(#"Ups. Something went wrong! %#", [error debugDescription]);
}
}
NSDate *now = [NSDate dateWithTimeIntervalSinceNow:0];
NSString *caldate = [now description];
float ran = arc4random()%1000;
NSString * pathToSave = [NSString stringWithFormat:@"Output%@%f.mp4",caldate,ran];
pathToSave =[DOCUMENTS_FOLDER stringByAppendingPathComponent:pathToSave];
NSURL *movieUrl = [NSURL fileURLWithPath:pathToSave];
AVAssetExportSession *exporter =[[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
exporter.outputFileType=AVFileTypeQuickTimeMovie;
exporter.outputURL=movieUrl;
exporter.shouldOptimizeForNetworkUse=YES;
CMTimeValue val = mixComposition.duration.value;
CMTime start=CMTimeMake(0, 600);
CMTime duration=CMTimeMake(val, 600);
CMTimeRange range=CMTimeRangeMake(start, duration);
exporter.timeRange=range;
[exporter exportAsynchronouslyWithCompletionHandler:^{
switch ([exporter status]) {
case AVAssetExportSessionStatusFailed:{
NSLog(@"Export failed: %@ %@", [[exporter error] localizedDescription],[[exporter error]debugDescription]);
NSString * message = @"Movie wasn't created. Try again later.";
[self performSelectorOnMainThread:@selector(dismissMe:) withObject:message waitUntilDone:NO];
break;}
case AVAssetExportSessionStatusCancelled:{ NSLog(@"Export canceled");
NSString * message1 = @"Movie wasn't created. Try again later.";
[self performSelectorOnMainThread:@selector(dismissMe:) withObject:message1 waitUntilDone:NO];
break;}
case AVAssetExportSessionStatusCompleted:
{
NSString * message = @"Movie was successfully created.";
CMTime duration = mixComposition.duration;
[self saveData:duration ofPath:pathToSave];
[self cleanFiles];
[self performSelectorOnMainThread:@selector(dismissMe:) withObject:message waitUntilDone:NO];
}
}
}}];
}
The problem lies in these lines:
videoCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
They need to be moved outside the for loop body.
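A minimal sketch of the corrected shape, assuming the variable names from the code above: with a single video track, players that render only one enabled video track at a time (AVPlayer, UIWebView) show every piece in sequence instead of just the first.
// Create one video track and one audio track once, before the loop...
AVMutableCompositionTrack *videoCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
for (int i = 0; i < moviePieces.count; i++) {
    // ...load videoasset/audioasset and their tracks as before...
    CMTime tempTime = mixComposition.duration;
    [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoasset.duration) ofTrack:videoAssetTrack atTime:tempTime error:&error];
    [audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioasset.duration) ofTrack:audioAssetTrack atTime:tempTime error:&error];
}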