Concatenating video sequences - iOS

I'm getting AVAssetExportSessionStatusFailed when trying to concatenate video sequences, with this message:
Export Failed with error message: Error
Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped"
UserInfo=0x170675cc0 {NSLocalizedDescription=Operation Stopped,
NSLocalizedFailureReason=The video could not be composed.}, Operation
Stopped
Here is my code:
self.finalComposition = [AVMutableComposition composition];
self.finalCompositionTrack = [_finalComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
self.finalCompositionAudioTrack = [_finalComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime currentTime = kCMTimeZero;
AVURLAsset *asset = nil;
NSMutableArray *audioTracks = [[NSMutableArray alloc] init];
NSMutableArray *videos = [[NSMutableArray alloc] init];
for (int videoCounter = 0; videoCounter < _videoArray.count ; videoCounter++)
{
id object = [_videoArray objectAtIndex:videoCounter];
if ([object isKindOfClass:[MVideoRecord class]])
{
MVideoRecord *video = object;
NSURL *url = [NSURL fileURLWithPath:video.pathToVideo];
NSFileManager *fileManager = [NSFileManager defaultManager];
if (![fileManager fileExistsAtPath:video.pathToVideo])
{
[self showError:@"Invalid video"];
}
NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
asset = [AVURLAsset URLAssetWithURL:url options:options];
NSError *error = nil;
if (!self.videoComposition)
{
self.videoComposition = [AVMutableVideoComposition videoComposition];
self.videoComposition.frameDuration = CMTimeMake(1, 30);
self.videoComposition.renderSize = CGSizeMake(640, 360);
self.videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
self.videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
self.videoCompositionLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:self.finalCompositionTrack];
}
for (AVAssetTrack *track in asset.tracks)
{
CGSize size = track.naturalSize;
if(track.naturalTimeScale == 600)
{
CGAffineTransform transform = [track preferredTransform];
int orientation = [self orientationForTrack: asset];
if (orientation < 2)
{
float x = 640/size.width;
float y = 360/size.height;
CGAffineTransform videoScale = CGAffineTransformMakeScale(x, y);
[_videoCompositionLayerInstruction setTransform:CGAffineTransformConcat(transform, videoScale) atTime:currentTime];
}
else
{
float s = 480/size.height;
CGAffineTransform new = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s,s));
[_videoCompositionLayerInstruction setTransform:new atTime:currentTime];
}
if (![_finalCompositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(video.videoStart.doubleValue, 600), CMTimeMakeWithSeconds(video.videoEnd.doubleValue, 600)) ofTrack:track atTime:currentTime error:&error])
{
[self showError:error.localizedFailureReason];
}
}
else if (track.naturalTimeScale == 44100)
{
CMTime start = kCMTimeZero;
CMTime duration = CMTimeMakeWithSeconds(video.videoEnd.doubleValue, 600);
NSError *error;
[_finalCompositionAudioTrack insertTimeRange:CMTimeRangeMake(start, duration)
ofTrack:[[track.asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:currentTime error:&error];
NSLog(@"%@", error);
}
}
currentTime = CMTimeAdd(currentTime, CMTimeMake(video.videoEnd.doubleValue*600, 600));
}
}
//apply the translation to video composition
_videoCompositionInstruction.layerInstructions = [NSArray arrayWithObject: _videoCompositionLayerInstruction];
_videoComposition.instructions = [NSArray arrayWithObject:_videoCompositionInstruction];
//get filepath of last object...
MVideoRecord *lastRecord = [_videoArray objectAtIndex:_videoArray.count - 2];
NSString *finalExportURLString = [lastRecord.pathToVideo stringByReplacingOccurrencesOfString:@".MOV" withString:@"_finalExport.mp4"];
//testing fix for video missing audio after final export
//string = [exportURL.absoluteString stringByReplacingOccurrencesOfString:@".MOV" withString:@"_finalExport.MOV"];
// File Management
NSFileManager *fileManager = [NSFileManager defaultManager];
self.finalExportURL = [NSURL fileURLWithPath:finalExportURLString];
self.finalExportSession = [[AVAssetExportSession alloc] initWithAsset:_finalComposition presetName:TEST_EXPORT_SESSION_QUALITY];
if ([fileManager fileExistsAtPath:self.finalExportURL.path])
{
NSError *fileError = nil;
if (![fileManager removeItemAtPath:finalExportURLString error:&fileError])
{
DCLog(#"Error removing old path: %#", fileError.localizedDescription);
}
}
_finalExportSession.outputURL = self.finalExportURL;
_finalExportSession.outputFileType = @"public.mpeg-4";
_finalExportSession.videoComposition = self.videoComposition;
[self.finalExportSession exportAsynchronouslyWithCompletionHandler:^{
switch (_finalExportSession.status)
{
case AVAssetExportSessionStatusFailed:
{
DCLog(@"Export Failed with error message: %@, %@", _finalExportSession.error, _finalExportSession.error.localizedDescription);
break;
}
case AVAssetExportSessionStatusCompleted:
{
DCLog(#"Export Success");
break;
}
};
}];
What am I doing wrong?
The weirdest part is that if I change:
[finalCompositionAudioTrack insertTimeRange:CMTimeRangeMake(start, duration)
ofTrack:[[track.asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:currentTime error:&error];
to:
[finalCompositionAudioTrack insertTimeRange:CMTimeRangeMake(start, duration)
ofTrack:[[track.asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:&error];
It works, but of course the audio plays in the wrong place: the audio for the first video plays during the second one.

I fixed my problem by creating a separate AVMutableCompositionTrack for each audio track. I moved the line below inside the loop and it worked.
compositionAudioTrack = [_finalComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
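A minimal sketch of that change, reusing the variables from the question's loop (asset, video, currentTime); the time-range math is simplified and hypothetical:
// Inside the loop over _videoArray: one audio composition track per clip,
// so each clip's audio keeps its own position on the timeline.
AVMutableCompositionTrack *clipAudioTrack = [_finalComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *sourceAudioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
if (sourceAudioTracks.count > 0) {
    NSError *audioError = nil;
    CMTime clipDuration = CMTimeMakeWithSeconds(video.videoEnd.doubleValue, 600);
    [clipAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, clipDuration)
                            ofTrack:[sourceAudioTracks objectAtIndex:0]
                             atTime:currentTime
                              error:&audioError];
}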

Related

Making an AVFileTypeMPEG4 video file with AVAssetExportSession and AVMutableComposition

I am using the "OWVideoProcessor" library to cut parts of a live recording. The video works fine on any Apple device, but when I play it in a browser (Dropbox) it has some seconds added at the front, and the audio is missing from those added seconds. You can see an example of these videos here: https://www.dropbox.com/s/2vyhqlfgfh6gzlk/file32167%281%29.mp4?dl=0
If you download the video on an Apple device, it is 20 seconds long; if you play it in the browser, it is 29 seconds.
This is the code for stitching the video:
- (void)stitchVideoWithDestinationPath:(NSString *)destinationPath completion:(void(^)(NSError *error))completion {
[self.exportSession cancelExport];
NSLog(#"export started to path: %#", destinationPath);
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime startTime = kCMTimeZero;
int lastIndex = self.segmentStart + self.segmentCount - 1;
NSLog(#"Stitching segments in interval: [%d - %d]", self.segmentStart, lastIndex);
for (int i = self.segmentCount - 5; i < lastIndex; i++) {
CMTimeShow(startTime);
NSURL *url = [OWUtilities urlForRecordingSegmentCount:i basePath:self.basePath];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:#{AVURLAssetPreferPreciseDurationAndTimingKey: #(YES)}];
NSAssert(asset, #"Invalid asset at: %#", url);
BOOL hasAllTracks = [[asset tracks] count] >= 2;
if (hasAllTracks) {
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVAssetTrack *track = nil;
track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[videoTrack insertTimeRange:timeRange ofTrack:track atTime:startTime error:nil];
track = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioTrack insertTimeRange:timeRange ofTrack:track atTime:startTime error:nil];
startTime = CMTimeAdd(startTime, asset.duration);
}
}
NSTimeInterval segmentsDuration = CMTimeGetSeconds(startTime);
NSLog(#"Total segments duration: %.2f", segmentsDuration);
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
if (![[NSFileManager defaultManager] fileExistsAtPath:destinationPath]) {
NSArray *filePathsArray = [NSArray new];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
filePathsArray = [[NSFileManager defaultManager] subpathsOfDirectoryAtPath:documentsDirectory error:nil];
documentsDirectory = [documentsDirectory stringByAppendingString:@"/uploads/"];
documentsDirectory = [documentsDirectory stringByAppendingString:[destinationPath lastPathComponent]];
if([[NSFileManager defaultManager] fileExistsAtPath:documentsDirectory]) {
destinationPath = documentsDirectory;
}
}
exporter.outputURL = [NSURL fileURLWithPath:destinationPath];
exporter.outputFileType = AVFileTypeMPEG4;
BOOL trimRange = (segmentsDuration > self.outputSegmentDuration);
if (trimRange) {
CMTime duration = CMTimeMakeWithSeconds(self.outputSegmentDuration, startTime.timescale);
NSTimeInterval startInterval = segmentsDuration - self.outputSegmentDuration;
CMTime start = CMTimeMakeWithSeconds(startInterval, startTime.timescale);
exporter.timeRange = CMTimeRangeMake(start, duration);
NSLog(#"Exporting segment:");
CMTimeRangeShow(exporter.timeRange);
NSTimeInterval segmentsDuration2 = CMTimeGetSeconds(duration);
NSLog(#"Total segments duration: %.2f", segmentsDuration2);
}
#weakify(self, exporter);
[exporter exportAsynchronouslyWithCompletionHandler:^{
#strongify(self, exporter);
NSLog(#"error: %#", exporter.error);
if (completion && (exporter.status != AVAssetExportSessionStatusCancelled)) {
completion(exporter.error);
} else if (completion) {
completion(nil);
}
if (self.exportSession == exporter) {
self.exportSession = nil;
}
}];
self.exportSession = exporter;
}
The problem was not in the code above. The problem was here:
NSDictionary *videoCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInteger:width], AVVideoWidthKey,
[NSNumber numberWithInteger:height], AVVideoHeightKey,
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInteger: bps ], AVVideoAverageBitRateKey,
[NSNumber numberWithInteger:300], AVVideoMaxKeyFrameIntervalKey,
nil], AVVideoCompressionPropertiesKey,
nil];
This code sets up the video compression settings. The AVVideoAverageBitRateKey was set too low (around 600 kbit/s) and the AVVideoMaxKeyFrameIntervalKey was set too high. So I changed AVVideoMaxKeyFrameIntervalKey to 1 and increased AVVideoAverageBitRateKey to 5000 kbit/s. This solved my issue.
This code was written to decrease the video size. You can change it in the OWVideoProcessor library.
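For reference, a sketch of the adjusted settings dictionary with those values (width, height, and the surrounding code belong to the library; the 5000 kbit/s and keyframe-interval figures are the ones quoted above):
NSDictionary *videoCompressionSettings = @{
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: @(width),
    AVVideoHeightKey: @(height),
    AVVideoCompressionPropertiesKey: @{
        AVVideoAverageBitRateKey: @(5000000),  // 5000 kbit/s, up from ~600
        AVVideoMaxKeyFrameIntervalKey: @(1),   // a keyframe on every frame
    },
};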

iOS AVAssetExportSession failed Code=-11820 only iPhone 5(c)

I want to export a video file from a composition with two videos (with audio) and one audio track. It works fine on an iPhone 5s and later, but it fails on an iPhone 5c (iOS 9.2.1). The error is returned on this:
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
[self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
} else {
NSLog(@"Export error: %@", _assetExport.error);
[self performSelectorOnMainThread:@selector(videoHasFailed) withObject:nil waitUntilDone:YES];
}
}
];
The log that it printed:
Export error: Error Domain=AVFoundationErrorDomain Code=-11820 "Cannot Complete Export" UserInfo={NSLocalizedRecoverySuggestion=Try exporting again., NSLocalizedDescription=Cannot Complete Export}
As stated, it works fine on my iPhone 5s, 6, and 6s, but my iPhone 5c returns this error. Hopefully someone has experience with this.
The full code for creating the tracks and composition:
- (void) generateVideoWithInputPath:(NSString*)inputVideo andAudioFileName:(NSString*)audioFileName andVolume:(float)volume {
NSString* introVideoPath = [[NSBundle mainBundle] pathForResource:@"IntroVideo" ofType:@"mp4"];
NSURL* introVideoUrl = [NSURL fileURLWithPath:introVideoPath];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:inputVideo];
self.outputAssetURL = NULL;
self.outputFilePath = finalVideoPath;
NSURL* outputFileUrl = [NSURL fileURLWithPath:self.outputFilePath];
unlink([self.outputFilePath UTF8String]); // remove existing result
// Create composition
AVMutableComposition* mixComposition = [AVMutableComposition composition];
// Create Asset for introVideo
AVURLAsset* introVideoAsset = [[AVURLAsset alloc] initWithURL:introVideoUrl options:nil];
// Create time ranges
CMTime introStartTime = kCMTimeZero;
CMTime introEndTime = introVideoAsset.duration;
CMTimeRange introVideo_timeRange = CMTimeRangeMake(introStartTime, introEndTime);
//add VideoTrack of introVideo to composition
NSArray* introVideoAssetTracks = [introVideoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack* introVideoAssetTrack = ([introVideoAssetTracks count] > 0 ? [introVideoAssetTracks objectAtIndex:0] : nil);
AVMutableCompositionTrack* b_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionVideoTrack insertTimeRange:introVideo_timeRange ofTrack:introVideoAssetTrack atTime:introStartTime error:nil];
// Add AudioTrack of introVideo to composition
NSArray* audioAssetTracksIntro = [introVideoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrackIntro = ([audioAssetTracksIntro count] > 0 ? [audioAssetTracksIntro objectAtIndex:0] : nil);
AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionAudioTrack insertTimeRange:introVideo_timeRange ofTrack:audioAssetTrackIntro atTime:introStartTime error:nil];
// Create Asset for inputVideo
CMTime nextClipStartTime = introEndTime;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
// Create time ranges
CMTime videoStartTime = kCMTimeZero;
CMTime videoEndTime = videoAsset.duration;
if (CMTIME_IS_INVALID(videoEndTime)) {
NSLog(#"videoEndTime is invalid");
}
CMTimeRange mainVideo_timeRange = CMTimeRangeMake(videoStartTime, videoEndTime);
// Add VideoTrack of inputVideo to composition
NSArray* videoAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack* videoAssetTrack2 = ([videoAssetTracks2 count] > 0 ? [videoAssetTracks2 objectAtIndex:0] : nil);
// CMTime audioDurationFix = CMTimeAdd(videoAsset.duration, CMTimeMakeWithSeconds(-1.0f, 1));
// CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioDurationFix);
AVMutableCompositionTrack* a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:mainVideo_timeRange ofTrack:videoAssetTrack2 atTime:nextClipStartTime error:nil];
// Add AudioTrack of inputVideo to composition
NSArray* audioAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrack2 = ([audioAssetTracks2 count] > 0 ? [audioAssetTracks2 objectAtIndex:0] : nil);
//AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack2 atTime:nextClipStartTime error:nil];
AVMutableAudioMix* audioMix = NULL;
if (audioFileName) {
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audioFileName];
// Create Asset for audio (song)
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
// Add Audio of song to composition
NSArray* audioAssetTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioAssetTrack = ([audioAssetTracks count] > 0 ? [audioAssetTracks objectAtIndex:0] : nil);
AVMutableCompositionTrack* b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];
// Set Volume of song
NSArray *tracksToDuck = [mixComposition tracksWithMediaType:AVMediaTypeAudio];
NSMutableArray *trackMixArray = [NSMutableArray array];
// for (int i = 0; i < [tracksToDuck count]; i++) {
AVAssetTrack *leTrack = [tracksToDuck objectAtIndex:0];
AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack];
[trackMix setVolume:1 atTime:kCMTimeZero];
[trackMixArray addObject:trackMix];
AVAssetTrack *leTrack2 = [tracksToDuck objectAtIndex:1];
AVMutableAudioMixInputParameters *trackMix2 = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack2];
[trackMix2 setVolume:volume atTime:kCMTimeZero];
[trackMixArray addObject:trackMix2];
// }
audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = trackMixArray;
}
// Export composition to videoFile
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie; //@"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
_assetExport.videoComposition = [self getVideoComposition:videoAsset intro:introVideoAsset composition:mixComposition];
// Set song volume audio
if (audioMix != NULL) {
_assetExport.audioMix = audioMix;
}
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
[self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
} else {
NSLog(@"Export error: %@", _assetExport.error);
[self performSelectorOnMainThread:@selector(videoHasFailed) withObject:nil waitUntilDone:YES];
}
}
];
}
- (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset intro:(AVAsset *)intro composition:(AVMutableComposition *)composition {
AVMutableCompositionTrack *compositionIntroTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *introTracksArray = [intro tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *introTrack = nil;
if (introTracksArray.count > 0) {
introTrack = [introTracksArray objectAtIndex:0];
[compositionIntroTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, intro.duration) ofTrack:introTrack atTime:kCMTimeZero error:nil];
}
NSArray *videoTracksArray = [asset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *videoTrack = nil;
if (videoTracksArray.count > 0) {
videoTrack = [videoTracksArray objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:intro.duration error:nil];
}
AVMutableVideoCompositionLayerInstruction *firstLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionIntroTrack];
AVMutableVideoCompositionLayerInstruction *secondLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CGSize videoSize;
if (videoTrack && introTrack) {
CGSize trackDimensions = [videoTrack naturalSize];
videoSize = CGSizeMake(0, 0);
// turn around for portrait
if (trackDimensions.height>trackDimensions.width) {
videoSize = CGSizeMake(trackDimensions.width, trackDimensions.height);
} else {
videoSize = CGSizeMake(trackDimensions.height, trackDimensions.width);
}
CGAffineTransform transform = videoTrack.preferredTransform;
CGAffineTransform scale = CGAffineTransformMakeScale((videoSize.width/introTrack.naturalSize.width),(videoSize.height/introTrack.naturalSize.height));
[firstLayerInst setTransform:scale atTime:kCMTimeZero];
[secondLayerInst setTransform:transform atTime:kCMTimeZero];
} else {
videoSize = [[FilteringClass sharedFilteringClass] getVideoSize];
}
CMTime totalTime = CMTimeAdd(asset.duration, intro.duration);
NSLog(#"Total videotime: %lld", totalTime.value);
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
inst.layerInstructions = [NSArray arrayWithObjects:firstLayerInst, secondLayerInst, nil];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObject:inst];
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderScale = 1.0;
return videoComposition;
}
In my opinion you are hitting the decoder limit set in AVFoundation. In iOS 5 the decoder limit is 4, and in iOS 6 it is 16, so try exporting a smaller video. If that works, the problem is with your video file; it might exceed the decoder limit.
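AVFoundation exposes no API for querying that limit, but as a rough sanity check under this theory you could count the composition's tracks before exporting; the hard-coded limit below is the iOS 6 figure quoted above, not an official constant:
// Hypothetical guard: warn when the composition holds more tracks than
// the decoder limit mentioned above (4 on iOS 5, 16 on iOS 6).
NSUInteger trackCount = mixComposition.tracks.count;
if (trackCount > 16) {
    NSLog(@"Composition has %lu tracks; this may exceed the decoder limit",
          (unsigned long)trackCount);
}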

AVAssetExportSession intermittent error 11820 "Cannot Complete Export" Suggestion=Try exporting again

EXPORT STATUS 4 Error Domain=AVFoundationErrorDomain Code=-11820 "Cannot Complete Export" UserInfo={NSLocalizedDescription=Cannot Complete Export, NSLocalizedRecoverySuggestion=Try exporting again.}
I'm experiencing an intermittent error when trying to export an AVMutableComposition containing AVMutableVideoCompositionLayerInstructions and an AVMutableVideoComposition using an AVAssetExportSession.
The objective is to merge an unlimited number of videos and apply transitions between clips using layer instructions.
P.S. The error is not consistent. It works when attempting to merge 5 clips and 18 clips, but doesn't work when attempting to merge 17 clips.
I've posted my code below. Any help is greatly appreciated.
EDIT: It seems the issue is related to the creation of multiple AVMutableCompositionTrack(s). If more than 15 or 16 are created, the error occurs. However, creating multiple AVMutableCompositionTrack(s), I believe, is necessary to overlap all the videos and create overlapping transitions.
EDIT 2: When shorter videos are selected, more videos are processed before the error occurs. Accordingly, it looks like a memory issue whereby tracks are being deallocated. However, there doesn't seem to be a memory leak based on the memory management tool.
-(void)prepareMutableCompositionForPlayback{
AVMutableComposition *mutableComposition = [[AVMutableComposition alloc] init];
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.backgroundColor = [[UIColor blackColor] CGColor];
NSMutableArray *instructionsArray = [[NSMutableArray alloc] init];
videoStartTime = kCMTimeZero;
for(int i = 0; i < videoAssetsArray.count; i++){
AVAsset *videoAsset = [videoAssetsArray objectAtIndex:i];
CMTime currentVideoDuration = [videoAsset duration];
AVMutableCompositionTrack *videoTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentVideoDuration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:videoStartTime error:nil];
CGSize videoSize = [videoTrack naturalSize];
if([videoAsset tracksWithMediaType:AVMediaTypeAudio].count > 0){
AVMutableCompositionTrack *audioTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentVideoDuration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:videoStartTime error:nil];
}
//INSTRUCTIONS - TRANSITIONS
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
int transitionNumber = [[videoTransitionsArray objectAtIndex:i] intValue];
float transitionDuration = [[videoTransitionsDurationArray objectAtIndex:i] floatValue];
if(i == 0){
[layerInstruction setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:CMTimeRangeMake(CMTimeSubtract(currentVideoDuration, CMTimeMakeWithSeconds(transitionDuration, 600)), CMTimeMakeWithSeconds(transitionDuration, 600))];
}
else{
int previousTransitionNumber = [[videoTransitionsArray objectAtIndex:i - 1] intValue];
float previousTransitionDuration = [[videoTransitionsDurationArray objectAtIndex:i - 1] floatValue];
if(i < videoAssetsArray.count - 1){
[layerInstruction setOpacityRampFromStartOpacity:1.0 toEndOpacity:1.0 timeRange:CMTimeRangeMake(videoStartTime, CMTimeMakeWithSeconds(previousTransitionDuration, 600))];
[layerInstruction setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:CMTimeRangeMake(CMTimeAdd(videoStartTime, CMTimeSubtract(currentVideoDuration, CMTimeMakeWithSeconds(transitionDuration, 600))), CMTimeMakeWithSeconds(transitionDuration, 600))];
}
else{
[layerInstruction setOpacityRampFromStartOpacity:1.0 toEndOpacity:1.0 timeRange:CMTimeRangeMake(videoStartTime, CMTimeMakeWithSeconds(previousTransitionDuration, 600))];
}
}
[instructionsArray addObject:layerInstruction];
if(i < videoAssetsArray.count - 1){
//TAKING INTO ACCOUNT THE TRANSITION DURATION TO OVERLAP VIDEOS
videoStartTime = CMTimeAdd(videoStartTime, CMTimeSubtract(currentVideoDuration, CMTimeMakeWithSeconds(transitionDuration, 600)));
}
else{
//TRANSITION NOT APPLIED TO THE END OF THE LAST CLIP
videoStartTime = CMTimeAdd(videoStartTime, currentVideoDuration);
}
}
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,videoStartTime);
mainInstruction.layerInstructions = instructionsArray;
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObjects:mainInstruction,nil];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize = CGSizeMake(1920, 1080);
NSArray* paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"videoRecordingFinalOutput.mov"];
NSURL *videoOutputURL = [[NSURL alloc] initFileURLWithPath:videoOutputPath];
AVAssetExportSession *videoExportSession = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetHighestQuality];
videoExportSession.outputURL = videoOutputURL;
videoExportSession.videoComposition = videoComposition;
videoExportSession.outputFileType = AVFileTypeQuickTimeMovie;
[videoExportSession exportAsynchronouslyWithCompletionHandler:^{
NSLog(#"EXPORT STATUS %ld %#", (long)videoExportSession.status, videoExportSession.error);
if(videoExportSession.error == NULL){
NSLog(#"EXPORT SUCCESSFUL");
[library writeVideoAtPathToSavedPhotosAlbum:videoOutputURL
completionBlock:^(NSURL *assetURL, NSError *error) {
if(error) {
NSError *error = nil;
if([[NSFileManager defaultManager] fileExistsAtPath:videoOutputPath]){
[[NSFileManager defaultManager] removeItemAtPath:videoOutputPath error:&error];
if(error){
NSLog(#"VIDEO FILE DELETE FAILED");
}
else{
NSLog(#"VIDEO FILE DELETED");
}
}
}
else{
NSError *error = nil;
if([[NSFileManager defaultManager] fileExistsAtPath:videoOutputPath]){
[[NSFileManager defaultManager] removeItemAtPath:videoOutputPath error:&error];
if(error){
NSLog(#"VIDEO FILE DELETE FAILED");
}
else{
NSLog(#"VIDEO FILE DELETED");
}
}
}
}];
}
else{
NSError *error = nil;
if([[NSFileManager defaultManager] fileExistsAtPath:videoOutputPath]){
[[NSFileManager defaultManager] removeItemAtPath:videoOutputPath error:&error];
if(error){
NSLog(#"VIDEO FILE DELETE FAILED");
}
else{
NSLog(#"VIDEO FILE DELETED");
}
}
}
}];
}
Instead of creating a new video track for each clip, why don't you try using just two video tracks and inserting the time ranges into those two, applying transitions between the two tracks?
The first video goes into videoTrack1 and the second into videoTrack2 so that the transition can be applied; then insert the third clip into track one again, and so on.
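A minimal sketch of that approach against the question's variables (mutableComposition, videoAssetsArray), with a fixed one-second transition standing in for the per-clip durations in videoTransitionsDurationArray:
// Two reusable video tracks; clips alternate between them so the tail of
// each clip can overlap the head of the next for the transition.
AVMutableCompositionTrack *trackA = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *trackB = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime cursor = kCMTimeZero;
CMTime transition = CMTimeMakeWithSeconds(1.0, 600);
for (NSUInteger i = 0; i < videoAssetsArray.count; i++) {
    AVAsset *clip = [videoAssetsArray objectAtIndex:i];
    AVMutableCompositionTrack *target = (i % 2 == 0) ? trackA : trackB;
    AVAssetTrack *src = [[clip tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [target insertTimeRange:CMTimeRangeMake(kCMTimeZero, clip.duration) ofTrack:src atTime:cursor error:nil];
    // Start the next clip before this one ends, creating the overlap that
    // the opacity ramps in the layer instructions can cross-fade over.
    cursor = CMTimeAdd(cursor, CMTimeSubtract(clip.duration, transition));
}
The opacity ramps then stay much as in the code above, but the composition never holds more than two video tracks, regardless of clip count.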

How to add artwork to an audio file, to show as the album cover?

I have one audio file and want to change its album cover artwork. Is this possible? And how can I set the album cover artwork for an audio file in iOS?
Actually, I merged two audio files and want to add album cover artwork that will show in iTunes.
Code is given below:
- (BOOL) combineVoices1
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
NSString *libraryCachesDirectory = [paths objectAtIndex:0];
libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"];
NSString *OutputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/%@.m4a",textFieldMixFile.text];
NSURL *audioFileOutput = [NSURL fileURLWithPath:OutputFilePath];
NSURL *audioFileInput1 = audioFileURL1; //<Path of original audio file>
NSURL *audioFileInput2 = audioFileURL2; //<Path of original audio file>
if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput)
{
return NO;
}
[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];
//CMTime nextClipStartTime = kCMTimeZero;
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTimeMix1;
if (playbackDelayAfterTimeMix1 > 0) {
nextClipStartTimeMix1 = CMTimeMake(playbackDelayAfterTimeMix1, 1);
}else{
nextClipStartTimeMix1 = kCMTimeZero;
}
CMTime startTimeMix1;
if (playbackDelayMix1 > 0) {
startTimeMix1 = CMTimeMake(playbackDelayMix1, 1);
}else{
startTimeMix1 = kCMTimeZero;
}
[compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1];
NSURL *url = audioFileURL1; //[NSURL fileURLWithPath:soundOne];
AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *clipAudioTrack;
if (tracks.count > 0) {
clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}else{
return NO;
}
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration) ofTrack:clipAudioTrack atTime:nextClipStartTimeMix1 error:nil];
//avAsset.commonMetadata
AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTimeMix2;
if (playbackDelayAfterTimeMix2 > 0) {
nextClipStartTimeMix2 = CMTimeMake(playbackDelayAfterTimeMix2, 1);
}else{
nextClipStartTimeMix2 = kCMTimeZero;
}
CMTime startTimeMix2;
if (playbackDelayMix2 > 0) {
startTimeMix2 = CMTimeMake(playbackDelayMix2, 1);
}else{
startTimeMix2 = kCMTimeZero;
}
[compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2];
//NSString *soundOne1 = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"caf"];
NSURL *url1 = audioFileURL2; //[NSURL fileURLWithPath:soundOne1];
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *clipAudioTrack1;
if (tracks1.count > 0) {
clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}else{
return NO;
}
[compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(startTimeMix2, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:nextClipStartTimeMix2 error:nil];
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:composition
presetName:AVAssetExportPresetAppleM4A];
if (nil == exportSession) return NO;
exportSession.outputURL = audioFileOutput;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
if (AVAssetExportSessionStatusCompleted == exportSession.status)
{
[self performSelectorOnMainThread:@selector(performAction) withObject:nil waitUntilDone:NO];
}
else if (AVAssetExportSessionStatusFailed == exportSession.status)
{
[self performSelectorOnMainThread:@selector(hideSpinningWheel) withObject:nil waitUntilDone:NO];
[[NSTSharedData instance] showAlertForTitle:@"Error!" andMessage:[NSString stringWithFormat:@"%@",[[exportSession error] localizedDescription]]];
//NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
}
}];
return YES;
}
I solved my problem and now it is working fine. I added code near the "AVAssetExportSession" part of the code above. The final method is:
- (BOOL) combineVoices1
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
NSString *libraryCachesDirectory = [paths objectAtIndex:0];
libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"];
NSString *OutputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/%@.m4a",textFieldMixFile.text];
NSURL *audioFileOutput = [NSURL fileURLWithPath:OutputFilePath];
NSURL *audioFileInput1 = audioFileURL1; //<Path of original audio file>
NSURL *audioFileInput2 = audioFileURL2; //<Path of original audio file>
if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput)
{
return NO;
}
[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];
//CMTime nextClipStartTime = kCMTimeZero;
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTimeMix1;
if (playbackDelayAfterTimeMix1 > 0) {
nextClipStartTimeMix1 = CMTimeMake(playbackDelayAfterTimeMix1, 1);
}else{
nextClipStartTimeMix1 = kCMTimeZero;
}
CMTime startTimeMix1;
if (playbackDelayMix1 > 0) {
startTimeMix1 = CMTimeMake(playbackDelayMix1, 1);
}else{
startTimeMix1 = kCMTimeZero;
}
[compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1];
NSURL *url = audioFileURL1; //[NSURL fileURLWithPath:soundOne];
AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *clipAudioTrack;
if (tracks.count > 0) {
clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}else{
return NO;
}
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration) ofTrack:clipAudioTrack atTime:nextClipStartTimeMix1 error:nil];
//avAsset.commonMetadata
AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTimeMix2;
if (playbackDelayAfterTimeMix2 > 0) {
nextClipStartTimeMix2 = CMTimeMake(playbackDelayAfterTimeMix2, 1);
}else{
nextClipStartTimeMix2 = kCMTimeZero;
}
CMTime startTimeMix2;
if (playbackDelayMix2 > 0) {
startTimeMix2 = CMTimeMake(playbackDelayMix2, 1);
}else{
startTimeMix2 = kCMTimeZero;
}
[compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2];
//NSString *soundOne1 = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"caf"];
NSURL *url1 = audioFileURL2; //[NSURL fileURLWithPath:soundOne1];
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *clipAudioTrack1;
if (tracks1.count > 0) {
clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}else{
return NO;
}
[compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(startTimeMix2, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:nextClipStartTimeMix2 error:nil];
/**
added MetadataItem
**/
AVMutableMetadataItem *artistMetadata = [[AVMutableMetadataItem alloc] init];
artistMetadata.key = AVMetadataiTunesMetadataKeyArtist;
artistMetadata.keySpace = AVMetadataKeySpaceiTunes;
artistMetadata.locale = [NSLocale currentLocale];
artistMetadata.value = uTakeTheMicArtist;
AVMutableMetadataItem *albumMetadata = [[AVMutableMetadataItem alloc] init];
albumMetadata.key = AVMetadataiTunesMetadataKeyAlbum;
albumMetadata.keySpace = AVMetadataKeySpaceiTunes;
albumMetadata.locale = [NSLocale currentLocale];
albumMetadata.value = uTakeTheMicAlbum;
AVMutableMetadataItem *songMetadata = [[AVMutableMetadataItem alloc] init];
songMetadata.key = AVMetadataiTunesMetadataKeySongName;
songMetadata.keySpace = AVMetadataKeySpaceiTunes;
songMetadata.locale = [NSLocale currentLocale];
songMetadata.value = textFieldMixFile.text;
AVMutableMetadataItem *imageMetadata = [[AVMutableMetadataItem alloc] init];
imageMetadata.key = AVMetadataiTunesMetadataKeyCoverArt;
imageMetadata.keySpace = AVMetadataKeySpaceiTunes;
imageMetadata.locale = [NSLocale currentLocale];
imageMetadata.value = imageData; //imageData is NSData of UIImage.
NSArray *metadata = [NSArray arrayWithObjects:artistMetadata, albumMetadata, songMetadata, imageMetadata, nil];
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:composition
presetName:AVAssetExportPresetAppleM4A];
if (nil == exportSession) return NO;
exportSession.metadata = metadata;
exportSession.outputURL = audioFileOutput;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
if (AVAssetExportSessionStatusCompleted == exportSession.status)
{
[self performSelectorOnMainThread:@selector(performAction) withObject:nil waitUntilDone:NO];
}
else if (AVAssetExportSessionStatusFailed == exportSession.status)
{
[self performSelectorOnMainThread:@selector(hideSpinningWheel) withObject:nil waitUntilDone:NO];
[[NSTSharedData instance] showAlertForTitle:@"Error!" andMessage:[NSString stringWithFormat:@"%@.",[[exportSession error] localizedDescription]]];
//NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
}
}];
return YES;
}

AVMutableComposition reduces Size and Resolution of final video

I created a video using an array of images.
It creates the video successfully; then I add audio to that created video file.
I create an AVMutableComposition object, add video and audio by creating AVAssetTracks, and finally export into a single video file with the help of AVAssetExportSession.
Suppose the first video (without audio) is vdo.mp4 and the final one (after adding audio) is final.mp4; my final.mp4 is lower in size and resolution than vdo.mp4.
Here is my code which combines both files:
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
NSError * error = nil;
AVMutableComposition * composition = [AVMutableComposition composition];
NSURL *url = [NSURL fileURLWithPath:filePath];
AVURLAsset * videoAsset = [AVURLAsset URLAssetWithURL:url options:nil];
AVAssetTrack * videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID: kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,videoAsset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:&error];
CMTime audioStartTime = kCMTimeZero;
for ( NSInteger i = 0;i< [mArrAudioFileNames count];i++ )
{
NSString *audioFileName = nil;
NSString *docsDir = nil;
if ( [mArrAudioFileNames objectAtIndex:i] != [NSNull null]) {
audioFileName = [mArrAudioFileNames objectAtIndex:i];
docsDir = [[self dataFolderPathForAudio] stringByAppendingPathComponent:audioFileName];
}else{
//audioFileName = @" ";
docsDir = [[NSBundle mainBundle] pathForResource:@"sample" ofType:@"mp3"];
}
// NSString *docsDir = [[self dataFolderPathForAudio] stringByAppendingPathComponent:audioFileName];
AVURLAsset * urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:docsDir] options:nil];
AVAssetTrack * audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,urlAsset.duration) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
Float64 duration = CMTimeGetSeconds(urlAsset.duration);
audioStartTime = CMTimeAdd(audioStartTime, CMTimeMake((int) ((duration * kRecordingFPS) + 0.5), kRecordingFPS));
}
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
//assetExport.videoComposition = compositionVideoTrack;
assetExport.outputFileType = AVFileTypeQuickTimeMovie; // @"com.apple.quicktime-movie";
assetExport.outputURL = [NSURL fileURLWithPath:outFilePath];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
NSLog(#"Export Complete");
[self performSelectorOnMainThread:#selector(creatingVideoDone:)
withObject:outFilePath waitUntilDone:NO];
[assetExport release];
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// Set delegate to move to view
if ( mDelegate!= nil && [mDelegate respondsToSelector:#selector(errorAlert:)])
{
[self performSelectorOnMainThread:#selector(errorOccured:)
withObject:[assetExport.error
localizedDescription]
waitUntilDone:NO];
}
[assetExport release];
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// Set delegate to move to view
if ( mDelegate!= nil && [mDelegate respondsToSelector:#selector(errorAlert:)])
{
[self performSelectorOnMainThread:#selector(errorOccured:)
withObject:[assetExport.error
localizedDescription]
waitUntilDone:NO];
}
[assetExport release];
break;
}
}];
Any help is appreciated.
Thanks.
