Cutting video on iPhone (iOS)

I have a video file from the device camera, stored at /private/var/mobile/Media/DCIM/100APPLE/IMG_0203.MOV for example, and I need to cut the first 10 seconds of it. What APIs or libraries can I use?

I found a solution using the standard AVAssetExportSession API:
- (void)getTrimmedVideoForFile:(NSString *)filePath withInfo:(NSArray *)info
{
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:filePath] options:nil];
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetLowQuality];
    NSFileManager *manager = [NSFileManager defaultManager];
    NSString *outputURL = [NSString stringWithFormat:@"/tmp/%@.mp4", [info objectAtIndex:2]];
    NSLog(@"OUTPUT: %@", outputURL);
    if (![manager fileExistsAtPath:outputURL]) {
        exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
        exportSession.shouldOptimizeForNetworkUse = YES;
        exportSession.outputFileType = AVFileTypeQuickTimeMovie;
        // By default export the whole asset; for clips longer than 20 s,
        // keep a 10-second slice starting at the 1-second mark.
        CMTime start = kCMTimeZero;
        CMTime duration = kCMTimeIndefinite;
        if ([[NSString stringWithFormat:@"%@", [info objectAtIndex:3]] floatValue] > 20.0) {
            start = CMTimeMakeWithSeconds(1.0, 600);
            duration = CMTimeMakeWithSeconds(10.0, 600);
        }
        exportSession.timeRange = CMTimeRangeMake(start, duration);
        [exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
            switch (exportSession.status) {
                case AVAssetExportSessionStatusCompleted:
                    NSLog(@"Export Complete %ld %@", (long)exportSession.status, exportSession.error);
                    [self sendVideoPreview:info];
                    break;
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"Failed: %@", exportSession.error);
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Canceled: %@", exportSession.error);
                    break;
                default:
                    break;
            }
        }];
    } else {
        // Already exported previously; reuse the existing file.
        [self sendVideoPreview:info];
    }
}

How do I save a slideshow video to the camera roll?

// NOTE: the original listing was truncated; the signature below is inferred
// from the parameters used in the body (the method name is illustrative).
- (void)createSlideshowVideoAtPath:(NSString *)videoPath
                        withSounds:(NSArray *)arrayOfSounds
                           atTimes:(NSArray *)arrayOfTime
                 completionHandler:(void (^)(NSString *))handler
{
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
    NSError *error = nil;
    // Add one audio track per sound file
    for (int i = 0; i < [arrayOfSounds count]; i++)
    {
        NSString *pathString = [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"%@", [arrayOfSounds objectAtIndex:i]] ofType:@"mp3"];
        NSLog(@"pathString = %@", pathString);
        AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:pathString] options:nil];
        AVAssetTrack *audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                       preferredTrackID:kCMPersistentTrackID_Invalid];
        CMTime audioDuration = videoAsset.duration;
        audioDurationSeconds = CMTimeGetSeconds(audioDuration);
        int startDur = [[arrayOfTime objectAtIndex:i] intValue];
        NSLog(@"startDur = %d", startDur);
        CMTime audioStartTime = CMTimeMake(i, 1);
        CMTime presentTime = CMTimeMake(1, 1);
        CMTime endTime = CMTimeMake(audioDurationSeconds, 1);
        CMTime audioEndTime = CMTimeAdd(presentTime, endTime);
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(presentTime, audioEndTime) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
    }
    NSString *movPath = [NSString stringWithFormat:@"%@/%@", documentsDirectory, @"Export.mov"];
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *vidError;
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:([videoAsset tracksWithMediaType:AVMediaTypeVideo].count > 0) ? [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] : nil
                                    atTime:kCMTimeZero
                                     error:&vidError];
    NSLog(@"Vid error = %@", vidError);
    // Export the composition as a QuickTime movie into the Documents directory
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
    NSString *videoName = @"exportFinal.mov";
    NSString *exportPath = [NSString stringWithFormat:@"%@/%@", documentsDirectory, videoName];
    NSLog(@"exportPath = %@", exportPath);
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    }
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    NSLog(@"file type %@", _assetExport.outputFileType);
    _assetExport.outputURL = exportUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (_assetExport.status)
        {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export Complete");
                [self cleanUpProcess];
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export Failed");
                NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export Cancelled");
                NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
                break;
            default:
                break;
        }
    }];
}

- (void)cleanUpProcess
{
    // Delete everything in Documents except .mov files
    NSError *error;
    NSArray *files = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectory error:&error];
    for (NSString *myFile in files) {
        if ([[myFile pathExtension] isEqualToString:@"mov"]) {
            continue;
        }
        NSString *filePath = [NSString stringWithFormat:@"%@/%@", documentsDirectory, myFile];
        if (![[NSFileManager defaultManager] removeItemAtPath:filePath error:&error]) {
            NSLog(@"Error Deleting");
        }
    }
}
The output is saved at this path:
"Users/devendrasingh/Library/Developer/CoreSimulator/Devices/394AD1C9-E950-422E-BDA1-083CDCEE83F6/data/Containers/Data/Application/98BEC162-C686-4BC1-B698-567B45D65F27/Documents/exportFinal.mov"
but it is not saved to the camera roll.
Try this:
[[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
    if (error)
        NSLog(@"%@", error);
    [[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
}];
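Note that ALAssetsLibrary was deprecated in iOS 9. A minimal sketch of the same save using the Photos framework (assuming the app already has photo library permission):

#import <Photos/Photos.h>

[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    // Create a camera roll asset from the exported video file
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:outputFileURL];
} completionHandler:^(BOOL success, NSError *error) {
    if (!success) {
        NSLog(@"Could not save video: %@", error);
    }
}];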

Unsupported header format when converting a file from .mov to .wav, happening only on devices below iOS 11 (iOS 9, 10)

Below is my code for the .mov to .wav conversion:
- (void)mp4ForURL:(NSURL *)videoURL
{
    // Create the asset from the video file URL
    AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
    NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:avAsset];
    // Check whether the video is supported for conversion
    if ([compatiblePresets containsObject:AVAssetExportPresetMediumQuality])
    {
        // Create the export session
        AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:avAsset presetName:AVAssetExportPresetMediumQuality];
        // Create a temp path to save the converted video
        NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
        NSString *myDocumentPath = [documentsDirectory stringByAppendingPathComponent:@"temp.mp4"];
        NSURL *url = [[NSURL alloc] initFileURLWithPath:myDocumentPath];
        // If the file already exists, remove the previous one
        if ([[NSFileManager defaultManager] fileExistsAtPath:myDocumentPath])
        {
            [[NSFileManager defaultManager] removeItemAtPath:myDocumentPath error:nil];
        }
        exportSession.outputURL = url;
        // Set the output file format (e.g. .3gp if you want another format)
        exportSession.outputFileType = AVFileTypeMPEG4;
        exportSession.shouldOptimizeForNetworkUse = YES;
        [exportSession exportAsynchronouslyWithCompletionHandler:^{
            switch ([exportSession status])
            {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"Export session failed");
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Export canceled");
                    break;
                case AVAssetExportSessionStatusCompleted:
                    // Video conversion finished
                    NSLog(@"Successful!");
                    [self convertMP4toCAFwithFile:myDocumentPath];
                    break;
                default:
                    break;
            }
        }];
    }
    else
    {
        NSLog(@"Video file not supported!");
    }
}
// Convert to Core Audio Format (.caf)
- (void)convertMP4toCAFwithFile:(NSString *)dstPath
{
    NSURL *dstURL = [NSURL fileURLWithPath:dstPath];
    AVMutableComposition *newAudioAsset = [AVMutableComposition composition];
    AVMutableCompositionTrack *dstCompositionTrack = [newAudioAsset addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAsset *srcAsset = [AVURLAsset URLAssetWithURL:dstURL options:nil];
    AVAssetTrack *srcTrack = [[srcAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    CMTimeRange timeRange = srcTrack.timeRange;
    NSError *error;
    if (NO == [dstCompositionTrack insertTimeRange:timeRange ofTrack:srcTrack atTime:kCMTimeZero error:&error]) {
        NSLog(@"track insert failed: %@\n", error);
        return;
    }
    AVAssetExportSession *exportSesh = [[AVAssetExportSession alloc] initWithAsset:newAudioAsset presetName:AVAssetExportPresetPassthrough];
    exportSesh.outputFileType = AVFileTypeCoreAudioFormat;
    exportSesh.outputURL = dstURL;
    [[NSFileManager defaultManager] removeItemAtURL:dstURL error:nil];
    [exportSesh exportAsynchronouslyWithCompletionHandler:^{
        AVAssetExportSessionStatus status = exportSesh.status;
        NSLog(@"exportAsynchronouslyWithCompletionHandler: %li\n", (long)status);
        if (AVAssetExportSessionStatusFailed == status) {
            NSLog(@"FAILURE: %@\n", exportSesh.error);
        } else if (AVAssetExportSessionStatusCompleted == status) {
            NSLog(@"SUCCESS!\n");
            NSError *error;
            // Rename the exported .caf with a random number and timestamp
            NSString *onlyPath = [dstPath stringByDeletingLastPathComponent];
            NSInteger randomNumber = arc4random() % 100000;
            strDateAndTime = [self getCurrentDateAndTime];
            strAudioName = [NSString stringWithFormat:@"%@_%ld_%@.%@", @"Audio", (long)randomNumber, strDateAndTime, @"caf"];
            NSString *toPathString = [NSString stringWithFormat:@"%@/%@", onlyPath, strAudioName];
            [[NSFileManager defaultManager] moveItemAtPath:dstPath toPath:toPathString error:&error];
            [self convertToWavForFilePath:toPathString];
        }
    }];
}
- (void)convertToWavForFilePath:(NSString *)cafFilePath
{
    // Set up an AVAssetReader to read from the .caf file
    NSURL *assetURL = [NSURL fileURLWithPath:cafFilePath];
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
    NSError *assetError = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset error:&assetError];
    if (assetError) {
        NSLog(@"error: %@", assetError);
        return;
    }
    AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                                                                                     audioSettings:nil];
    if (![assetReader canAddOutput:assetReaderOutput]) {
        NSLog(@"can't add reader output... die!");
        return;
    }
    [assetReader addOutput:assetReaderOutput];
    NSInteger randomNumber = arc4random() % 100000;
    _finalAudioName = [NSString stringWithFormat:@"%@_%ld_%@", @"Audio", (long)randomNumber, strDateAndTime];
    NSArray *docDirs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docDir = [docDirs objectAtIndex:0];
    __block NSString *wavFilePath = [[docDir stringByAppendingPathComponent:_finalAudioName] stringByAppendingPathExtension:@"wav"];
    _finalAudioName = [wavFilePath lastPathComponent];
    if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
    }
    NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];
    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
                                                          fileType:AVFileTypeWAVE
                                                             error:&assetError];
    if (assetError)
    {
        NSLog(@"error: %@", assetError);
        return;
    }
    // 16-bit, 44.1 kHz, mono linear PCM output settings
    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                                    [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                    [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                    [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                    [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
                                    [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                                    [NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
                                    [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                                    nil];
    AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                              outputSettings:outputSettings];
    if ([assetWriter canAddInput:assetWriterInput])
    {
        [assetWriter addInput:assetWriterInput];
    }
    else
    {
        NSLog(@"can't add asset writer input... die!");
        return;
    }
    assetWriterInput.expectsMediaDataInRealTime = NO;
    [assetWriter startWriting];
    [assetReader startReading];
    AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
    CMTime startTime = CMTimeMake(0, soundTrack.naturalTimeScale);
    [assetWriter startSessionAtSourceTime:startTime];
    __block UInt64 convertedByteCount = 0;
    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
    [assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
    {
        while (assetWriterInput.readyForMoreMediaData)
        {
            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            if (nextBuffer)
            {
                // Append the buffer and log conversion progress
                [assetWriterInput appendSampleBuffer:nextBuffer];
                convertedByteCount += CMSampleBufferGetTotalSampleSize(nextBuffer);
                CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(nextBuffer);
                CMTime sampleDuration = CMSampleBufferGetDuration(nextBuffer);
                if (CMTIME_IS_NUMERIC(sampleDuration))
                    progressTime = CMTimeAdd(progressTime, sampleDuration);
                float dProgress = CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(songAsset.duration);
                NSLog(@"%f", dProgress);
            }
            else
            {
                // No more buffers: finish writing and hand off the .wav
                [assetWriterInput markAsFinished];
                [assetReader cancelReading];
                [assetWriter finishWritingWithCompletionHandler:^{
                    [self uploadAudioOnAWSFromPath:wavFilePath];
                }];
            }
        }
    }];
}
In the code above, the final converted .wav file has to be sent to an AWS server to get text from it; for speech-to-text we use the Google Speech API. Whenever I send a file that was converted on a device below iOS 11 (iOS 9 or 10), it shows me this error:
Error: WAV header indicates an unsupported format.
The error is returned by the Google Speech API while converting the audio file (.wav) to text.
The code works perfectly on iOS 11 devices; only devices below iOS 11 (iOS 9, 10) do not work properly.
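One way to narrow this down (a diagnostic sketch, not a fix; wavFilePath is the output produced above) is to read the finished file's header back with AVAudioFile and compare what an iOS 9/10 device writes against an iOS 11 device:

NSError *err = nil;
AVAudioFile *wav = [[AVAudioFile alloc] initForReading:[NSURL fileURLWithPath:wavFilePath] error:&err];
if (wav) {
    // Expect linear PCM, 16-bit, 44.1 kHz, mono to match the writer settings
    NSLog(@"WAV header format: %@", wav.fileFormat.settings);
} else {
    NSLog(@"Could not open WAV: %@", err);
}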

Objective-C/iOS - AVAsset error

I'm trying to use the following code to save a snippet of audio from a URL stream (e.g., a radio station broadcast):
float vocalStartMarker = 0.0;
float vocalEndMarker = 20.0;
NSURL *audioFileInput = [[NSURL alloc] initWithString:@"http://streamedaudio.mp3"];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsPath = [paths objectAtIndex:0]; // Get the docs directory
NSString *filePath = [documentsPath stringByAppendingPathComponent:@"TestName.m4a"]; // Add the file name
NSURL *audioFileOutput = [[NSURL alloc] initWithString:filePath];
NSLog(@"%@", audioFileOutput);
if (!audioFileInput || !audioFileOutput)
{
    return NO;
}
[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];
AVAsset *asset = [AVAsset assetWithURL:audioFileInput];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset
                                                                        presetName:AVAssetExportPresetAppleM4A];
if (exportSession == nil)
{
    return NO;
}
CMTime startTime = CMTimeMake((int)(floor(vocalStartMarker * 100)), 100);
CMTime stopTime = CMTimeMake((int)(ceil(vocalEndMarker * 100)), 100);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
exportSession.outputURL = audioFileOutput;
exportSession.outputFileType = AVFileTypeAppleM4A;
exportSession.timeRange = exportTimeRange;
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
    if (AVAssetExportSessionStatusCompleted == exportSession.status)
    {
        // It worked!
    }
    else if (AVAssetExportSessionStatusFailed == exportSession.status)
    {
        NSLog(@"%@", exportSession.error);
        return;
    }
}];
I get the following error:
2015-12-26 14:17:11.523 SaveURL[4135:24621] Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The operation is not supported for this media.}
Is this because I'm trying to convert an MP3 to M4A?
Thanks in advance for your help with this.
App Transport Security restricts requests to HTTPS unless you explicitly allow exceptions.
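If you do need to load that HTTP URL, one option during development (an Info.plist sketch; prefer a narrowly scoped exception in production) is:

<key>NSAppTransportSecurity</key>
<dict>
    <key>NSAllowsArbitraryLoads</key>
    <true/>
</dict>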

AVMutableComposition reduces Size and Resolution of final video

I created a video using an array of images.
The video is created successfully, and then I add audio to the created video file.
I create an AVMutableComposition object, add the video and audio by creating AVAssetTracks, and finally export into a single video file with AVAssetExportSession.
Suppose the first video (without audio) is vdo.mp4 and the final one (after adding audio) is final.mp4; final.mp4 is lower in size and resolution than vdo.mp4.
Here is my code, which combines the two files:
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
NSError *error = nil;
AVMutableComposition *composition = [AVMutableComposition composition];
NSURL *url = [NSURL fileURLWithPath:filePath];
AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:url options:nil];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                             preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:&error];
// Append each audio file back to back, advancing audioStartTime as we go
CMTime audioStartTime = kCMTimeZero;
for (NSInteger i = 0; i < [mArrAudioFileNames count]; i++)
{
    NSString *audioFileName = nil;
    NSString *docsDir = nil;
    if ([mArrAudioFileNames objectAtIndex:i] != [NSNull null]) {
        audioFileName = [mArrAudioFileNames objectAtIndex:i];
        docsDir = [[self dataFolderPathForAudio] stringByAppendingPathComponent:audioFileName];
    } else {
        docsDir = [[NSBundle mainBundle] pathForResource:@"sample" ofType:@"mp3"];
    }
    AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:docsDir] options:nil];
    AVAssetTrack *audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, urlAsset.duration) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
    Float64 duration = CMTimeGetSeconds(urlAsset.duration);
    audioStartTime = CMTimeAdd(audioStartTime, CMTimeMake((int)((duration * kRecordingFPS) + 0.5), kRecordingFPS));
}
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.outputURL = [NSURL fileURLWithPath:outFilePath];
[assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    switch (assetExport.status)
    {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"Export Complete");
            [self performSelectorOnMainThread:@selector(creatingVideoDone:)
                                   withObject:outFilePath waitUntilDone:NO];
            [assetExport release];
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Export Failed");
            NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
            // Notify the delegate of the failure
            if (mDelegate != nil && [mDelegate respondsToSelector:@selector(errorAlert:)])
            {
                [self performSelectorOnMainThread:@selector(errorOccured:)
                                       withObject:[assetExport.error localizedDescription]
                                    waitUntilDone:NO];
            }
            [assetExport release];
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Export Cancelled");
            NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
            if (mDelegate != nil && [mDelegate respondsToSelector:@selector(errorAlert:)])
            {
                [self performSelectorOnMainThread:@selector(errorOccured:)
                                       withObject:[assetExport.error localizedDescription]
                                    waitUntilDone:NO];
            }
            [assetExport release];
            break;
        default:
            break;
    }
}];
Any help is appreciated.
Thanks.
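One thing worth checking, as a hedged suggestion rather than a confirmed fix: AVAssetExportPresetMediumQuality re-encodes and may downscale the video. If no re-rendering is needed, a passthrough preset keeps the source dimensions and bitrate; a minimal sketch using the same composition and output path as above:

// Passthrough avoids re-encoding, so size and resolution match the source
AVAssetExportSession *passthroughExport =
    [[AVAssetExportSession alloc] initWithAsset:composition
                                     presetName:AVAssetExportPresetPassthrough];
passthroughExport.outputFileType = AVFileTypeQuickTimeMovie;
passthroughExport.outputURL = [NSURL fileURLWithPath:outFilePath];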

Trim video without displaying UIVideoEditorController?

Currently I'm working on an application that deals with videos.
In my application the user can trim a video; I have a custom control for selecting the start and end times, and I need to trim the video to these two values. I tried UIVideoEditorController as follows:
UIVideoEditorController *videoEditor = [[[UIVideoEditorController alloc] init] autorelease];
videoEditor.delegate = self;
NSString *videoPath = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"MOV"];
if ([UIVideoEditorController canEditVideoAtPath:videoPath])
{
    videoEditor.videoPath = videoPath;
    [self presentModalViewController:videoEditor animated:YES];
}
else
{
    NSLog(@"can't edit video at %@", videoPath);
}
But the issue is that the above code displays Apple's video editor control, where the user can perform some operations. I don't want to display this view, because I have already displayed the video in MPMoviePlayer and received the user's input (start and end times) for trimming on a custom control.
How can I trim a video without displaying UIVideoEditorController?
Finally I found the solution.
We can use AVAssetExportSession to trim a video without displaying UIVideoEditorController.
My code looks like this:
- (void)splitVideo:(NSString *)outputURL
{
    @try
    {
        NSString *videoBundleURL = [[NSBundle mainBundle] pathForResource:@"Video_Album" ofType:@"mp4"];
        AVAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoBundleURL] options:nil];
        NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:asset];
        if ([compatiblePresets containsObject:AVAssetExportPresetLowQuality])
        {
            [self trimVideo:outputURL assetObject:asset];
        }
        videoBundleURL = nil;
        [asset release];
        asset = nil;
        compatiblePresets = nil;
    }
    @catch (NSException *e)
    {
        NSLog(@"Exception Name:%@ Reason:%@", [e name], [e reason]);
    }
}
This method trims the video:
- (void)trimVideo:(NSString *)outputURL assetObject:(AVAsset *)asset
{
    @try
    {
        AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetLowQuality];
        exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
        exportSession.outputFileType = AVFileTypeQuickTimeMovie;
        // Trim to the user-selected start/stop times (in seconds)
        CMTime start = CMTimeMakeWithSeconds(splitedDetails.startTime, 1);
        CMTime duration = CMTimeMakeWithSeconds((splitedDetails.stopTime - splitedDetails.startTime), 1);
        exportSession.timeRange = CMTimeRangeMake(start, duration);
        [self checkExportSessionStatus:exportSession];
        [exportSession release];
        exportSession = nil;
    }
    @catch (NSException *e)
    {
        NSLog(@"Exception Name:%@ Reason:%@", [e name], [e reason]);
    }
}
This method checks the status of trimming:
- (void)checkExportSessionStatus:(AVAssetExportSession *)exportSession
{
    [exportSession exportAsynchronouslyWithCompletionHandler:^(void)
    {
        switch ([exportSession status])
        {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export Completed");
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Error in exporting");
                break;
            default:
                break;
        }
    }];
}
I'm calling the splitVideo method from the export button's action method and pass the output URL as the argument.
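For reference, a sketch of that call (the output file name here is hypothetical):

NSString *docsDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *outputPath = [docsDir stringByAppendingPathComponent:@"trimmed.mov"]; // hypothetical name
[self splitVideo:outputPath];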
Alternatively, import AVFoundation/AVFoundation.h and use:
- (BOOL)trimVideofile
{
    float videoStartTime; // set the start time of the trim (seconds)
    float videoEndTime;   // set the end time of the trim (seconds)
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
    NSString *libraryCachesDirectory = [paths objectAtIndex:0];
    libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"];
    NSString *outputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
    NSURL *videoFileOutput = [NSURL fileURLWithPath:outputFilePath];
    NSURL *videoFileInput; // <path of the original video file>
    if (!videoFileInput || !videoFileOutput)
    {
        return NO;
    }
    [[NSFileManager defaultManager] removeItemAtURL:videoFileOutput error:NULL];
    AVAsset *asset = [AVAsset assetWithURL:videoFileInput];
    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset
                                                                            presetName:AVAssetExportPresetLowQuality];
    if (exportSession == nil)
    {
        return NO;
    }
    // Build the time range on a 1/100 s timescale
    CMTime startTime = CMTimeMake((int)(floor(videoStartTime * 100)), 100);
    CMTime stopTime = CMTimeMake((int)(ceil(videoEndTime * 100)), 100);
    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
    exportSession.outputURL = videoFileOutput;
    exportSession.timeRange = exportTimeRange;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^
    {
        if (AVAssetExportSessionStatusCompleted == exportSession.status)
        {
            NSLog(@"Export OK");
        }
        else if (AVAssetExportSessionStatusFailed == exportSession.status)
        {
            NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
        }
    }];
    return YES;
}
