I am trimming an audio file using AVAssetExportSession, but I'm unable to achieve a fade-in/fade-out effect in the saved file. Here's the code that I am using:
[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];
AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:file_path]];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset
presetName:AVAssetExportPresetAppleM4A];
time = floorf(Endtime-startTime);
CMTime start = CMTimeMake(0.0*100,100);
CMTime stop = CMTimeMake(time*100,100);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(start, stop);
AVMutableAudioMix *mutableAudioMix = [AVMutableAudioMix audioMix];
AVURLAsset* audio = [[AVURLAsset alloc]initWithURL:[NSURL URLWithString:file_path] options:nil];
AVAssetTrack* audioTrack = [[audio tracksWithMediaType:AVMediaTypeAudio]
objectAtIndex:0];
AVMutableAudioMixInputParameters *mixParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack];
[mixParameters setVolumeRampFromStartVolume:1.0 toEndVolume:0.0 timeRange:exportTimeRange];
mutableAudioMix.inputParameters = @[mixParameters];
exportSession.audioMix = mutableAudioMix;
exportSession.outputURL = audioFileOutput;
exportSession.outputFileType = AVFileTypeAppleM4A;
exportSession.timeRange = exportTimeRange;
[SVProgressHUD showWithStatus:@"Saving Recording.." maskType:SVProgressHUDMaskTypeBlack];
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
if (AVAssetExportSessionStatusCompleted == exportSession.status)
{
[SVProgressHUD dismiss];
NSLog(#"Success!");
NSLog(#"saving complete %#", exportSession.description);
NSLog(#"the Song Path : %#", strOutputFilePath);
}
else if (AVAssetExportSessionStatusFailed == exportSession.status)
{
NSLog(#"failed");
[SVProgressHUD showErrorWithStatus:#"Record Again"];
}
[[NSFileManager defaultManager] removeItemAtPath:file_path error:nil];
});
}];
I don't know what I'm missing. Any help is appreciated.
Well, try something like this:
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:assetTrack];
exportAudioMixInputParameters.trackID = [[[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0] trackID];
NSMutableArray* inputParameters = [NSMutableArray arrayWithCapacity:1];
CMTime startFadeInTime = start;
CMTime endFadeInTime = CMTimeMake((startTime+2)*100, 100);
CMTime startFadeOutTime = CMTimeMake((time-2)*100, 100);
CMTime endFadeOutTime = CMTimeMake(time*100, 100);
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime, endFadeInTime);
CMTimeRange fadeOutTimeRange = CMTimeRangeFromTimeToTime(startFadeOutTime, endFadeOutTime);
[exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0 timeRange:fadeInTimeRange];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:1.0 toEndVolume:0.0 timeRange:fadeOutTimeRange];
[inputParameters insertObject:exportAudioMixInputParameters atIndex:0];
// attach the ramp parameters to the mix, and the mix to the export session
exportAudioMix.inputParameters = inputParameters;
exportSession.audioMix = exportAudioMix;
Hope it will help. Happy coding.
Related
I'm using the following code:
- (AVAssetExportSession *)testRecording
{
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
NSURL *url = [[NSBundle mainBundle] URLForResource:kTestSongName withExtension:@"mp3"];
AVAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
CMTimeRange range = CMTimeRangeMake(CMTimeMake(0, 600), asset.duration);
NSError *editError;
BOOL result = [composition insertTimeRange:range ofAsset:asset atTime:kCMTimeZero error:&editError];
if (!result) {
NSLog(#"Error inserting timerange");
}
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:composition
presetName:AVAssetExportPresetAppleM4A];
return exportSession;
}
This does what it's supposed to on iOS 8, but on iOS 7 exportSession is always nil at the end of the method. I tried using assets in different formats (mp3, caf), but the result was the same.
If I play the composition instead of exporting it the sound is fine:
AVPlayerItem * item = [[AVPlayerItem alloc] initWithAsset:composition];
AVPlayer * player = [AVPlayer playerWithPlayerItem:item];
[player play];
Any idea about what might be wrong?
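One way to narrow this down is to ask AVFoundation directly whether the preset is compatible with the composition; a minimal diagnostic sketch, using APIs available since iOS 6:
NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:composition];
NSLog(@"Compatible presets: %@", presets);
[AVAssetExportSession determineCompatibilityOfExportPreset:AVAssetExportPresetAppleM4A
                                                 withAsset:composition
                                            outputFileType:AVFileTypeAppleM4A
                                         completionHandler:^(BOOL compatible) {
    // NO here would explain a nil export session for this preset/asset pair
    NSLog(@"AppleM4A compatible: %d", compatible);
}];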
exportSession = [[AVAssetExportSession alloc]
initWithAsset:songAsset
presetName:AVAssetExportPresetAppleM4A];
exportSession.outputFileType = @"com.apple.m4a-audio";
filePath = [[docDir stringByAppendingPathComponent:fileName] stringByAppendingPathExtension:@"m4a.mov"];
exportSession.outputURL = [NSURL fileURLWithPath:filePath];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (exportSession.status == AVAssetExportSessionStatusCompleted) {
NSString *newPath = [NSString stringWithFormat:@"%@/%@", documentDirectory, [[[filePath lastPathComponent] substringToIndex:[[filePath lastPathComponent] length]-5] stringByAppendingString:@"a"]];
[[NSFileManager defaultManager] removeItemAtPath:newPath error:nil];
NSError *error = nil;
[[NSFileManager defaultManager] moveItemAtPath:filePath toPath:newPath error:&error];
}
if (exportSession.status == AVAssetExportSessionStatusCancelled) {
}
if (exportSession.status == AVAssetExportSessionStatusExporting) {
}
if (exportSession.status == AVAssetExportSessionStatusFailed) {
NSLog(#"erros:%#",exportSession.error);
}
if (exportSession.status == AVAssetExportSessionStatusUnknown) {
}
if (exportSession.status == AVAssetExportSessionStatusWaiting) {
}
}];
If you want to save in mp3 format, use ffmpeg.
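For example, something along these lines on the command line (assuming an ffmpeg build with libmp3lame compiled in):
ffmpeg -i input.m4a -codec:a libmp3lame -qscale:a 2 output.mp3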
I have an audio file and want to change its album cover artwork. Is that possible? And how can I set the album cover artwork for an audio file in iOS programming?
Actually, I merged two audio files and want to add album cover artwork, which will show in iTunes.
The code is given below:
- (BOOL) combineVoices1
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
NSString *libraryCachesDirectory = [paths objectAtIndex:0];
libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"];
NSString *OutputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/%@.m4a",textFieldMixFile.text];
NSURL *audioFileOutput = [NSURL fileURLWithPath:OutputFilePath];
NSURL *audioFileInput1= audioFileURL1;//<Path of original audio file>
NSURL *audioFileInput2= audioFileURL2;//<Path of original audio file>
if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput)
{
return NO;
}
[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];
//CMTime nextClipStartTime = kCMTimeZero;
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTimeMix1;
if (playbackDelayAfterTimeMix1 > 0) {
nextClipStartTimeMix1 = CMTimeMake(playbackDelayAfterTimeMix1, 1);
}else{
nextClipStartTimeMix1 = kCMTimeZero;
}
CMTime startTimeMix1;
if (playbackDelayMix1 > 0) {
startTimeMix1 = CMTimeMake(playbackDelayMix1, 1);
}else{
startTimeMix1 = kCMTimeZero;
}
[compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1];
NSURL *url = audioFileURL1; //[NSURL fileURLWithPath:soundOne];
AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *clipAudioTrack;
if (tracks.count > 0) {
clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}else{
return NO;
}
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration) ofTrack:clipAudioTrack atTime:nextClipStartTimeMix1 error:nil];
//avAsset.commonMetadata
AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTimeMix2;
if (playbackDelayAfterTimeMix2 > 0) {
nextClipStartTimeMix2 = CMTimeMake(playbackDelayAfterTimeMix2, 1);
}else{
nextClipStartTimeMix2 = kCMTimeZero;
}
CMTime startTimeMix2;
if (playbackDelayMix2 > 0) {
startTimeMix2 = CMTimeMake(playbackDelayMix2, 1);
}else{
startTimeMix2 = kCMTimeZero;
}
[compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2];
//NSString *soundOne1 =[[NSBundle mainBundle]pathForResource:@"test" ofType:@"caf"];
NSURL *url1 = audioFileURL2; //[NSURL fileURLWithPath:soundOne1];
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *clipAudioTrack1;
if (tracks1.count > 0) {
clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}else{
return NO;
}
[compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(startTimeMix2, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:nextClipStartTimeMix2 error:nil];
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:composition
presetName:AVAssetExportPresetAppleM4A];
if (nil == exportSession) return NO;
exportSession.outputURL = audioFileOutput;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
if (AVAssetExportSessionStatusCompleted == exportSession.status)
{
[self performSelectorOnMainThread:@selector(performAction) withObject:nil waitUntilDone:NO];
}
else if (AVAssetExportSessionStatusFailed == exportSession.status)
{
[self performSelectorOnMainThread:@selector(hideSpinningWheel) withObject:nil waitUntilDone:NO];
[[NSTSharedData instance] showAlertForTitle:@"Error!" andMessage:[NSString stringWithFormat:@"%@",[[exportSession error] localizedDescription]]];
//NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
}
}];
return YES;
}
I solved my problem and it is now working fine; I added code near "AVAssetExportSession" in the code above. The final method is:
- (BOOL) combineVoices1
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
NSString *libraryCachesDirectory = [paths objectAtIndex:0];
libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"];
NSString *OutputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/%@.m4a",textFieldMixFile.text];
NSURL *audioFileOutput = [NSURL fileURLWithPath:OutputFilePath];
NSURL *audioFileInput1= audioFileURL1;//<Path of original audio file>
NSURL *audioFileInput2= audioFileURL2;//<Path of original audio file>
if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput)
{
return NO;
}
[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];
//CMTime nextClipStartTime = kCMTimeZero;
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTimeMix1;
if (playbackDelayAfterTimeMix1 > 0) {
nextClipStartTimeMix1 = CMTimeMake(playbackDelayAfterTimeMix1, 1);
}else{
nextClipStartTimeMix1 = kCMTimeZero;
}
CMTime startTimeMix1;
if (playbackDelayMix1 > 0) {
startTimeMix1 = CMTimeMake(playbackDelayMix1, 1);
}else{
startTimeMix1 = kCMTimeZero;
}
[compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1];
NSURL *url = audioFileURL1; //[NSURL fileURLWithPath:soundOne];
AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *clipAudioTrack;
if (tracks.count > 0) {
clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}else{
return NO;
}
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration) ofTrack:clipAudioTrack atTime:nextClipStartTimeMix1 error:nil];
//avAsset.commonMetadata
AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTimeMix2;
if (playbackDelayAfterTimeMix2 > 0) {
nextClipStartTimeMix2 = CMTimeMake(playbackDelayAfterTimeMix2, 1);
}else{
nextClipStartTimeMix2 = kCMTimeZero;
}
CMTime startTimeMix2;
if (playbackDelayMix2 > 0) {
startTimeMix2 = CMTimeMake(playbackDelayMix2, 1);
}else{
startTimeMix2 = kCMTimeZero;
}
[compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2];
//NSString *soundOne1 =[[NSBundle mainBundle]pathForResource:@"test" ofType:@"caf"];
NSURL *url1 = audioFileURL2; //[NSURL fileURLWithPath:soundOne1];
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *clipAudioTrack1;
if (tracks1.count > 0) {
clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}else{
return NO;
}
[compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(startTimeMix2, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:nextClipStartTimeMix2 error:nil];
/**
added MetadataItem
**/
AVMutableMetadataItem *artistMetadata = [[AVMutableMetadataItem alloc] init];
artistMetadata.key = AVMetadataiTunesMetadataKeyArtist;
artistMetadata.keySpace = AVMetadataKeySpaceiTunes;
artistMetadata.locale = [NSLocale currentLocale];
artistMetadata.value = uTakeTheMicArtist;
AVMutableMetadataItem *albumMetadata = [[AVMutableMetadataItem alloc] init];
albumMetadata.key = AVMetadataiTunesMetadataKeyAlbum;
albumMetadata.keySpace = AVMetadataKeySpaceiTunes;
albumMetadata.locale = [NSLocale currentLocale];
albumMetadata.value = uTakeTheMicAlbum;
AVMutableMetadataItem *songMetadata = [[AVMutableMetadataItem alloc] init];
songMetadata.key = AVMetadataiTunesMetadataKeySongName;
songMetadata.keySpace = AVMetadataKeySpaceiTunes;
songMetadata.locale = [NSLocale currentLocale];
songMetadata.value = textFieldMixFile.text;
AVMutableMetadataItem *imageMetadata = [[AVMutableMetadataItem alloc] init];
imageMetadata.key = AVMetadataiTunesMetadataKeyCoverArt;
imageMetadata.keySpace = AVMetadataKeySpaceiTunes;
imageMetadata.locale = [NSLocale currentLocale];
imageMetadata.value = imageData; //imageData is NSData of UIImage.
NSArray *metadata = [NSArray arrayWithObjects:artistMetadata, albumMetadata, songMetadata, imageMetadata, nil];
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:composition
presetName:AVAssetExportPresetAppleM4A];
if (nil == exportSession) return NO;
exportSession.metadata = metadata;
exportSession.outputURL = audioFileOutput;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
if (AVAssetExportSessionStatusCompleted == exportSession.status)
{
[self performSelectorOnMainThread:@selector(performAction) withObject:nil waitUntilDone:NO];
}
else if (AVAssetExportSessionStatusFailed == exportSession.status)
{
[self performSelectorOnMainThread:@selector(hideSpinningWheel) withObject:nil waitUntilDone:NO];
[[NSTSharedData instance] showAlertForTitle:@"Error!" andMessage:[NSString stringWithFormat:@"%@.",[[exportSession error] localizedDescription]]];
//NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
}
}];
return YES;
}
I created a video using an array of images.
It creates the video successfully; then I add audio to that created video file.
I create an AVMutableComposition object, add the video and audio by creating AVAssetTracks, and finally export into a single video file with the help of AVAssetExportSession.
Suppose the first video, without audio, is vdo.mp4 and the final one (after adding audio) is final.mp4; my final.mp4 is lower in size and resolution than vdo.mp4.
Here is my code, which combines the two files:
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
NSError * error = nil;
AVMutableComposition * composition = [AVMutableComposition composition];
NSURL *url = [NSURL fileURLWithPath:filePath];
AVURLAsset * videoAsset = [AVURLAsset URLAssetWithURL:url options:nil];
AVAssetTrack * videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID: kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,videoAsset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:&error];
CMTime audioStartTime = kCMTimeZero;
for ( NSInteger i = 0;i< [mArrAudioFileNames count];i++ )
{
NSString *audioFileName = nil;
NSString *docsDir = nil;
if ( [mArrAudioFileNames objectAtIndex:i] != [NSNull null]) {
audioFileName = [mArrAudioFileNames objectAtIndex:i];
docsDir = [[self dataFolderPathForAudio] stringByAppendingPathComponent:audioFileName];
}else{
//audioFileName = @" ";
docsDir = [[NSBundle mainBundle] pathForResource:@"sample" ofType:@"mp3"];
}
// NSString *docsDir = [[self dataFolderPathForAudio] stringByAppendingPathComponent:audioFileName];
AVURLAsset * urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:docsDir] options:nil];
AVAssetTrack * audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,urlAsset.duration) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
Float64 duration = CMTimeGetSeconds(urlAsset.duration);
audioStartTime = CMTimeAdd(audioStartTime, CMTimeMake((int) ((duration * kRecordingFPS) + 0.5), kRecordingFPS));
}
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
//assetExport.videoComposition = compositionVideoTrack;
assetExport.outputFileType = AVFileTypeQuickTimeMovie;// @"com.apple.quicktime-movie";
assetExport.outputURL = [NSURL fileURLWithPath:outFilePath];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
NSLog(#"Export Complete");
[self performSelectorOnMainThread:#selector(creatingVideoDone:)
withObject:outFilePath waitUntilDone:NO];
[assetExport release];
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// Set delegate to move to view
if ( mDelegate!= nil && [mDelegate respondsToSelector:#selector(errorAlert:)])
{
[self performSelectorOnMainThread:#selector(errorOccured:)
withObject:[assetExport.error
localizedDescription]
waitUntilDone:NO];
}
[assetExport release];
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// Set delegate to move to view
if ( mDelegate!= nil && [mDelegate respondsToSelector:#selector(errorAlert:)])
{
[self performSelectorOnMainThread:#selector(errorOccured:)
withObject:[assetExport.error
localizedDescription]
waitUntilDone:NO];
}
[assetExport release];
break;
}
}];
Any help is appreciated.
Thanks.
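One likely culprit, for what it's worth: AVAssetExportPresetMediumQuality re-encodes the video at a reduced resolution and bitrate, which would explain why final.mp4 comes out smaller and lower-resolution than vdo.mp4. A minimal sketch of the alternative, keeping the rest of the code above unchanged:
// AVAssetExportPresetHighestQuality re-encodes at the source resolution;
// AVAssetExportPresetPassthrough would avoid re-encoding entirely, where supported.
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];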
I've recorded sound into a .caf file using AVAudioRecorder.
I want to cut/trim the first 2 seconds of this file.
How do I do that? (I've spent hours looking for a solution, with no luck.)
You can do this with AVAssetExportSession.
Try this code:
float vocalStartMarker = 0.0f;
float vocalEndMarker = 2.0f;
NSURL *audioFileInput = [NSURL fileURLWithPath:@"input filePath"]; // give your audio file path
NSString *docsDirs;
NSArray *dirPath;
dirPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
docsDirs = [dirPath objectAtIndex:0];
NSString *destinationURLs = [docsDirs stringByAppendingPathComponent:@"trim.caf"];
audioFileOutput = [NSURL fileURLWithPath:destinationURLs];
if (!audioFileInput || !audioFileOutput)
{
return NO;
}
[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];
AVAsset *asset = [AVURLAsset URLAssetWithURL:audioFileInput options:nil];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset
presetName:AVAssetExportPresetAppleM4A];
if (exportSession == nil)
{
return NO;
}
CMTime startTime = CMTimeMake((int)(floor(vocalStartMarker * 100)), 100);
CMTime stopTime = CMTimeMake((int)(ceil(vocalEndMarker * 100)), 100);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
exportSession.outputURL = audioFileOutput;
exportSession.outputFileType = AVFileTypeAppleM4A;
exportSession.timeRange = exportTimeRange;
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
if (AVAssetExportSessionStatusCompleted == exportSession.status)
{
// It worked!
NSLog(#"DONE TRIMMING.....");
NSLog(#"ouput audio trim file %#",audioFileOutput);
}
else if (AVAssetExportSessionStatusFailed == exportSession.status)
{
// It failed...
NSLog(#"FAILED TRIMMING.....");
}
}];
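One caveat: as written, exportTimeRange keeps only the first two seconds. If the goal is to cut the first two seconds off and keep the rest, invert the range; a sketch against the same asset and exportSession:
// start two seconds in and run to the end of the asset
CMTime trimmedStart = CMTimeMakeWithSeconds(2.0, 600);
CMTimeRange keepRange = CMTimeRangeFromTimeToTime(trimmedStart, asset.duration);
exportSession.timeRange = keepRange;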
I've created a method that trims and exports videos based on a given time range. It also rotates the video to landscape.
For some reason, though, AVAssetExportSession fails when attempting to process a video that was previously trimmed using UIVideoEditorController.
Has anyone encountered this issue before?
I get this error:
AVAssetExportSessionStatusFailed: Error Domain=AVFoundationErrorDomain Code=-11841 "The operation couldn’t be completed. (AVFoundationErrorDomain error -11841.)"
For this method:
- (void) trimVideoWithRange: (CMTimeRange)range fromInputURL: (NSURL *)inputURL withCompletionHandler:(void (^)(BOOL success, NSURL *outputURL))handler;
{
AVAsset *asset = [AVURLAsset assetWithURL:inputURL];
AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
AVAssetTrack *audioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
NSLog(#"%#, %#, %#", asset, videoTrack, audioTrack);
NSError *error;
// Create a video composition
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
error = nil;
[videoCompositionTrack insertTimeRange:videoTrack.timeRange ofTrack:videoTrack atTime:CMTimeMakeWithSeconds(0, NSEC_PER_SEC) error:&error];
NSLog(#"videoCompositionTrack timeRange: %lld, %lld", videoCompositionTrack.timeRange.start.value, videoCompositionTrack.timeRange.duration.value);
if(error)
NSLog(#"videoCompositionTrack error: %#", error);
AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
error = nil;
[audioCompositionTrack insertTimeRange:audioTrack.timeRange ofTrack:audioTrack atTime:CMTimeMakeWithSeconds(0, NSEC_PER_SEC) error:&error];
NSLog(#"audioCompositionTrack timeRange: %lld, %lld", audioCompositionTrack.timeRange.start.value, audioCompositionTrack.timeRange.duration.value);
if(error)
NSLog(#"audioCompositionTrack error: %#", error);
// Rotate video if needed
CGAffineTransform rotationTransform = videoTrack.preferredTransform;
// Create video composition
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderScale = 1.0;
videoComposition.renderSize = videoTrack.naturalSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
// Apply the transform which may have been changed
AVMutableVideoCompositionLayerInstruction *instruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[instruction setTransform:rotationTransform atTime:kCMTimeZero];
// Set the time range and layer instructions for the video composition
AVMutableVideoCompositionInstruction *videoTrackInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoTrackInstruction.layerInstructions = [NSArray arrayWithObject:instruction];
videoTrackInstruction.timeRange = range;
videoComposition.instructions = @[videoTrackInstruction];
// Check so that we can proceed with our desired output preset
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:composition];
if (![compatiblePresets containsObject:AVAssetExportPreset960x540])
{
// Nope.
if(handler)
handler(NO, nil);
return;
}
// Create export session with composition
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPreset960x540];
// Configure export session
exportSession.outputURL = [NSURL fileURLWithPath:pathToTemporaryOutput];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.videoComposition = videoComposition;
exportSession.shouldOptimizeForNetworkUse = YES;
// Export async
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
{
dispatch_async(dispatch_get_main_queue(), ^{
// Everything OK. Execute completion block with URL to rendered video
if(handler)
handler(exportSession.status == AVAssetExportSessionStatusCompleted, [NSURL fileURLWithPath:pathToTemporaryOutput]);
});
}
break;
case AVAssetExportSessionStatusFailed:
{
NSError *exportError = exportSession.error;
NSLog(#"AVAssetExportSessionStatusFailed: %#", exportError.description);
dispatch_async(dispatch_get_main_queue(), ^{
// No go. Execute handler with fail.
if(handler)
handler(NO, nil);
});
}
break;
}
}];
}
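For reference, -11841 is AVErrorInvalidVideoComposition, i.e. the composition itself is being rejected. One common cause, and an assumption worth checking in the method above: the single instruction's timeRange not starting at zero or not covering the whole composition. A sketch, placed before the export session is created:
// make the instruction span the entire composition from time zero
videoTrackInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);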
This works for me. Here, exportSession is an AVAssetExportSession:
NSURL *videoFileUrl = [NSURL fileURLWithPath:self.originalVideoPath];
AVAsset *anAsset = [[AVURLAsset alloc] initWithURL:videoFileUrl options:nil];
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:anAsset];
if ([compatiblePresets containsObject:AVAssetExportPresetMediumQuality]) {
self.exportSession = [[AVAssetExportSession alloc]
initWithAsset:anAsset presetName:AVAssetExportPresetPassthrough];
// Implementation continues.
NSURL *furl = [NSURL fileURLWithPath:self.tmpVideoPath];
self.exportSession.outputURL = furl;
//provide outputFileType acording to video format extension
self.exportSession.outputFileType = AVFileTypeQuickTimeMovie;
CMTime start = CMTimeMakeWithSeconds(self.startTime, anAsset.duration.timescale);
CMTime duration = CMTimeMakeWithSeconds(self.stopTime-self.startTime, anAsset.duration.timescale);
CMTimeRange range = CMTimeRangeMake(start, duration);
self.exportSession.timeRange = range;
self.btnTrim.hidden = YES;
self.myActivityIndicator.hidden = NO;
[self.myActivityIndicator startAnimating];
[self.exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([self.exportSession status]) {
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %#", [[self.exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
default:
NSLog(#"Triming Completed");
dispatch_async(dispatch_get_main_queue(), ^{
[self.myActivityIndicator stopAnimating];
self.myActivityIndicator.hidden = YES;
});
break;
}
}];
}
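A small caveat if you reuse this: the compatibility check tests AVAssetExportPresetMediumQuality, yet the session is created with AVAssetExportPresetPassthrough, so the check doesn't cover the preset actually in use. Checking the preset you pass is safer, e.g.:
if ([compatiblePresets containsObject:AVAssetExportPresetPassthrough]) {
    self.exportSession = [[AVAssetExportSession alloc] initWithAsset:anAsset
                                                          presetName:AVAssetExportPresetPassthrough];
    // ... rest as above
}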