I am trying to trim and then compress a video file.
For trimming I am using AVAssetExportSession.
For compression I am using AVAssetWriter.
If I use each piece of code individually, everything works fine, but if I trim and then feed the trimmed output into the compression step, I get a compressed but corrupt video.
- (void)viewDidLoad
{
[super viewDidLoad];
[self trimVideo];
}
Trimming Code
-(void)trimVideo {
AVAsset *anAsset = [[AVURLAsset alloc]initWithURL:[self.asset valueForProperty:ALAssetPropertyAssetURL] options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:anAsset presetName:AVAssetExportPresetPassthrough];
NSString *fName = [NSString stringWithFormat:@"%@.%@", @"tempVid", @"mp4"];
saveURL = [NSURL fileURLWithPath:[[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0] stringByAppendingPathComponent:fName]];
NSString *fName1 = [NSString stringWithFormat:@"%@.%@", @"tempVid1", @"mp4"];
saveURL1 = [NSURL fileURLWithPath:[[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0] stringByAppendingPathComponent:fName1]];
exportSession.outputURL = saveURL;
exportSession.outputFileType = AVFileTypeMPEG4;
CMTime start = CMTimeMakeWithSeconds(1.0, 600);
CMTime duration = CMTimeMakeWithSeconds(180.0, 600);
CMTimeRange range = CMTimeRangeMake(start, duration);
exportSession.timeRange = range;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status]) {
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %#", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
case AVAssetExportSessionStatusCompleted:
[self convertVideoToLowQuailtyWithInputURL:saveURL outputURL:saveURL1];
break;
default:
break;
}
}];
}
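One check worth adding between the two stages (my addition, not part of the original question) is to confirm that the passthrough export actually produced a non-empty file before the Completed branch hands it to the compressor:
//Sanity check before re-encoding the trimmed file.
NSDictionary *attrs = [[NSFileManager defaultManager] attributesOfItemAtPath:[saveURL path] error:nil];
if (attrs == nil || [attrs fileSize] == 0) {
    NSLog(@"Trim output missing or empty; not compressing");
} else {
    [self convertVideoToLowQuailtyWithInputURL:saveURL outputURL:saveURL1];
}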
Compression Code
- (void)convertVideoToLowQuailtyWithInputURL:(NSURL*)inputURL outputURL:(NSURL*)outputURL
{
AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];
AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
//CGSize videoSize = videoTrack.naturalSize;
NSDictionary* settings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:960000],AVVideoAverageBitRateKey, AVVideoProfileLevelH264Main32, AVVideoProfileLevelKey,
[NSNumber numberWithInt:24], AVVideoMaxKeyFrameIntervalKey, [NSNumber numberWithInt:0.0], AVVideoMaxKeyFrameIntervalDurationKey, nil],
AVVideoCompressionPropertiesKey, [NSNumber numberWithInt:640], AVVideoWidthKey, [NSNumber numberWithInt:320], AVVideoHeightKey, nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
videoWriterInput.expectsMediaDataInRealTime = YES;
videoWriterInput.transform = videoTrack.preferredTransform;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:nil];
[videoWriter addInput:videoWriterInput];
NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];
AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
[videoReader addOutput:videoReaderOutput];
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil];
audioWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:audioWriterInput];
AVAssetTrack* audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
[audioReader addOutput:audioReaderOutput];
[videoWriter startWriting];
[videoReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:
^{
while ([videoWriterInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBuffer;
if ([videoReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
[videoWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else {
[videoWriterInput markAsFinished];
if ([videoReader status] == AVAssetReaderStatusCompleted) {
//start writing from audio reader
[audioReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
while (audioWriterInput.readyForMoreMediaData) {
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {
[audioWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else {
[audioWriterInput markAsFinished];
if ([audioReader status] == AVAssetReaderStatusCompleted) {
[videoWriter finishWritingWithCompletionHandler:^(){
NSLog(#"Success");
}];
}
}
}
}
];
}
}
}
}
];
}
You must use the AVMutableCompositionTrack class, and to trim the media file you need to use its - (void)removeTimeRange:(CMTimeRange)timeRange method.
AVAsset *videoAsset = <#AVAsset with at least one video track#>;
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compatibleCompositionTrack = [mutableComposition mutableTrackCompatibleWithTrack:videoAssetTrack];
if (compatibleCompositionTrack) {
// Implementation continues.
[compatibleCompositionTrack removeTimeRange: yourTimeRange];
//Export Now your media file using exportSession
//Note: the export session must be created with the composition (an AVAsset), not with the track.
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = outputFileUrl;
[exportSession exportAsynchronouslyWithCompletionHandler:
^(void) {
switch ([exportSession status]) {
case AVAssetExportSessionStatusFailed:
NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
case AVAssetExportSessionStatusCompleted:
[self convertVideoToLowQuailtyWithInputURL:saveURL outputURL:saveURL1];
break;
default:
break;
}
}
];
}
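For completeness, yourTimeRange above is an ordinary CMTimeRange; for example, to cut everything before the one-second mark, reusing the 600 timescale from the question:
//Example only: a range covering the first second of the track.
CMTimeRange yourTimeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(1.0, 600));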
Below is my code for this .mov to .wav conversion:
- (void)mp4ForURL:(NSURL *)videoURL{
// Create the asset url with the video file
AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:avAsset];
// Check if video is supported for conversion or not
if ([compatiblePresets containsObject:AVAssetExportPresetMediumQuality])////************************************////
{
//Create Export session
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]initWithAsset:avAsset presetName:AVAssetExportPresetMediumQuality];////************************************////
//Creating temp path to save the converted video
NSString* documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString* myDocumentPath = [documentsDirectory stringByAppendingPathComponent:@"temp.mp4"];
NSURL *url = [[NSURL alloc] initFileURLWithPath:myDocumentPath];
//Check if the file already exists then remove the previous file
if ([[NSFileManager defaultManager]fileExistsAtPath:myDocumentPath])
{
[[NSFileManager defaultManager]removeItemAtPath:myDocumentPath error:nil];
}
exportSession.outputURL = url;
//Set the output file type if you want to convert it to another format (e.g. .3gp)
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.shouldOptimizeForNetworkUse = YES;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusFailed:
NSLog(#"Export session failed");
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
case AVAssetExportSessionStatusCompleted:
{
//Video conversion finished
NSLog(#"Successful!");
[self convertMP4toMP3withFile:myDocumentPath];
}
break;
default:
break;
}
}];
}
else
{
NSLog(#"Video file not supported!");
}
}
-(void)convertMP4toCAFwithFile:(NSString*)dstPath //Converted to Core Audio Format .caf
{
NSURL *dstURL = [NSURL fileURLWithPath:dstPath];
AVMutableComposition* newAudioAsset = [AVMutableComposition composition];
AVMutableCompositionTrack* dstCompositionTrack;
dstCompositionTrack = [newAudioAsset addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAsset* srcAsset = [AVURLAsset URLAssetWithURL:dstURL options:nil];
AVAssetTrack* srcTrack = [[srcAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
CMTimeRange timeRange = srcTrack.timeRange;
NSError* error;
if(NO == [dstCompositionTrack insertTimeRange:timeRange ofTrack:srcTrack atTime:kCMTimeZero error:&error]) {
NSLog(#"track insert failed: %#\n", error);
return;
}
AVAssetExportSession* exportSesh = [[AVAssetExportSession alloc] initWithAsset:newAudioAsset presetName:AVAssetExportPresetPassthrough];
exportSesh.outputFileType = AVFileTypeCoreAudioFormat;
exportSesh.outputURL = dstURL;
[[NSFileManager defaultManager] removeItemAtURL:dstURL error:nil];
[exportSesh exportAsynchronouslyWithCompletionHandler:^{
AVAssetExportSessionStatus status = exportSesh.status;
NSLog(#"exportAsynchronouslyWithCompletionHandler: %li\n", (long)status);
if(AVAssetExportSessionStatusFailed == status) {
NSLog(#"FAILURE: %#\n", exportSesh.error);
} else if(AVAssetExportSessionStatusCompleted == status) {
NSLog(#"SUCCESS!\n");
NSError *error;
//build a unique name for the .caf file and move it into place
NSString *onlyPath = [dstPath stringByDeletingLastPathComponent];
NSInteger randomNumber = arc4random() % 100000;
strDateAndTime = [self getCurrentDateAndTime];
strAudioName = [NSString stringWithFormat:@"%@_%ld_%@.%@", @"Audio", (long)randomNumber, strDateAndTime, @"caf"];
NSString *toPathString = [NSString stringWithFormat:@"%@/%@", onlyPath, strAudioName];
[[NSFileManager defaultManager] moveItemAtPath:dstPath toPath:toPathString error:&error];
//[self uploadAudioOnAWSFromPath:toPathString];
[self convertToWavForFilePath:toPathString];
}
}];
}
- (void)convertToWavForFilePath:(NSString *)cafFilePath
{
// set up an AVAssetReader to read from the iPod Library
// NSString *cafFilePath=[[NSBundle mainBundle]pathForResource:@"test" ofType:@"caf"];
NSURL *assetURL = [NSURL fileURLWithPath:cafFilePath];
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
NSError *assetError = nil;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
error:&assetError];
if (assetError) {
NSLog (#"error: %#", assetError);
return;
}
AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput
assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
audioSettings: nil];
if (! [assetReader canAddOutput: assetReaderOutput]) {
NSLog (#"can't add reader output... die!");
return;
}
[assetReader addOutput: assetReaderOutput];
NSInteger randomNumber = arc4random() % 100000;
_finalAudioName = [NSString stringWithFormat:@"%@_%ld_%@", @"Audio", (long)randomNumber, strDateAndTime];
// NSString *title = @"MyRec";
NSArray *docDirs = NSSearchPathForDirectoriesInDomains (NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docDir = [docDirs objectAtIndex: 0];
__block NSString *wavFilePath = [[docDir stringByAppendingPathComponent :_finalAudioName]
stringByAppendingPathExtension:@"wav"];
_finalAudioName = [wavFilePath lastPathComponent];
if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath])
{
[[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
}
NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
fileType:AVFileTypeWAVE
error:&assetError];
if (assetError)
{
NSLog (#"error: %#", assetError);
return;
}
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
[NSNumber numberWithFloat:44100.0], AVSampleRateKey,
[NSNumber numberWithInt:1], AVNumberOfChannelsKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
[NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
[NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
nil];
AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:outputSettings];
if ([assetWriter canAddInput:assetWriterInput])
{
[assetWriter addInput:assetWriterInput];
}
else
{
NSLog (#"can't add asset writer input... die!");
return;
}
assetWriterInput.expectsMediaDataInRealTime = NO;
[assetWriter startWriting];
[assetReader startReading];
AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
[assetWriter startSessionAtSourceTime: startTime];
__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
usingBlock: ^
{
while (assetWriterInput.readyForMoreMediaData)
{
CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
if (nextBuffer)
{
// append buffer
[assetWriterInput appendSampleBuffer: nextBuffer];
convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(nextBuffer);
CMTime sampleDuration = CMSampleBufferGetDuration(nextBuffer);
if (CMTIME_IS_NUMERIC(sampleDuration))
progressTime= CMTimeAdd(progressTime, sampleDuration);
float dProgress= CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(songAsset.duration);
NSLog(#"%f",dProgress);
}
else
{
[assetWriterInput markAsFinished];
[assetReader cancelReading];
[assetWriter finishWritingWithCompletionHandler:^{
[self uploadAudioOnAWSFromPath:wavFilePath];
}];
}
}
}];
}
In the code above, the final converted file is in .wav format, which I have to send to an AWS server to get text from it. For the speech-to-text conversion we use the Google Speech API, and whenever I send a file converted on a device running below iOS 11 (like iOS 9 or 10), it shows me this error:
Error: WAV header indicates an unsupported format.
The above error is returned by the Google Speech API while converting the audio file (.wav) to text.
The code works perfectly on the latest iOS 11 devices; only devices below iOS 11 (like iOS 9 and 10) do not work properly.
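One way to see what the failing devices are actually writing is to dump the fmt chunk of the generated file before uploading it. This is a diagnostic sketch, assuming the canonical 44-byte RIFF/WAVE header that AVAssetWriter normally emits for linear PCM; wavFilePath is the path produced by convertToWavForFilePath: above:
//Diagnostic only: log the header fields the speech endpoint cares about.
NSFileHandle *fh = [NSFileHandle fileHandleForReadingAtPath:wavFilePath];
NSData *header = [fh readDataOfLength:44];
[fh closeFile];
if (header.length == 44) {
    const uint8_t *b = header.bytes;
    uint16_t formatTag     = b[20] | (b[21] << 8);   //1 == linear PCM
    uint16_t channels      = b[22] | (b[23] << 8);
    uint32_t sampleRate    = b[24] | (b[25] << 8) | (b[26] << 16) | ((uint32_t)b[27] << 24);
    uint16_t bitsPerSample = b[34] | (b[35] << 8);
    NSLog(@"fmt=%u channels=%u rate=%u bits=%u", formatTag, channels, sampleRate, bitsPerSample);
}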
I am able to record a video using AVCaptureSession, but I want to reduce its size. How can I do this?
I am getting the final URL in the delegate method "captureOutput".
VideoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (VideoDevice)
{
VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
{
[CaptureSession addInput:VideoInputDevice];
}
else
{
NSLog(@"Couldn't add video input");
}
}
else
{
NSLog(@"Couldn't create video input");
}
}
else
{
NSLog(@"Couldn't create video capture device");
}
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
I hope this method will be helpful for you.
- (void)compressingVideoWithSoundWithInputURL:(NSURL*)inputURL outputURL:(NSURL*)outputURL andResolution:(CGSize)resolution
{
self.myVideoWriter = nil;
[[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
//setup video writer
AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];
AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:resolution.width], AVVideoWidthKey, [NSNumber numberWithFloat:resolution.height], AVVideoHeightKey, nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoWriterSettings];
videoWriterInput.expectsMediaDataInRealTime = YES;
videoWriterInput.transform = videoTrack.preferredTransform;
NSError* writerError = nil;
self.myVideoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&writerError];
if (writerError) {
NSLog(#"Writer error: %#", writerError);
NSString *message = [NSString stringWithFormat:#"VideoEditor. compressingVideoWithInputURL: outputURL: andResolution:. Writer error: %#", writerError];
FLog(message);
return;
}
[self.myVideoWriter addInput:videoWriterInput];
//setup video reader
NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];
AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
[videoReader addOutput:videoReaderOutput];
//setup audio writer
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil];
audioWriterInput.expectsMediaDataInRealTime = NO;
[self.myVideoWriter addInput:audioWriterInput];
//setup audio reader
AVAssetTrack* audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
[audioReader addOutput:audioReaderOutput];
[self.myVideoWriter startWriting];
//start writing from video reader
[videoReader startReading];
[self.myVideoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", DISPATCH_QUEUE_SERIAL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:
^{
while ([videoWriterInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBuffer;
if ([videoReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
[videoWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else {
[videoWriterInput markAsFinished];
if ([videoReader status] == AVAssetReaderStatusCompleted) {
if ([audioReader status] == AVAssetReaderStatusReading || [audioReader status] == AVAssetReaderStatusCompleted) {
}
else{
//start writing from audio reader
[audioReader startReading];
[self.myVideoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
while (audioWriterInput.readyForMoreMediaData) {
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {
[audioWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else {
[audioWriterInput markAsFinished];
if ([audioReader status] == AVAssetReaderStatusCompleted)
{
[self finishWritingAction];
break;
}
}
}
}
];
}
}
}
}
}
];
}
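The -finishWritingAction helper is not shown in this answer; presumably it just finalizes self.myVideoWriter, along these lines (my reconstruction, not the author's code):
//Hypothetical reconstruction of the helper the answer calls but omits.
- (void)finishWritingAction
{
    [self.myVideoWriter finishWritingWithCompletionHandler:^{
        if (self.myVideoWriter.status == AVAssetWriterStatusCompleted) {
            NSLog(@"Compression finished");
        } else {
            NSLog(@"Writer ended with status %ld, error: %@", (long)self.myVideoWriter.status, self.myVideoWriter.error);
        }
    }];
}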
You want to use an AVAssetExportSession. Your code will look something like this:
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:myAsset presetName:exportPreset];
exporter.videoComposition = _videoComposition;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = NO;
url = [url URLByAppendingPathExtension:CFBridgingRelease(UTTypeCopyPreferredTagWithClass((__bridge CFStringRef)(exporter.outputFileType), kUTTagClassFilenameExtension))];
exporter.outputURL = url;
[exporter exportAsynchronouslyWithCompletionHandler:^{
// handle the completion in a way meaningful to your app
}];
Your export preset should be one of these:
AVF_EXPORT NSString *const AVAssetExportPreset640x480 NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset960x540 NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset1280x720 NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset1920x1080 NS_AVAILABLE(10_7, 5_0);
AVF_EXPORT NSString *const AVAssetExportPreset3840x2160 NS_AVAILABLE(10_10, NA);
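Since not every preset is valid for every asset, it is safer to check compatibility before creating the session; a small sketch (myAsset as in the snippet above):
//Pick the largest preset the asset actually supports, falling back to medium quality.
NSArray *compatible = [AVAssetExportSession exportPresetsCompatibleWithAsset:myAsset];
NSString *exportPreset = AVAssetExportPresetMediumQuality;
for (NSString *candidate in @[AVAssetExportPreset1920x1080, AVAssetExportPreset1280x720, AVAssetExportPreset960x540, AVAssetExportPreset640x480]) {
    if ([compatible containsObject:candidate]) {
        exportPreset = candidate;
        break;
    }
}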
How can I compress a video using bit rate?
I tried the code below to compress a video, but it's not working; it gives me an error like:
Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetReader startReading] cannot be called again after reading has already started'
- (void) imagePickerController: (UIImagePickerController *) picker
didFinishPickingMediaWithInfo: (NSDictionary *) info
{
// Handle movie capture
NSURL *movieURL = [info objectForKey:
UIImagePickerControllerMediaURL];
NSData *data = [NSData dataWithContentsOfURL:movieURL];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *tempPath = [documentsDirectory stringByAppendingFormat:@"/vid1.mp4"];
BOOL success = [data writeToFile:tempPath atomically:NO];
if (success)
{
NSLog(#"VIdeo Successfully written");
}
else
{
NSLog(#"VIdeo Wrting failed");
}
NSURL *uploadURL = [NSURL fileURLWithPath:[[NSTemporaryDirectory() stringByAppendingPathComponent:@"1234"] stringByAppendingString:@".mp4"]];
// Compress movie first
[self convertVideoToLowQuailtyWithInputURL:movieURL outputURL:uploadURL];
}
- (void)convertVideoToLowQuailtyWithInputURL:(NSURL*)inputURL
outputURL:(NSURL*)outputURL
{
//setup video writer
AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];
AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize videoSize = videoTrack.naturalSize;
NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey, [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey, nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoWriterSettings];
videoWriterInput.expectsMediaDataInRealTime = YES;
videoWriterInput.transform = videoTrack.preferredTransform;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:nil];
[videoWriter addInput:videoWriterInput];
//setup video reader
NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];
AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
[videoReader addOutput:videoReaderOutput];
//setup audio writer
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil];
audioWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:audioWriterInput];
//setup audio reader
AVAssetTrack* audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
[audioReader addOutput:audioReaderOutput];
[videoWriter startWriting];
//start writing from video reader
[videoReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:
^{
while ([videoWriterInput isReadyForMoreMediaData])
{
CMSampleBufferRef sampleBuffer;
if ([videoReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [videoReaderOutput copyNextSampleBuffer]))
{
[videoWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else
{
[videoWriterInput markAsFinished];
if ([videoReader status] == AVAssetReaderStatusCompleted)
{
[audioReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
while (audioWriterInput.readyForMoreMediaData)
{
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {
[audioWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else
{
[audioWriterInput markAsFinished];
if ([audioReader status] == AVAssetReaderStatusCompleted)
{
[videoWriter finishWritingWithCompletionHandler:^()
{
NSLog(#"Output URl : %#",outputURL);
}];
}
}
}
}
];
}
}
}
}
];
}
You can use the presets below to compress the video to different quality levels.
AVAssetExportPresetLowQuality
AVAssetExportPresetMediumQuality
AVAssetExportPresetHighestQuality
Code:
- (void)CompressVideo
{
if(firstAsset !=nil)
{
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
// http://stackoverflow.com/questions/22715881/merge-video-files-with-their-original-audio-in-ios
//VIDEO TRACK
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//For Audio Track inclusion
//============================================================================================
NSArray *arr = [firstAsset tracksWithMediaType:AVMediaTypeAudio];
AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[arr lastObject] atTime:kCMTimeZero error:nil];
//===============================================================================================
AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {FirstAssetOrientation_ = UIImageOrientationUp;}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;
}
CGFloat FirstAssetScaleToFitRatio = VideoWidth/FirstAssetTrack.naturalSize.width;
if(isFirstAssetPortrait_)
{
FirstAssetScaleToFitRatio = VideoWidth/FirstAssetTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}
else
{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
// MainCompositionInst.renderSize = CGSizeMake(VideoWidth, 900);
MainCompositionInst.renderSize = CGSizeMake(VideoWidth, [UIScreen mainScreen].bounds.size.height);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"CompressedVideo.mov"];
NSLog(@"myPath Docs: %@", myPathDocs);
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
NSError *error;
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:&error];
}
//Movie Quality
//==================================================
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
//==================================================
exporter.outputURL=url;
//Movie Type
//==================================================
exporter.outputFileType = AVFileTypeQuickTimeMovie;
//==================================================
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^
{
videoUrToUload = url;
[self exportDidFinish:exporter];
});
}];
}
}
- (void)exportDidFinish:(AVAssetExportSession *)session
{
if(session.status == AVAssetExportSessionStatusCompleted)
{
//Store the URL somewhere using session.outputURL
}
}
I got the same crash, but after making some changes this method worked for me. The key change is that the audio reader's status is checked before [audioReader startReading] is called, so the audio pass can only be started once even though the video input's request block runs repeatedly. Just replace your method above with this one:
- (void)convertVideoToLowQuailtyWithInputURL:(NSURL*)inputURL
outputURL:(NSURL*)outputURL
{
//setup video writer
AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];
AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize videoSize = videoTrack.naturalSize;
NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey, [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey, nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoWriterSettings];
videoWriterInput.expectsMediaDataInRealTime = YES;
videoWriterInput.transform = videoTrack.preferredTransform;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:nil];
[videoWriter addInput:videoWriterInput];
//setup video reader
NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];
AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
[videoReader addOutput:videoReaderOutput];
//setup audio writer
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil];
audioWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:audioWriterInput];
//setup audio reader
AVAssetTrack* audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
[audioReader addOutput:audioReaderOutput];
[videoWriter startWriting];
//start writing from video reader
[videoReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:
^{
while ([videoWriterInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBuffer;
if ([videoReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
[videoWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else {
[videoWriterInput markAsFinished];
if ([videoReader status] == AVAssetReaderStatusCompleted) {
if ([audioReader status] == AVAssetReaderStatusReading || [audioReader status] == AVAssetReaderStatusCompleted) {
}
else{
//start writing from audio reader
[audioReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
while (audioWriterInput.readyForMoreMediaData) {
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {
[audioWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else {
[audioWriterInput markAsFinished];
if ([audioReader status] == AVAssetReaderStatusCompleted) {
[videoWriter finishWritingWithCompletionHandler:^(){
// [self sendMovieFileAtURL:outputURL];
NSString *moviePath = [outputURL path];
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(moviePath)) {
UISaveVideoAtPathToSavedPhotosAlbum(moviePath, self,
@selector(video:didFinishSavingWithError:contextInfo:), nil);
}
}];
break;
}
}
}
}
];
}
}
}
}
}
];
}
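The selector passed to UISaveVideoAtPathToSavedPhotosAlbum must have the signature below; a minimal implementation:
//Completion callback for UISaveVideoAtPathToSavedPhotosAlbum.
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    if (error) {
        NSLog(@"Save to Photos failed: %@", error);
    } else {
        NSLog(@"Saved compressed video to Photos: %@", videoPath);
    }
}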
I'm attempting to combine a number of video files into a single file with specific codec settings. I used to use the AVAssetExportSession for this, but I now need more control over the codec than the AVAssetExportSession offers.
Below I've posted the createFinalVideo: function that handles the combination of the video files.
The approach I've taken is attempting to write to the same file with an AVAssetWriter while simply starting the session at the location where the next video should be appended. I know this will not work because the AVAssetWriter apparently doesn't allow for this behavior.
Previously, I had the AVAssetWriter defined outside of the for loop, and I was attempting to add a new input for each video file (each pass of the for loop). However, it appears that AVAssetWriter doesn't allow for adding new inputs after [AVAssetWriter startWriting] has been called.
My question is how do I do what I'm trying to do the right way?
/**
* Final video creation. Merges audio-only and video-only files.
**/
-(void)createFinalVideo:(id)args
{
ENSURE_SINGLE_ARG(args, NSDictionary);
// presentation id
NSString * presID = [args objectForKey:@"presID"];
// array of video paths
NSArray * videoPathsArray = [args objectForKey:@"videoPaths"];
videoSuccessCallback = [args objectForKey:@"videoSuccess"];
videoCancelCallback = [args objectForKey:@"videoCancel"];
videoErrorCallback = [args objectForKey:@"videoError"];
NSError * error = nil;
NSFileManager * fileMgr = [NSFileManager defaultManager];
NSString * bundleDirectory = [[NSBundle mainBundle] bundlePath];
NSString * documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
/*********************************************************************/
/* BEGIN: merge all videos into a final MP4 */
/*********************************************************************/
// create the final video output file as MP4 file
NSString * finalOutputFilePath = [NSString stringWithFormat:@"%@/%@/final_video.mp4", documentsDirectory, presID];
NSURL * finalOutputFileUrl = [NSURL fileURLWithPath:finalOutputFilePath];
// delete file if it exists
if ([fileMgr fileExistsAtPath:finalOutputFilePath]) {
[fileMgr removeItemAtPath:finalOutputFilePath error:nil];
}
float renderWidth = 640, renderHeight = 480;
NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:renderWidth], AVVideoCleanApertureWidthKey,
[NSNumber numberWithInt:renderHeight], AVVideoCleanApertureHeightKey,
[NSNumber numberWithInt:10], AVVideoCleanApertureHorizontalOffsetKey,
[NSNumber numberWithInt:10], AVVideoCleanApertureVerticalOffsetKey,
nil];
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1960000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt:24],AVVideoMaxKeyFrameIntervalKey,
videoCleanApertureSettings, AVVideoCleanApertureKey,
AVVideoProfileLevelH264Baseline30, AVVideoProfileLevelKey,
nil];
NSDictionary *videoCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:renderWidth], AVVideoWidthKey,
[NSNumber numberWithInt:renderHeight], AVVideoHeightKey,
AVVideoScalingModeResizeAspect, AVVideoScalingModeKey,
nil];
NSError *aerror = nil;
// next start time for adding to the compositions
CMTime nextStartTime = kCMTimeZero;
// loop through the video paths and add videos to the composition
for (NSString * path in videoPathsArray) {
// wait for each video to finish writing before continuing
dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
// create video writer
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:finalOutputFileUrl fileType:AVFileTypeQuickTimeMovie error:nil];
NSParameterAssert(videoWriter);
NSLog(#"at the top of the for loop");
NSLog(#"%#", path);
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoCompressionSettings];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil];
NSParameterAssert(audioWriterInput);
NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
audioWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:audioWriterInput];
[videoWriter startWriting];
// video setup
AVAsset *avAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path] options:nil];
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:avAsset error:&aerror];
AVAssetTrack *videoTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo]objectAtIndex:0];
CMTime videoDuration = avAsset.duration;
// Wait until the duration is actually available
int durationAttempts = 5;
while(CMTimeGetSeconds(videoDuration) == 0 && durationAttempts > 0) {
durationAttempts--;
[NSThread sleepForTimeInterval:0.3];
videoDuration = avAsset.duration;
}
NSLog(#"[INFO] MODULE-VIDUTILS video duration in secs: %f", CMTimeGetSeconds(videoDuration));
//videoWriterInput.transform = videoTrack.preferredTransform;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *asset_reader_output = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions];
[reader addOutput:asset_reader_output];
//audio setup
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:avAsset error:nil];
AVAssetTrack* audioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:readerOutput];
NSLog(#"startSessionAtSourceTime: %f", CMTimeGetSeconds(nextStartTime));
[videoWriter startSessionAtSourceTime:nextStartTime];
// set next start time
nextStartTime = CMTimeAdd(nextStartTime, videoDuration);
[reader startReading];
dispatch_queue_t _processingQueue = dispatch_queue_create("AVAssetWriterQueue", DISPATCH_QUEUE_SERIAL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:^{
while ([videoWriterInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBuffer;
if ([reader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [asset_reader_output copyNextSampleBuffer])) {
BOOL result = [videoWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
if (!result) {
[reader cancelReading];
NSLog(#"NO RESULT");
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetWriterInputStatusFailed: %#", videoWriter.error);
if (videoErrorCallback != nil) {
[self _fireEventToListener:#"videoError" withObject:nil listener:videoErrorCallback thisObject:nil];
}
return;
break;
}
} else {
[videoWriterInput markAsFinished];
switch ([reader status]) {
case AVAssetReaderStatusReading:
// the reader has more for other tracks, even if this one is done
break;
case AVAssetReaderStatusCompleted:
[audioReader startReading];
[videoWriter startSessionAtSourceTime:nextStartTime];
NSLog(#"Request");
NSLog(#"Asset Writer ready :%d", audioWriterInput.readyForMoreMediaData);
while (audioWriterInput.readyForMoreMediaData) {
CMSampleBufferRef nextBuffer;
if ([audioReader status] == AVAssetReaderStatusReading && (nextBuffer = [readerOutput copyNextSampleBuffer])) {
NSLog(#"Ready");
if (nextBuffer) {
NSLog(#"NextBuffer");
[audioWriterInput appendSampleBuffer:nextBuffer];
}
} else {
[audioWriterInput markAsFinished];
//dictionary to hold duration
if ([audioReader status] == AVAssetReaderStatusCompleted) {
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetReaderStatusCompleted");
[videoWriter finishWritingWithCompletionHandler:^{
switch([videoWriter status]) {
case AVAssetWriterStatusCompleted:
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetWriterStatusCompleted");
dispatch_semaphore_signal(semaphore);
break;
case AVAssetWriterStatusCancelled:
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetWriterStatusCancelled");
if (videoSuccessCallback != nil) {
[self _fireEventToListener:#"videoCancel" withObject:nil listener:videoCancelCallback thisObject:nil];
}
return;
break;
case AVAssetWriterStatusFailed:
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetWriterStatusFailed");
if (videoSuccessCallback != nil) {
[self _fireEventToListener:#"videoError" withObject:nil listener:videoErrorCallback thisObject:nil];
}
return;
break;
}
}];
break;
}
}
}
break;
case AVAssetReaderStatusFailed:
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetReaderStatusFailed, #%", reader.error);
if (videoSuccessCallback != nil) {
[self _fireEventToListener:#"videoError" withObject:nil listener:videoErrorCallback thisObject:nil];
}
[videoWriter cancelWriting];
return;
break;
}
break;
}
}
}];
// wait for the writing to finish
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
NSLog(#"Write Ended");
}
NSLog(#"got here -- should have waited for all videos to complete first");
// call success if we got here
if (videoSuccessCallback != nil) {
[self _fireEventToListener:#"videoSuccess" withObject:nil listener:videoSuccessCallback thisObject:nil];
}
}
I found a replacement for AVAssetExportSession called SDAVAssetExportSession that allows you to specify settings instead of using presets.
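For reference, a minimal usage sketch based on that project's README (the property names here belong to SDAVAssetExportSession, not AVFoundation, so double-check them against the version of the library you pull in):
//Sketch: explicit codec settings instead of a preset.
SDAVAssetExportSession *encoder = [[SDAVAssetExportSession alloc] initWithAsset:anAsset];
encoder.outputFileType = AVFileTypeMPEG4;
encoder.outputURL = outputURL;
encoder.videoSettings = @{
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: @1280,
    AVVideoHeightKey: @720,
    AVVideoCompressionPropertiesKey: @{
        AVVideoAverageBitRateKey: @6000000,
        AVVideoProfileLevelKey: AVVideoProfileLevelH264High40
    }
};
encoder.audioSettings = @{
    AVFormatIDKey: @(kAudioFormatMPEG4AAC),
    AVNumberOfChannelsKey: @2,
    AVSampleRateKey: @44100,
    AVEncoderBitRateKey: @128000
};
[encoder exportAsynchronouslyWithCompletionHandler:^{
    if (encoder.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"Export done: %@", encoder.outputURL);
    } else {
        NSLog(@"Export failed: %@", encoder.error);
    }
}];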
I've been attempting to use AVAssetWriter to crop and export a video in an iOS app. Everything had been going swimmingly up until I attempted to add audio to the output. The current code (see below) returns the following error:
'NSInvalidArgumentException', reason: '*** -[AVAssetReader startReading] cannot be called again after reading has already started'
From what I've written below, this should be using a different reader than the one that was initialized, and so I don't understand the issue. This may be a simple issue of needing another pair of eyes on it, so here's the code.
-(void)cropVideoAsset:(NSURL *)fileURL
{
NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"croppedVideo.mov"];
toUrl = [NSURL fileURLWithPath:outputPath];
if([[NSFileManager defaultManager] fileExistsAtPath:outputPath])
{[[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];}
AVAsset *asset = [AVAsset assetWithURL:fileURL];
NSLog(#"asset: %f", CMTimeGetSeconds(asset.duration));
NSDictionary* compressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoProfileLevelH264Main31, AVVideoProfileLevelKey,
[NSNumber numberWithInt:2500000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt: 5], AVVideoMaxKeyFrameIntervalKey,
nil];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:500], AVVideoWidthKey,
[NSNumber numberWithInt:500], AVVideoHeightKey,
AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
compressionSettings, AVVideoCompressionPropertiesKey,
nil];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
writerInput.expectsMediaDataInRealTime = true;
NSError *error;
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:toUrl fileType:AVFileTypeMPEG4 error:&error];
[assetWriter addInput:writerInput];
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
writerInput.transform = videoTrack.preferredTransform;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *asset_reader_output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:videoOptions];
[assetReader addOutput:asset_reader_output];
//audio setup
NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInt:1], AVNumberOfChannelsKey,
[NSNumber numberWithFloat:44100.0], AVSampleRateKey,
[NSNumber numberWithInt:64000], AVEncoderBitRateKey,
nil];
AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:readerOutput];
audioWriterInput.expectsMediaDataInRealTime = true;
[assetWriter addInput:audioWriterInput];
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
[assetReader startReading];
dispatch_queue_t _processingQueue = dispatch_queue_create("assetAudioWriterQueue", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:
^{
while(writerInput.readyForMoreMediaData)
{
switch(assetReader.status)
{
case AVAssetReaderStatusReading:
{
CMSampleBufferRef sampleBuffer = [asset_reader_output copyNextSampleBuffer];
if (sampleBuffer)
{
BOOL result = [writerInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
if (!result)
{
[SVProgressHUD dismiss];
[self enableView];
NSString *errorMessage = [NSString stringWithFormat:@"We're sorry, an error occurred during the video capture.\n\n%@\n\nPlease try again.", assetWriter.error.localizedDescription];
[UIAlertView simpleAlertWithTitle:@"Video Error" andMessage:errorMessage];
[assetReader cancelReading];
break;
}
}
}
break;
case AVAssetReaderStatusCompleted:
{
[self handleAudioWithReader:audioReader writer:assetWriter input:audioWriterInput andOutput:readerOutput];
}
break;
case AVAssetReaderStatusFailed:
[SVProgressHUD dismiss];
[self enableView];
[UIAlertView simpleAlertWithTitle:@"Video Error" andMessage:@"We're sorry, an error occurred during the video capture. Please try again."];
[assetWriter cancelWriting];
break;
}
}
}];
}
and the requisite call
-(void)handleAudioWithReader:(AVAssetReader *)audioReader writer:(AVAssetWriter *)videoWriter input:(AVAssetWriterInput *)audioWriterInput andOutput:(AVAssetReaderOutput *)readerOutput
{
[audioReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
{
NSLog(#"Asset Writer ready :%d", audioWriterInput.readyForMoreMediaData);
while (audioWriterInput.readyForMoreMediaData)
{
CMSampleBufferRef nextBuffer;
if([audioReader status] == AVAssetReaderStatusReading && (nextBuffer = [readerOutput copyNextSampleBuffer]))
{
if (nextBuffer)
{
NSLog(#"NextBuffer");
[audioWriterInput appendSampleBuffer:nextBuffer];
}
}
else
{
[audioWriterInput markAsFinished];
switch ([audioReader status])
{
case AVAssetReaderStatusCompleted:
[videoWriter finishWriting];
[audioWriterInput markAsFinished];
dispatch_async(dispatch_get_main_queue(),^{[self saveAssetToLibraryAtURL:toUrl];});
break;
}
}
}
}];
}
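For what it's worth, this looks like the same pattern as the earlier crash in this thread: requestMediaDataWhenReadyOnQueue: invokes its block repeatedly and the while loop keeps spinning after the switch, so handleAudioWithReader: (and with it [audioReader startReading]) can fire more than once. A guard analogous to the earlier answer's fix would be (a sketch, untested against this exact code):
case AVAssetReaderStatusCompleted:
{
    //Only start the audio pass once; without this guard the loop calls
    //handleAudioWithReader: repeatedly and the second startReading throws
    //the NSInvalidArgumentException quoted above.
    if (audioReader.status == AVAssetReaderStatusUnknown) {
        [self handleAudioWithReader:audioReader writer:assetWriter input:audioWriterInput andOutput:readerOutput];
    }
}
break;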