I am trying to generate a video from images with AVAssetWriter and add a Ken Burns effect to each image. This is my code for creating a video from an array of images.
The question is how to implement the Ken Burns effect so that each image plays for 5 seconds.
- (void) writeImagesAsMovie:(NSArray *)array toPath:(NSString*)path
{
UIImage *first = array[0];
CGSize frameSize = first.size;
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
if(error) {
NSLog(#"error creating AssetWriter: %#",[error description]);
}
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
[videoWriter addInput:writerInput];
// fixes all errors
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
BOOL start = [videoWriter startWriting];
NSLog(#"Session started? %d", start);
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[first CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
if (result == NO) //fails on 3GS, but works on iPhone 4
NSLog(@"failed to append buffer");
if(buffer)
CVBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.05];
int fps = 1;
int i = 0;
for (UIImage *imgFrame in array)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
i++;
NSLog(#"inside for loop %d ",i);
CMTime frameTime = CMTimeMake(5, fps);
CMTime lastTime=CMTimeMake(i, fps);
CMTime presentTime=CMTimeAdd(lastTime, frameTime);
buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
if (result == NO)
{
NSLog(#"failed to append buffer");
NSLog(#"The error is %#", [videoWriter error]);
}
if(buffer)
CVBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.05];
}
else
{
NSLog(#"error");
i--;
}
[NSThread sleepForTimeInterval:0.02];
}
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
}];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
LRProjectListViewController * proj = (LRProjectListViewController*)[[(UINavigationController*)DELEGATE.window.rootViewController viewControllers] firstObject];
[proj playVideoWithUrl:path];
}
It depends on the frame rate you are writing out in your composition. Assuming that it's 30FPS, then in your for loop, you'll need 30 frames in the array for each second of video.
for (UIImage *imgFrame in array)
Alternatively, you can modify your code to call appendPixelBuffer multiple times for each frame. If you want an image to play for 5 seconds, then you'd need to loop (FPS * 5) times, as in the sketch below. But without manipulating the frame, that's just going to give you a static picture for 5 seconds.
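For illustration, here is a minimal sketch of that alternative, reusing the adaptor, videoWriter, and the pixelBufferFromCGImage: helper from the question above; the fps value and the back-off loop are my assumptions, not the asker's code:
// Sketch: hold each image on screen for 5 seconds by appending the same buffer FPS * 5 times.
int32_t fps = 30;
int64_t frameCount = 0;
for (UIImage *image in array)
{
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[image CGImage]];
    for (int repeat = 0; repeat < fps * 5; repeat++)
    {
        // Crude back-off; a production version should use requestMediaDataWhenReadyOnQueue: instead.
        while (!adaptor.assetWriterInput.readyForMoreMediaData)
            [NSThread sleepForTimeInterval:0.01];
        CMTime presentTime = CMTimeMake(frameCount, fps);
        if (![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime])
            NSLog(@"failed to append buffer: %@", videoWriter.error);
        frameCount++;
    }
    CVPixelBufferRelease(buffer);
}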
Without knowing the details of what you want to achieve, I think that this is not the best way to handle this problem.
Instead, look at using CAAnimations with AVVideoCompositionCoreAnimationTool. This works directly with AVAssetExportSession, which is MUCH easier than wrestling with AVAssetWriter unless you have a very specific requirement (typically, needing to manipulate the actual video frames).
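As a rough sketch of that approach (my illustration, not code from the question), a Ken Burns-style move can be expressed as a CABasicAnimation on a layer holding the image, composited with AVVideoCompositionCoreAnimationTool; the image, videoSize, and timing values below are assumptions:
// Sketch: animate a slow zoom on an image layer and attach it to a video composition.
CGSize videoSize = CGSizeMake(1280, 720);         // assumed render size
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
CALayer *imageLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = parentLayer.frame;
imageLayer.frame = parentLayer.frame;
imageLayer.contents = (__bridge id)image.CGImage; // 'image' is the UIImage for this 5-second segment
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:imageLayer];

// Ken Burns: zoom from 1.0x to 1.2x over 5 seconds, starting at the beginning of the video.
CABasicAnimation *zoom = [CABasicAnimation animationWithKeyPath:@"transform.scale"];
zoom.fromValue = @1.0;
zoom.toValue = @1.2;
zoom.duration = 5.0;
zoom.beginTime = AVCoreAnimationBeginTimeAtZero;
zoom.removedOnCompletion = NO;
zoom.fillMode = kCAFillModeForwards;
[imageLayer addAnimation:zoom forKey:@"kenBurns"];

AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool
    videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
// videoComposition (plus instructions covering the composition's time range) is then assigned
// to AVAssetExportSession.videoComposition before exporting.
A pan can be added the same way by animating the layer's position alongside the scale.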
Related
I am recording a video in my app; the recording duration is sent by the server. The server tests for certain gestures in the video and, if it fails, sends back a flag asking me to re-record and resend.
Problem: the initial video is of the correct duration. But in the event of a server 'retry request', the videos start with a black screen for a certain amount of time, and only then does the recorded video play.
flow:
- (void)viewDidLoad {
[super viewDidLoad];
// get recording time
[self getRecordingData];
}
recording function:
-(void)startRecording {
self.isRecording = YES;
self.isProcessing = YES;
[self startWritingToVideoFile];
[self logicMethodToStopRecording];
}
writing to video file function:
-(void)startWritingToVideoFile{
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:640], AVVideoWidthKey, [NSNumber numberWithInt:480], AVVideoHeightKey, AVVideoCodecTypeH264, AVVideoCodecKey,nil];
self.assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
[self.assetWriterInput setTransform: CGAffineTransformMakeRotation( ( 90 * M_PI ) / 180 )];
self.pixelBufferAdaptor =
[[AVAssetWriterInputPixelBufferAdaptor alloc]
initWithAssetWriterInput:self.assetWriterInput
sourcePixelBufferAttributes:
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA],
kCVPixelBufferPixelFormatTypeKey,
nil]];
self.videoPath = [Utilities pathForTemporaryFileWithSuffix:@"mov"];
NSURL *videoURL = [NSURL fileURLWithPath:self.videoPath];
/* Asset writer with MPEG4 format*/
self.assetWriterMyData = [[AVAssetWriter alloc]
initWithURL: videoURL
fileType:AVFileTypeMPEG4
error:nil];
[self.assetWriterMyData addInput:self.assetWriterInput];
self.assetWriterInput.expectsMediaDataInRealTime = YES;
[self.assetWriterMyData startWriting];
[self.assetWriterMyData startSessionAtSourceTime:kCMTimeZero];
self.isReadyToWrite = YES;
}
logic method to stop recording:
-(void)logicMethodToStopRecording{
float time = [self.timeFromServer floatValue];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, time * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
[self stopRecording];
});
}
actual stop recording Method:
-(void)stopRecording{
[self stopWritingToVideoFile];
}
-(void)stopWritingToVideoFile {
[self.assetWriterMyData finishWritingWithCompletionHandler:^{
[self showLoading];
if(!self.continueRunning){
return;
}
[self sendVideo];
// sends video file referenced in startWritingToVideoFile method
}];
}
The sending video method:
-(void)sendVideo{
[self.xxxxxx sendAPI:self.userToVerifyUserId videoPath:self.videoPath callback:^(NSString *jsonResponse){
[self handleResponse:jsonResponse];
}];
}
The response is handled by the following method:
-(void)handleResponse: (NSString*)result{
[Utilities deleteFile:self.videoPath];
//if no success but retry is true. re-record and re-send.
if(!self.success && retry){
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 3.0 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
[self startRecording]; // start recording called again
});
}
}
Suppose the initial file I create has x seconds of video: it is being generated correctly.
However, during re-attempts:
I am deleting the old file and recreating new files, yet the video output in the second and third attempts starts with a black screen lasting on the order of n+x and 2n+x, and only then shows the recording. Please help.
************** EDIT ****************************************************
Additionally the callback method for AVCaptureSession does the following:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// a very dense way to keep track of the time at which this frame
// occurs relative to the output stream, but it's just an example!
static int64_t frameNumber = 0;
if(self.assetWriterInput.readyForMoreMediaData){
if(self.pixelBufferAdaptor != nil){
[self.pixelBufferAdaptor appendPixelBuffer:imageBuffer
withPresentationTime:CMTimeMake(frameNumber, 25)];
frameNumber++;
}
}
}
**************************EDIT - 2 *********************************************
+(NSString *)pathForTemporaryFileWithSuffix:(NSString *)suffix
{
NSString * result;
CFUUIDRef uuid;
CFStringRef uuidStr;
uuid = CFUUIDCreate(NULL);
assert(uuid != NULL);
uuidStr = CFUUIDCreateString(NULL, uuid);
assert(uuidStr != NULL);
result = [NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.%@", uuidStr, suffix]];
assert(result != nil);
CFRelease(uuidStr);
CFRelease(uuid);
return result;
}
I'm working on a personal iOS project that requires full-screen videos (15 seconds in length) to be uploaded to a backend over a 4G connection. While I can take videos just fine, the output file size comes out to 30MB, which makes me think I'm doing something drastically wrong when it comes to compression. Below is the code I'm using to set up the AssetWriter:
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
NSLog(#"Started Recording! *******************");
self.movieWriter = [AVAssetWriter assetWriterWithURL:fileURL fileType:AVFileTypeMPEG4 error:nil];
[self.movieWriter setShouldOptimizeForNetworkUse:YES];
NSDictionary *videoCleanApertureSettings = @{
AVVideoCleanApertureWidthKey: [NSNumber numberWithFloat:self.view.frame.size.width],
AVVideoCleanApertureHeightKey: [NSNumber numberWithFloat:self.view.frame.size.height],
AVVideoCleanApertureHorizontalOffsetKey: [NSNumber numberWithInt:10],
AVVideoCleanApertureVerticalOffsetKey: [NSNumber numberWithInt:10],
};
NSDictionary *videoCompressionSettings = @{
AVVideoAverageBitRateKey: [NSNumber numberWithFloat:5000000.0],
AVVideoMaxKeyFrameIntervalKey: [NSNumber numberWithInteger:1],
AVVideoProfileLevelKey: AVVideoProfileLevelH264Baseline30,
AVVideoCleanApertureKey: videoCleanApertureSettings,
};
NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithFloat:self.view.frame.size.width],
AVVideoHeightKey: [NSNumber numberWithFloat:self.view.frame.size.height],
AVVideoCompressionPropertiesKey: videoCompressionSettings,
};
self.movieWriterVideoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
self.movieWriterVideoInput.expectsMediaDataInRealTime = YES;
[self.movieWriter addInput:self.movieWriterVideoInput];
NSDictionary *audioSettings = @{AVFormatIDKey: [NSNumber numberWithInteger:kAudioFormatMPEG4AAC],
AVSampleRateKey: [NSNumber numberWithFloat:44100.0],
AVNumberOfChannelsKey: [NSNumber numberWithInteger:1],
};
self.movieWriterAudioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
self.movieWriterAudioInput.expectsMediaDataInRealTime = YES;
[self.movieWriter addInput:self.movieWriterAudioInput];
[self.movieWriter startWriting];
}
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(#"Done Recording!");
[self.movieWriterVideoInput markAsFinished];
[self.movieWriterAudioInput markAsFinished];
[self.movieWriter finishWritingWithCompletionHandler:^{
AVURLAsset *compressedVideoAsset = [[AVURLAsset alloc] initWithURL:self.movieWriter.outputURL options:nil];
//Upload video to server
}];
}
For the setup of the actual session I'm using the following code:
//Indicate that some changes will be made to the session
[self.captureSession beginConfiguration];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
AVCaptureInput* currentCameraInput = [self.captureSession.inputs objectAtIndex:0];
for (AVCaptureInput *captureInput in self.captureSession.inputs) {
[self.captureSession removeInput:captureInput];
}
//Get currently selected camera and use for input
AVCaptureDevice *videoCamera = nil;
if(((AVCaptureDeviceInput*)currentCameraInput).device.position == AVCaptureDevicePositionBack)
{
videoCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
}
else
{
videoCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
}
//Add input to session
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoCamera error:nil];
[self.captureSession addInput:newVideoInput];
//Add mic input to the session
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];
[self.captureSession addInput:audioInput];
//Add movie output to session
for (AVCaptureOutput *output in self.captureSession.outputs) {
[self.captureSession removeOutput:output];
}
self.movieOutput = [AVCaptureMovieFileOutput new];
int32_t preferredTimeScale = 30; //Frames per second
self.movieOutput.maxRecordedDuration = CMTimeMakeWithSeconds(15, preferredTimeScale); //Setting the max video length
[self.captureSession addOutput:self.movieOutput];
//Commit all the configuration changes at once
[self.captureSession commitConfiguration];
I know that if I change AVCaptureSessionPresetHigh to a different preset I can reduce the file size of the final video, but unfortunately it looks like AVCaptureSessionPresetiFrame1280x720 is the only one that provides the full frame I'm trying to capture (which leaves me with an output size of about 20MB, still too large for 4G uploads).
I've spent a lot of time googling and searching through other posts on Stack Overflow, but I can't seem to figure out what I'm doing wrong for the life of me and any help at all would be greatly appreciated.
You need a PhD to work with AVAssetWriter - it's non-trivial: https://developer.apple.com/library/mac/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/05_Export.html#//apple_ref/doc/uid/TP40010188-CH9-SW1
There's an amazing library for doing exactly what you want which is just an AVAssetExportSession drop-in replacement with more crucial features like changing the bit rate: https://github.com/rs/SDAVAssetExportSession
Here's how to use it:
-(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
SDAVAssetExportSession *encoder = [SDAVAssetExportSession.alloc initWithAsset:[AVAsset assetWithURL:[info objectForKey:UIImagePickerControllerMediaURL]]];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
self.myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:#"lowerBitRate-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:self.myPathDocs];
encoder.outputURL=url;
encoder.outputFileType = AVFileTypeMPEG4;
encoder.shouldOptimizeForNetworkUse = YES;
encoder.videoSettings = @
{
AVVideoCodecKey: AVVideoCodecH264,
AVVideoCompressionPropertiesKey: @
{
AVVideoAverageBitRateKey: @2300000, // Lower bit rate here
AVVideoProfileLevelKey: AVVideoProfileLevelH264High40,
},
};
encoder.audioSettings = @
{
AVFormatIDKey: @(kAudioFormatMPEG4AAC),
AVNumberOfChannelsKey: @2,
AVSampleRateKey: @44100,
AVEncoderBitRateKey: @128000,
};
[encoder exportAsynchronouslyWithCompletionHandler:^
{
int status = encoder.status;
if (status == AVAssetExportSessionStatusCompleted)
{
AVAssetTrack *videoTrack = nil;
AVURLAsset *asset = [AVAsset assetWithURL:encoder.outputURL];
NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
videoTrack = [videoTracks objectAtIndex:0];
float frameRate = [videoTrack nominalFrameRate];
float bps = [videoTrack estimatedDataRate];
NSLog(#"Frame rate == %f",frameRate);
NSLog(#"bps rate == %f",bps/(1024.0 * 1024.0));
NSLog(#"Video export succeeded");
// encoder.outputURL <- this is what you want!!
}
else if (status == AVAssetExportSessionStatusCancelled)
{
NSLog(#"Video export cancelled");
}
else
{
NSLog(#"Video export failed with error: %# (%d)", encoder.error.localizedDescription, encoder.error.code);
}
}];
}
I'm coding an iPad app in which I need to join mp4 file with different resolution. To do so I'm using a combination of AVAssetReader to read the mp4 source files and AVAssetWriter to write those source files in a single mp4 output file.
I tried using AVAssetExportSession, but the problem I had was that there were black frames between the joined files.
The problem I'm facing now is that everything seems OK but the completion handler of the AVAssetWriter is never called.
Here is my method, which takes as input a list of mp4 file URLs, a single output file URL, and a completion handler.
- (void)resizeAndJoinVideosAtURLs:(NSArray *)videoURLs toOutputURL:(NSURL *)outputURL withHandler:(void(^)(NSURL *fileURL))handler
{
/*
First step: create the writer and writer input
*/
NSError *error = nil;
self.videoAssetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:&error];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,[NSNumber numberWithInt:640], AVVideoWidthKey,[NSNumber numberWithInt:480], AVVideoHeightKey,nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
videoWriterInput.expectsMediaDataInRealTime = NO;
if([self.videoAssetWriter canAddInput:videoWriterInput])
{
[self.videoAssetWriter addInput:videoWriterInput];
[self.videoAssetWriter startWriting];
[self.videoAssetWriter startSessionAtSourceTime:kCMTimeZero];
/*
Second step: for each video URL given create a reader and an reader input
*/
for(NSURL *videoURL in videoURLs)
{
NSLog(#"Processing file: %#",videoURL);
AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
AVAssetReader *videoAssetReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:&error];
AVAssetTrack *videoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoAssetTrack outputSettings:videoOptions];
videoAssetTrackOutput.alwaysCopiesSampleData = NO;
if([videoAssetReader canAddOutput:videoAssetTrackOutput])
{
[videoAssetReader addOutput:videoAssetTrackOutput];
[videoAssetReader startReading];
/*
Step three: copy the buffers from the reader to the writer
*/
while ([videoAssetReader status] == AVAssetReaderStatusReading)
{
if(![videoWriterInput isReadyForMoreMediaData]) continue;
CMSampleBufferRef buffer = [videoAssetTrackOutput copyNextSampleBuffer];
if(buffer)
{
[videoWriterInput appendSampleBuffer:buffer];
CFRelease(buffer);
}
}
} else NSLog(#"ERROR: %#",error);
}
[videoWriterInput markAsFinished];
} else NSLog(#"ERROR: %#",error);
__weak ClipBuilder *weakself = self;
[self.videoAssetWriter finishWritingWithCompletionHandler:^{
handler(outputURL);
weakself.videoAssetWriter = nil;
}];
}
My output file exists, and the AVAssetWriter exists (it is a property), but the completion handler is still never called. What can explain this?
Thanks for your help.
Here is the solution I finally implemented to join mp4 file with different resolution with a combination of AVAssetReader / AVAssetWriter.
- (void)reencodeComposition:(AVComposition *)composition toMP4File:(NSURL *)mp4FileURL withCompletionHandler:(void (^)(void))handler
{
self.status = EncoderStatusEncoding;
/*
Create the asset writer to write the file on disk
*/
NSError *error = nil;
if([[NSFileManager defaultManager] fileExistsAtPath:mp4FileURL.path isDirectory:nil])
{
if(![[NSFileManager defaultManager] removeItemAtPath:mp4FileURL.path error:&error])
{
[self failWithError:error withCompletionHandler:handler];
return;
}
}
self.assetWriter = [[AVAssetWriter alloc] initWithURL:mp4FileURL fileType:AVFileTypeMPEG4 error:&error];
if(self.assetWriter)
{
/*
Get the audio and video track of the composition
*/
AVAssetTrack *videoAssetTrack = [composition tracksWithMediaType:AVMediaTypeVideo].firstObject;
AVAssetTrack *audioAssetTrack = [composition tracksWithMediaType:AVMediaTypeAudio].firstObject;
NSDictionary *videoSettings = @{AVVideoCodecKey:AVVideoCodecH264, AVVideoWidthKey:@(self.imageWidth), AVVideoHeightKey:@(self.imageHeight)};
/*
Add an input to be able to write the video in the file
*/
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
videoWriterInput.expectsMediaDataInRealTime = YES;
if([self.assetWriter canAddInput:videoWriterInput])
{
[self.assetWriter addInput:videoWriterInput];
/*
Add an input to be able to write the audio in the file
*/
// Use this only if you know the format
// CMFormatDescriptionRef audio_fmt_desc_ = nil;
//
// AudioStreamBasicDescription audioFormat;
// bzero(&audioFormat, sizeof(audioFormat));
// audioFormat.mSampleRate = 44100;
// audioFormat.mFormatID = kAudioFormatMPEG4AAC;
// audioFormat.mFramesPerPacket = 1024;
// audioFormat.mChannelsPerFrame = 2;
// int bytes_per_sample = sizeof(float);
// audioFormat.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked;
//
// audioFormat.mBitsPerChannel = bytes_per_sample * 8;
// audioFormat.mBytesPerPacket = bytes_per_sample * 2;
// audioFormat.mBytesPerFrame = bytes_per_sample * 2;
//
// CMAudioFormatDescriptionCreate(kCFAllocatorDefault,&audioFormat,0,NULL,0,NULL,NULL,&audio_fmt_desc_);
//
// AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil sourceFormatHint:audio_fmt_desc_];
//
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil sourceFormatHint:((__bridge CMAudioFormatDescriptionRef)audioAssetTrack.formatDescriptions.firstObject)];
audioWriterInput.expectsMediaDataInRealTime = YES;
if([self.assetWriter canAddInput:audioWriterInput])
{
[self.assetWriter addInput:audioWriterInput];
[self.assetWriter startWriting];
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];
/*
Create the asset reader to read the mp4 files on the disk
*/
AVAssetReader *assetReader = [[AVAssetReader alloc] initWithAsset:composition error:&error];
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
/*
Add an output to be able to retrieve the video in the files
*/
AVAssetReaderTrackOutput *videoAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoAssetTrack outputSettings:videoOptions];
videoAssetTrackOutput.alwaysCopiesSampleData = NO;
if([assetReader canAddOutput:videoAssetTrackOutput])
{
[assetReader addOutput:videoAssetTrackOutput];
/*
Add an output to be able to retrieve the audio in the files
*/
AVAssetReaderTrackOutput *audioAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:audioAssetTrack outputSettings:nil];
audioAssetTrackOutput.alwaysCopiesSampleData = NO;
if([assetReader canAddOutput:audioAssetTrackOutput])
{
[assetReader addOutput:audioAssetTrackOutput];
[assetReader startReading];
/*
Read the mp4 files until the end and copy them in the output file
*/
dispatch_group_t encodingGroup = dispatch_group_create();
dispatch_group_enter(encodingGroup);
[audioWriterInput requestMediaDataWhenReadyOnQueue:self.encodingQueue usingBlock:^{
while ([audioWriterInput isReadyForMoreMediaData])
{
CMSampleBufferRef nextSampleBuffer = [audioAssetTrackOutput copyNextSampleBuffer];
if (nextSampleBuffer)
{
[audioWriterInput appendSampleBuffer:nextSampleBuffer];
CFRelease(nextSampleBuffer);
}
else
{
[audioWriterInput markAsFinished];
dispatch_group_leave(encodingGroup);
break;
}
}
}];
dispatch_group_enter(encodingGroup);
[videoWriterInput requestMediaDataWhenReadyOnQueue:self.encodingQueue usingBlock:^{
while ([videoWriterInput isReadyForMoreMediaData])
{
CMSampleBufferRef nextSampleBuffer = [videoAssetTrackOutput copyNextSampleBuffer];
if (nextSampleBuffer)
{
[videoWriterInput appendSampleBuffer:nextSampleBuffer];
CFRelease(nextSampleBuffer);
}
else
{
[videoWriterInput markAsFinished];
dispatch_group_leave(encodingGroup);
break;
}
}
}];
dispatch_group_wait(encodingGroup, DISPATCH_TIME_FOREVER);
} else [self failWithError:error withCompletionHandler:handler];
} else [self failWithError:error withCompletionHandler:handler];
} else [self failWithError:error withCompletionHandler:handler];
} else [self failWithError:error withCompletionHandler:handler];
__weak Encoder *weakself = self;
[self.assetWriter finishWritingWithCompletionHandler:^{
self.status = EncoderStatusCompleted;
handler();
weakself.assetWriter = nil;
self.encodingQueue = nil;
}];
}
else [self failWithError:error withCompletionHandler:handler];
}
With
- (dispatch_queue_t)encodingQueue
{
if(!_encodingQueue)
{
_encodingQueue = dispatch_queue_create("com.myProject.encoding", NULL);
}
return _encodingQueue;
}
This implementation was written for my project TS2MP4, but in the end I didn't need it.
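For reference, a hypothetical call site (the encoder instance, composition, and output path below are my assumptions); note that the method blocks on dispatch_group_wait, so it should not be called on the main queue:
NSURL *outputURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"joined.mp4"]];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    // 'encoder' is an instance of the class above; 'composition' is the AVComposition built beforehand.
    [encoder reencodeComposition:composition toMP4File:outputURL withCompletionHandler:^{
        NSLog(@"Re-encode finished, file at %@", outputURL);
    }];
});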
I want to change the container of .mov video files that I pick using UIImagePickerController and compress via AVAssetExportSession (with AVAssetExportPresetMediumQuality and shouldOptimizeForNetworkUse = YES) to an .mp4 container.
I need a programmatic way / sample code to perform the fastest possible trans-wrap in an iPhone/iPad application.
I tried setting the AVAssetExportSession.outputFileType property to AVFileTypeMPEG4, but it is not supported and I got an exception.
I tried to do this transform using AVAssetWriter by specifying fileType:AVFileTypeMPEG4. I did get an .mp4 output file, but it was not a trans-wrap: the output file was 3x bigger than the source, and the conversion took 128 seconds for a video with a 60-second duration.
I need a solution that runs quickly and keeps the file size.
This is the code I use to convert .mov to .mp4:
I set the assetWriter options in the setUpReaderAndWriterReturningError: method.
#import "MCVideoConverter.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVAssetReader.h>
#import <AVFoundation/AVAssetReaderOutput.h>
#import <AVFoundation/AVAssetWriter.h>
#import <AVFoundation/AVAssetWriterInput.h>
#import <AVFoundation/AVMediaFormat.h>
#import <AVFoundation/AVAudioSettings.h>
#import <AVFoundation/AVVideoSettings.h>
#import <AVFoundation/AVAssetImageGenerator.h>
#import <AVFoundation/AVTime.h>
#import <CoreMedia/CMSampleBuffer.h>
@protocol RWSampleBufferChannelDelegate;
@interface RWSampleBufferChannel : NSObject
{
@private
AVAssetReaderOutput *assetReaderOutput;
AVAssetWriterInput *assetWriterInput;
dispatch_block_t completionHandler;
dispatch_queue_t serializationQueue;
BOOL finished; // only accessed on serialization queue
}
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)assetReaderOutput assetWriterInput:(AVAssetWriterInput *)assetWriterInput;
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)completionHandler; // delegate is retained until completion handler is called. Completion handler is guaranteed to be called exactly once, whether reading/writing finishes, fails, or is cancelled. Delegate may be nil.
- (void)cancel;
@property (nonatomic, readonly) NSString *mediaType;
@end
@protocol RWSampleBufferChannelDelegate <NSObject>
@required
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
@interface MCVideoConverter () <RWSampleBufferChannelDelegate>
// These three methods are always called on the serialization dispatch queue
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError; // make sure "tracks" key of asset is loaded before calling this
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError;
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error;
@end
@implementation MCVideoConverter
+ (NSArray *)readableTypes
{
return [AVURLAsset audiovisualTypes];;
}
+ (BOOL)canConcurrentlyReadDocumentsOfType:(NSString *)typeName
{
return YES;
}
- (id)init
{
self = [super init];
if (self)
{
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[asset release];
[outputURL release];
[assetReader release];
[assetWriter release];
[audioSampleBufferChannel release];
[videoSampleBufferChannel release];
if (serializationQueue)
dispatch_release(serializationQueue);
[super dealloc];
}
@synthesize asset=asset;
@synthesize timeRange=timeRange;
@synthesize writingSamples=writingSamples;
@synthesize outputURL=outputURL;
@synthesize propgerssView;
- (void)convertVideo:(NSURL*) inputURL outputURL: (NSURL*) _outputURL progress:(UIProgressView*) _propgerssView
{
self.asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
self.propgerssView = _propgerssView;
cancelled = NO;
[self performSelector:@selector(startProgressSheetWithURL:) withObject:_outputURL afterDelay:0.0]; // avoid starting a new sheet while in
}
- (void)startProgressSheetWithURL:(NSURL *)localOutputURL
{
[self setOutputURL:localOutputURL];
[self setWritingSamples:YES];
AVAsset *localAsset = [self asset];
[localAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"tracks", @"duration", nil] completionHandler:^
{
// Dispatch the setup work to the serialization queue, to ensure this work is serialized with potential cancellation
dispatch_async(serializationQueue, ^{
// Since we are doing these things asynchronously, the user may have already cancelled on the main thread. In that case, simply return from this block
if (cancelled)
return;
BOOL success = YES;
NSError *localError = nil;
success = ([localAsset statusOfValueForKey:#"tracks" error:&localError] == AVKeyValueStatusLoaded);
if (success)
success = ([localAsset statusOfValueForKey:#"duration" error:&localError] == AVKeyValueStatusLoaded);
if (success)
{
[self setTimeRange:CMTimeRangeMake(kCMTimeZero, [localAsset duration])];
// AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists
NSFileManager *fm = [NSFileManager defaultManager];
NSString *localOutputPath = [localOutputURL path];
if ([fm fileExistsAtPath:localOutputPath])
success = [fm removeItemAtPath:localOutputPath error:&localError];
}
// Set up the AVAssetReader and AVAssetWriter, then begin writing samples or flag an error
if (success)
success = [self setUpReaderAndWriterReturningError:&localError];
if (success)
success = [self startReadingAndWritingReturningError:&localError];
if (!success)
[self readingAndWritingDidFinishSuccessfully:success withError:localError];
});
}];
}
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
AVAsset *localAsset = [self asset];
NSURL *localOutputURL = [self outputURL];
// Create asset reader and asset writer
assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&localError];
success = (assetReader != nil);
if (success)
{
//changed assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeQuickTimeMovie error:&localError];
assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeMPEG4 error:&localError];
success = (assetWriter != nil);
}
// Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
if (success)
{
AVAssetTrack *audioTrack = nil, *videoTrack = nil;
// Grab first audio track and first video track, if the asset has them
NSArray *audioTracks = [localAsset tracksWithMediaType:AVMediaTypeAudio];
if ([audioTracks count] > 0)
audioTrack = [audioTracks objectAtIndex:0];
NSArray *videoTracks = [localAsset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0)
videoTrack = [videoTracks objectAtIndex:0];
if (audioTrack)
{
// Decompress to Linear PCM with the asset reader
NSDictionary *decompressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM], AVFormatIDKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:decompressionAudioSettings];
[assetReader addOutput:output];
AudioChannelLayout stereoChannelLayout = {
.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
.mChannelBitmap = 0,
.mNumberChannelDescriptions = 0
};
NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
// Compress to 128kbps AAC with the asset writer
NSDictionary *compressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInteger:128000], AVEncoderBitRateKey,
[NSNumber numberWithInteger:44100], AVSampleRateKey,
channelLayoutAsData, AVChannelLayoutKey,
[NSNumber numberWithUnsignedInteger:2], AVNumberOfChannelsKey,
nil];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[audioTrack mediaType] outputSettings:compressionAudioSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
audioSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
if (videoTrack)
{
// Decompress to ARGB with the asset reader
NSDictionary *decompressionVideoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (id)kCVPixelBufferPixelFormatTypeKey,
[NSDictionary dictionary], (id)kCVPixelBufferIOSurfacePropertiesKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:decompressionVideoSettings];
[assetReader addOutput:output];
// Get the format description of the track, to fill in attributes of the video stream that we don't want to change
CMFormatDescriptionRef formatDescription = NULL;
NSArray *formatDescriptions = [videoTrack formatDescriptions];
if ([formatDescriptions count] > 0)
formatDescription = (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];
// Grab track dimensions from format description
CGSize trackDimensions = {
.width = 0.0,
.height = 0.0,
};
if (formatDescription)
trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false);
else
trackDimensions = [videoTrack naturalSize];
// Grab clean aperture, pixel aspect ratio from format description
NSMutableDictionary *compressionSettings = nil;
// [NSMutableDictionary dictionaryWithObjectsAndKeys:
// AVVideoProfileLevelH264Baseline30, AVVideoProfileLevelKey,
// [NSNumber numberWithInt:960000], AVVideoAverageBitRateKey,
// [NSNumber numberWithInt:1],AVVideoMaxKeyFrameIntervalKey,
// nil ];
//NSDictionary *videoSettings = nil;
if (formatDescription)
{
NSDictionary *cleanAperture = nil;
NSDictionary *pixelAspectRatio = nil;
CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture);
if (cleanApertureFromCMFormatDescription)
{
cleanAperture = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth), AVVideoCleanApertureWidthKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight), AVVideoCleanApertureHeightKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset), AVVideoCleanApertureHorizontalOffsetKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset), AVVideoCleanApertureVerticalOffsetKey,
nil];
}
CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio);
if (pixelAspectRatioFromCMFormatDescription)
{
pixelAspectRatio = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing), AVVideoPixelAspectRatioHorizontalSpacingKey,
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing), AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
}
if (cleanAperture || pixelAspectRatio)
{
if (cleanAperture)
[compressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey];
if (pixelAspectRatio)
[compressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey];
}
}
// Compress to H.264 with the asset writer
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithDouble:trackDimensions.width], AVVideoWidthKey,
[NSNumber numberWithDouble:trackDimensions.height], AVVideoHeightKey,
nil];
if (compressionSettings)
[videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[videoTrack mediaType] outputSettings:videoSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
videoSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
}
if (outError)
*outError = localError;
return success;
}
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
// Instruct the asset reader and asset writer to get ready to do work
success = [assetReader startReading];
if (!success)
localError = [assetReader error];
if (success)
{
success = [assetWriter startWriting];
if (!success)
localError = [assetWriter error];
}
if (success)
{
dispatch_group_t dispatchGroup = dispatch_group_create();
// Start a sample-writing session
[assetWriter startSessionAtSourceTime:[self timeRange].start];
// Start reading and writing samples
if (audioSampleBufferChannel)
{
// Only set audio delegate for audio-only assets, else let the video channel drive progress
id <RWSampleBufferChannelDelegate> delegate = nil;
if (!videoSampleBufferChannel)
delegate = self;
dispatch_group_enter(dispatchGroup);
[audioSampleBufferChannel startWithDelegate:delegate completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
if (videoSampleBufferChannel)
{
dispatch_group_enter(dispatchGroup);
[videoSampleBufferChannel startWithDelegate:self completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
// Set up a callback for when the sample writing is finished
dispatch_group_notify(dispatchGroup, serializationQueue, ^{
BOOL finalSuccess = YES;
NSError *finalError = nil;
if (cancelled)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
else
{
if ([assetReader status] == AVAssetReaderStatusFailed)
{
finalSuccess = NO;
finalError = [assetReader error];
}
if (finalSuccess)
{
finalSuccess = [assetWriter finishWriting];
if (!finalSuccess)
finalError = [assetWriter error];
}
}
[self readingAndWritingDidFinishSuccessfully:finalSuccess withError:finalError];
});
dispatch_release(dispatchGroup);
}
if (outError)
*outError = localError;
return success;
}
- (void)cancel
{
self.propgerssView = nil;
// Dispatch cancellation tasks to the serialization queue to avoid races with setup and teardown
dispatch_async(serializationQueue, ^{
[audioSampleBufferChannel cancel];
[videoSampleBufferChannel cancel];
cancelled = YES;
});
}
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error
{
NSLog(#"%s[%d] - success = %d error = %#", __FUNCTION__, __LINE__, success, error);
if (!success)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
// Tear down ivars
[assetReader release];
assetReader = nil;
[assetWriter release];
assetWriter = nil;
[audioSampleBufferChannel release];
audioSampleBufferChannel = nil;
[videoSampleBufferChannel release];
videoSampleBufferChannel = nil;
cancelled = NO;
// Dispatch UI-related tasks to the main queue
dispatch_async(dispatch_get_main_queue(), ^{
if (!success)
{
}
[self setWritingSamples:NO];
});
}
static double progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer, CMTimeRange timeRange)
{
CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
progressTime = CMTimeSubtract(progressTime, timeRange.start);
CMTime sampleDuration = CMSampleBufferGetDuration(sampleBuffer);
if (CMTIME_IS_NUMERIC(sampleDuration))
progressTime= CMTimeAdd(progressTime, sampleDuration);
return CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(timeRange.duration);
}
static void removeARGBColorComponentOfPixelBuffer(CVPixelBufferRef pixelBuffer, size_t componentIndex)
{
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
static const size_t bytesPerPixel = 4; // constant for ARGB pixel format
unsigned char *base = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
for (size_t row = 0; row < bufferHeight; ++row)
{
for (size_t column = 0; column < bufferWidth; ++column)
{
unsigned char *pixel = base + (row * bytesPerRow) + (column * bytesPerPixel);
pixel[componentIndex] = 0;
}
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
+ (size_t)componentIndexFromFilterTag:(NSInteger)filterTag
{
return (size_t)filterTag; // we set up the tags in the popup button to correspond directly with the index they modify
}
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
CVPixelBufferRef pixelBuffer = NULL;
// Calculate progress (scale of 0.0 to 1.0)
double progress = progressOfSampleBufferInTimeRange(sampleBuffer, [self timeRange]);
NSLog(#"%s[%d] - progress = %f", __FUNCTION__, __LINE__, progress);
// Grab the pixel buffer from the sample buffer, if possible
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer && (CFGetTypeID(imageBuffer) == CVPixelBufferGetTypeID()))
{
pixelBuffer = (CVPixelBufferRef)imageBuffer;
if (filterTag >= 0) // -1 means "no filtering, please"
removeARGBColorComponentOfPixelBuffer(pixelBuffer, [[self class] componentIndexFromFilterTag:filterTag]);
}
}
@end
@interface RWSampleBufferChannel ()
- (void)callCompletionHandlerIfNecessary; // always called on the serialization queue
@end
@implementation RWSampleBufferChannel
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)localAssetReaderOutput assetWriterInput:(AVAssetWriterInput *)localAssetWriterInput
{
self = [super init];
if (self)
{
assetReaderOutput = [localAssetReaderOutput retain];
assetWriterInput = [localAssetWriterInput retain];
finished = NO;
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[assetReaderOutput release];
[assetWriterInput release];
if (serializationQueue)
dispatch_release(serializationQueue);
[completionHandler release];
[super dealloc];
}
- (NSString *)mediaType
{
return [assetReaderOutput mediaType];
}
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)localCompletionHandler
{
completionHandler = [localCompletionHandler copy]; // released in -callCompletionHandlerIfNecessary
[assetWriterInput requestMediaDataWhenReadyOnQueue:serializationQueue usingBlock:^{
if (finished)
return;
BOOL completedOrFailed = NO;
// Read samples in a loop as long as the asset writer input is ready
while ([assetWriterInput isReadyForMoreMediaData] && !completedOrFailed)
{
CMSampleBufferRef sampleBuffer = [assetReaderOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
if ([delegate respondsToSelector:@selector(sampleBufferChannel:didReadSampleBuffer:)])
[delegate sampleBufferChannel:self didReadSampleBuffer:sampleBuffer];
BOOL success = [assetWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
[self callCompletionHandlerIfNecessary];
}];
}
- (void)cancel
{
dispatch_async(serializationQueue, ^{
[self callCompletionHandlerIfNecessary];
});
}
- (void)callCompletionHandlerIfNecessary
{
// Set state to mark that we no longer need to call the completion handler, grab the completion handler, and clear out the ivar
BOOL oldFinished = finished;
finished = YES;
if (oldFinished == NO)
{
[assetWriterInput markAsFinished]; // let the asset writer know that we will not be appending any more samples to this input
dispatch_block_t localCompletionHandler = [completionHandler retain];
[completionHandler release];
completionHandler = nil;
if (localCompletionHandler)
{
localCompletionHandler();
[localCompletionHandler release];
}
}
}
@end
Hey, it has been a long while, but I ended up with a good solution and it may help someone in the future.
My code:
-(void) compressVideo
{
asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
NSLog(#" %#", [AVAssetExportSession exportPresetsCompatibleWithAsset:asset]);
NSLog(#" %#", exportSession.supportedFileTypes);
NSLog(#"----------------------------------------- convert to mp4");
NSLog(#" %#", exportSession.supportedFileTypes);
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = [self outputVideoPath:#"outPut" ext:#"mp4"];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
ICQLog(#" exportSession.status = %d exportSession.error = %#", exportSession.status, exportSession.error);
if ( exportSession && (exportSession.status == AVAssetExportSessionStatusCompleted) )
{
ICQLog(#" exportSession.outputURL = %#", exportSession.outputURL);
// we need to remove temporary files
[[NSFileManager defaultManager] removeItemAtURL:videoUrl error:NULL];
[videoUrl release];
videoUrl = [exportSession.outputURL retain];
}
else
{
//TODO - report error
}
[exportSession release], exportSession = nil;
[asset release], asset = nil;
}];
}
I can't help with the trans-wrap stuff; I haven't got my head around it.
Is the main priority to get the file output as an .mp4 without having to reprocess it? If so, just use .mp4 as the file extension of the movie clip output by your code and this should work fine (a small sketch follows below). I used this approach today and it works. I didn't have to convert it from .mov to .mp4 because essentially an .mp4 file is the same as a .mov file with some additional standards-based functionality.
Hope this is of help.
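A minimal sketch of that suggestion, assuming movPath points at the .mov file your code just wrote (the path handling here is illustrative only):
// Sketch: rename the written .mov to .mp4 instead of re-encoding it.
NSError *renameError = nil;
NSString *mp4Path = [[movPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mp4"];
if (![[NSFileManager defaultManager] moveItemAtPath:movPath toPath:mp4Path error:&renameError])
    NSLog(@"rename failed: %@", renameError);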
This is the code I used.
- (BOOL)encodeVideo:(NSURL *)videoURL
{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
// Create the composition and tracks
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *assetVideoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if (assetVideoTracks.count <= 0)
{
NSLog(#"Error reading the transformed video track");
return NO;
}
// Insert the tracks in the composition's tracks
AVAssetTrack *assetVideoTrack = [assetVideoTracks firstObject];
[videoTrack insertTimeRange:assetVideoTrack.timeRange ofTrack:assetVideoTrack atTime:CMTimeMake(0, 1) error:nil];
[videoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioTrack insertTimeRange:assetAudioTrack.timeRange ofTrack:assetAudioTrack atTime:CMTimeMake(0, 1) error:nil];
// Export to mp4
NSString *mp4Quality = [MGPublic isIOSAbove:@"6.0"] ? AVAssetExportPresetMediumQuality : AVAssetExportPresetPassthrough;
NSString *exportPath = [NSString stringWithFormat:@"%@/%@.mp4",
[NSHomeDirectory() stringByAppendingString:@"/tmp"],
[BSCommon uuidString]];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:mp4Quality];
exportSession.outputURL = exportUrl;
CMTime start = CMTimeMakeWithSeconds(0.0, 0);
CMTimeRange range = CMTimeRangeMake(start, [asset duration]);
exportSession.timeRange = range;
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
NSLog(#"MP4 Successful!");
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %#", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
default:
break;
}
}];
return YES;
}
I'm trying to capture the framebuffer data and convert it into a video for my iPhone game.
I'm using AVAssetWriter to accomplish this.
The code works fine on the simulator but not on the device itself; on the device it generates a black video.
I'm using the following code:
//This code initializes the AVAsetWriter and other things
- (void) testVideoWriter {
CGRect screenBoundst = [[UIScreen mainScreen] bounds];
//initialize global info
MOVIE_NAME = #"Documents/Movie5.mp4";
//CGSize size = CGSizeMake(screenBoundst.size.width, screenBoundst.size.height);
CGSize size = CGSizeMake(320, 480);
frameLength = CMTimeMake(1, 5);
currentTime = kCMTimeZero;
currentFrame = 0;
MOVIE_PATH = [NSHomeDirectory() stringByAppendingPathComponent:MOVIE_NAME];
NSError *error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:MOVIE_PATH]
fileType:AVFileTypeMPEG4 error:&error];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width ], AVVideoWidthKey,
[NSNumber numberWithInt:size.height ], AVVideoHeightKey, nil];
writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA],
kCVPixelBufferPixelFormatTypeKey, nil];
adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:
writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
[adaptor retain];
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
flipVertical = CGAffineTransformRotate(flipVertical,(90.0*3.14f/180.0f));
[writerInput setTransform:flipVertical];
[videoWriter addInput:writerInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
VIDEO_WRITER_IS_READY = true;
}
//this code capture the screen data
- (void) captureScreenVideo {
if (!writerInput.readyForMoreMediaData) {
return;
}
CGRect screenBounds = [[UIScreen mainScreen] bounds];
NSLog(#"width : %f Height : %f",screenBounds.size.width,screenBounds.size.height);
CGSize esize = CGSizeMake(screenBounds.size.width, screenBounds.size.height);
NSInteger myDataLength = esize.width * esize.height * 4;
GLuint *buffer = (GLuint *) malloc(myDataLength);
glReadPixels(0, 0, esize.width, esize.height, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
CVPixelBufferRef pixel_buffer = NULL;
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, esize.width,
esize.height, kCVPixelFormatType_32BGRA, (CFDictionaryRef) options,
&pixel_buffer);
NSParameterAssert(status == kCVReturnSuccess && pixel_buffer != NULL);
CVPixelBufferLockBaseAddress(pixel_buffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pixel_buffer);
NSParameterAssert(pixel_buffer != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, esize.width,
esize.height, 8, 4*esize.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
CGAffineTransform flipVerticalq = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
flipVerticalq = CGAffineTransformRotate(flipVerticalq,(90.0*3.14f/180.0f));
CGContextConcatCTM(context, flipVerticalq);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
if(![adaptor appendPixelBuffer:pixel_buffer withPresentationTime:currentTime]) {
NSLog(#"FAIL");
} else {
NSLog(#"Success:%d", currentFrame);
currentTime = CMTimeAdd(currentTime, frameLength);
}
free(buffer);
CVPixelBufferRelease(pixel_buffer);
}
// this code saves the video to library on device itself
- (void) moveVideoToSavedPhotos {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
NSString *localVid = [NSHomeDirectory() stringByAppendingPathComponent:MOVIE_NAME];
NSURL* fileURL = [NSURL fileURLWithPath:localVid];
NSLog(#"movie saved %#",fileURL);
BOOL isVideoOK = UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(localVid);
if (NO == isVideoOK)
NSLog(#"Video at %# is not compatible",localVid);
else {
NSLog(#"video ok");
}
[library writeVideoAtPathToSavedPhotosAlbum:fileURL
completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
NSLog(#"%#: Error saving context: %#", [self class], [error localizedDescription]);
}
}];
[library release];
}
// the following code stops the video recording after a particular number of frames
if (VIDEO_WRITER_IS_READY) {
[self captureScreenVideo];
currentFrame++;
if (currentFrame > 500) {
VIDEO_WRITER_IS_READY = false;
[writerInput markAsFinished];
if (![videoWriter finishWriting]) {
NSLog(#"writing not finished");
}
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
[writerInput release];
[videoWriter release];
NSLog(#"saving the file");
[self moveVideoToSavedPhotos];
}
}
Also, the video captured on the simulator is mirrored. I don't have any idea where I'm wrong; please clarify this for me, and I hope you guys don't mind reading through the whole code.