Failure of combination of AVAssetWriter and AVAssetWriterInputPixelBufferAdaptor appendPixelBuffer - iOS

I am attempting to use a combination of the encoding-asset-writer sample and the pixelBufferFromCGImage sample to overlay a UIImage atop an AVAsset that I'm exporting.
The problem is that, despite the YES result of this call
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
the exported AVAsset is corrupted, yielding an unexpected size, and any subsequent access to it fails with 'This media may be damaged'.
The export itself, if I avoid the appendPixelBuffer call, succeeds as desired. However, placing the call before the dispatch queue or within it yields the same failure.
Hopefully I'm not posting redundantly, but other samples here on Stack Overflow don't seem to address this particular combination failure. Thanks; code below.
Export code:
AVAsset *sourceAsset = [AVAsset assetWithURL:outputUrl];
NSError *error = nil;
NSString *fileName = [NSString stringWithFormat:@"non_transform_%f.mov", [[NSDate date] timeIntervalSince1970]];
NSString *combinedPath = [NSString stringWithFormat:@"file://localhost%@/%@", [[GroupDiskManager sharedManager] getFolderPath], fileName];
NSURL *outputURL = [NSURL URLWithString:combinedPath];
NSLog(@"combined path: %@", combinedPath);
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error];
AVAssetTrack *videoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:1280], AVVideoWidthKey,
                               [NSNumber numberWithInt:720], AVVideoHeightKey,
                               nil];
AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput
                                         assetWriterInputWithMediaType:AVMediaTypeVideo
                                         outputSettings:videoSettings] retain];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:1280] forKey:(NSString *)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:720] forKey:(NSString *)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                 sourcePixelBufferAttributes:attributes];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:videoWriterInput];
NSError *aerror = nil;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *asset_reader_output = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions];
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:sourceAsset error:&aerror];
[reader addOutput:asset_reader_output];
AVAssetWriterInput *audioWriterInput = [[AVAssetWriterInput
                                         assetWriterInputWithMediaType:AVMediaTypeAudio
                                         outputSettings:nil] retain];
AVAssetReader *audioReader = [[AVAssetReader assetReaderWithAsset:sourceAsset error:&error] retain];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:audioReaderOutput];
NSParameterAssert(audioWriterInput);
NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
audioWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:audioWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
[reader startReading];
CVPixelBufferRef buffer = [ImageToMovieManager pixelBufferFromCGImage:[UIImage imageNamed:@"234_1280x720_3.jpg"].CGImage size:CGSizeMake(1280, 720)];
BOOL theResult = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
if (theResult == NO) // fails on 3GS, but works on iPhone 4
    NSLog(@"failed to append buffer");
if (buffer) {
    CVBufferRelease(buffer);
}
dispatch_queue_t _processingQueue = dispatch_queue_create("_processingQueue", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:^{
    NSLog(@"requestMediaDataWhenReadyOnQueue");
    [self retain];
    while ([videoWriterInput isReadyForMoreMediaData]) {
        CMSampleBufferRef sampleBuffer;
        if ([reader status] == AVAssetReaderStatusReading &&
            (sampleBuffer = [asset_reader_output copyNextSampleBuffer])) {
            BOOL result = [videoWriterInput appendSampleBuffer:sampleBuffer];
            CFRelease(sampleBuffer);
            if (!result) {
                NSLog(@" result == nil Cancel!");
                NSLog(@"videoWriter.error: %@", videoWriter.error);
                [reader cancelReading];
                break;
            }
        } else {
            NSLog(@"[videoWriterInput markAsFinished]");
            [videoWriterInput markAsFinished];
            switch ([reader status]) {
                case AVAssetReaderStatusReading:
                    NSLog(@"reading");
                    // the reader has more for other tracks, even if this one is done
                    break;
                case AVAssetReaderStatusCompleted:
                    NSLog(@"AVAssetReaderStatusCompleted");
                    [audioReader startReading];
                    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
                    [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
                        while (audioWriterInput.readyForMoreMediaData) {
                            CMSampleBufferRef nextBuffer;
                            if ([audioReader status] == AVAssetReaderStatusReading &&
                                (nextBuffer = [audioReaderOutput copyNextSampleBuffer])) {
                                if (nextBuffer) {
                                    [audioWriterInput appendSampleBuffer:nextBuffer];
                                }
                            } else {
                                [audioWriterInput markAsFinished];
                                switch ([audioReader status]) {
                                    case AVAssetReaderStatusCompleted:
                                        NSLog(@"AVAssetReaderStatusCompleted!!");
                                        [videoWriter finishWriting];
                                        [VideoManager videoSavedWithURL:outputURL withError:(NSError *)error];
                                        break;
                                }
                            }
                        }
                    }];
                    break;
                case AVAssetReaderStatusFailed:
                    NSLog(@"AVAssetReaderStatusFailed");
                    [videoWriter cancelWriting];
                    break;
            }
            break;
        }
    }
}];
pixelBufferFromCGImage code:
CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:NO], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:NO], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
frameSize.height, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, (CFDictionaryRef) options,
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
kCGImageAlphaNoneSkipLast);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;

At the very least, the pixel format should be specified as kCVPixelFormatType_32BGRA, not kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange.
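For reference, a sketch of what the buffer creation and drawing could look like with matching formats; this assumes the same options, frameSize, and image variables as the question's code, and the kCGBitmapByteOrder32Little flag is what lines the CGContext memory layout up with BGRA:
CVPixelBufferRef pxbuffer = NULL;
// BGRA is a packed RGB format that a CGBitmapContext can draw into directly;
// the bi-planar YUV format has no single plane suitable for CoreGraphics.
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width, frameSize.height,
                                      kCVPixelFormatType_32BGRA, (CFDictionaryRef)options, &pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pxbuffer),
                                             frameSize.width, frameSize.height, 8,
                                             CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace,
                                             kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
CGContextDrawImage(context, CGRectMake(0, 0, frameSize.width, frameSize.height), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
The adaptor's sourcePixelBufferAttributes would need the same kCVPixelFormatType_32BGRA value, so the appended buffer matches what the adaptor expects.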

Related

CMSampleBufferRef kCMSampleBufferAttachmentKey_TrimDurationAtStart crash

This has been bothering me for a while. I have a video converter that converts videos into .mp4 format, but there is a crash that happens on some videos, though not all.
Here is the crash log:
*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:]
Cannot append sample buffer: First input buffer must have an appropriate kCMSampleBufferAttachmentKey_TrimDurationAtStart since the codec has encoder delay'
Here is my code:
NSURL *uploadURL = [NSURL fileURLWithPath:[[NSTemporaryDirectory() stringByAppendingPathComponent:[self getVideoName]] stringByAppendingString:@".mp4"]];
AVAssetTrack *videoTrack = [[self.avAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize videoSize = videoTrack.naturalSize;
NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey, [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey, nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoWriterSettings];
videoWriterInput.expectsMediaDataInRealTime = YES;
videoWriterInput.transform = videoTrack.preferredTransform;
self.assetWriter = [[AVAssetWriter alloc] initWithURL:uploadURL fileType:AVFileTypeQuickTimeMovie error:nil];
[self.assetWriter addInput:videoWriterInput];
//setup video reader
NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];
self.assetReader = [[AVAssetReader alloc] initWithAsset:self.avAsset error:nil];
[self.assetReader addOutput:videoReaderOutput];
//setup audio writer
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil];
audioWriterInput.expectsMediaDataInRealTime = NO;
[self.assetWriter addInput:audioWriterInput];
//setup audio reader
AVAssetTrack* audioTrack = [[self.avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:self.avAsset error:nil];
[audioReader addOutput:audioReaderOutput];
[self.assetWriter startWriting];
[self.assetReader startReading];
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
    while ([videoWriterInput isReadyForMoreMediaData])
    {
        CMSampleBufferRef sampleBuffer = NULL;
        if ([self.assetReader status] == AVAssetReaderStatusReading &&
            (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
            [videoWriterInput appendSampleBuffer:sampleBuffer];
            CFRelease(sampleBuffer);
        }
        else
        {
            [videoWriterInput markAsFinished];
            if ([self.assetReader status] == AVAssetReaderStatusCompleted)
            {
                //start writing from audio reader
                [audioReader startReading];
                [self.assetWriter startSessionAtSourceTime:kCMTimeZero];
                dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
                [audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
                    while (audioWriterInput.readyForMoreMediaData)
                    {
                        CMSampleBufferRef sampleBuffer;
                        if ([audioReader status] == AVAssetReaderStatusReading &&
                            (sampleBuffer = [audioReaderOutput copyNextSampleBuffer]))
                        {
                            if (sampleBuffer) {
                                [audioWriterInput appendSampleBuffer:sampleBuffer];
                            }
                            CFRelease(sampleBuffer);
                        }
                        else
                        {
                            [audioWriterInput markAsFinished];
                            if ([audioReader status] == AVAssetReaderStatusCompleted) {
                                [self.assetWriter finishWritingWithCompletionHandler:^(){
                                    [self createLiveTrailerApiForVideoId:video.dbId];
                                }];
                            }
                        }
                    }
                }];
            }
        }
    }
}];
And this is the part that is causing the crash:
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [audioReaderOutput copyNextSampleBuffer]))
{
if (sampleBuffer) {
[audioWriterInput appendSampleBuffer:sampleBuffer];
}
CFRelease(sampleBuffer);
}
I have been searching around; it seems I need to set kCMSampleBufferAttachmentKey_TrimDurationAtStart on the first buffer, but I can't find any example of how to set this value.
Please advise. Thanks!
Just like this:
CFDictionaryRef dict = NULL;
if (firstBuffer) {
    firstBuffer = NO;
    dict = CMTimeCopyAsDictionary(CMTimeMake(1024, 44100), kCFAllocatorDefault);
    CMSetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_TrimDurationAtStart, dict, kCMAttachmentMode_ShouldNotPropagate);
    CFRelease(dict); // CMTimeCopyAsDictionary follows the Copy rule, so release after attaching
}
According to this thread on the Apple mailing lists, I'd suggest you
check the CMAttachment Reference.
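Putting the answer's snippet into the question's audio loop would look something like this sketch; the CMTimeMake(1024, 44100) trim corresponds to AAC's 1024 priming samples at a 44.1 kHz sample rate (if the track isn't 44.1 kHz, the timescale should presumably match the actual sample rate), and firstBuffer is a flag you declare yourself:
__block BOOL firstBuffer = YES; // hypothetical flag, declared before requestMediaDataWhenReadyOnQueue:
// inside the while (audioWriterInput.readyForMoreMediaData) loop:
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
    (sampleBuffer = [audioReaderOutput copyNextSampleBuffer]))
{
    if (firstBuffer) {
        firstBuffer = NO;
        // trim the encoder delay off the very first audio buffer
        CFDictionaryRef dict = CMTimeCopyAsDictionary(CMTimeMake(1024, 44100), kCFAllocatorDefault);
        CMSetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_TrimDurationAtStart, dict, kCMAttachmentMode_ShouldNotPropagate);
        CFRelease(dict);
    }
    [audioWriterInput appendSampleBuffer:sampleBuffer];
    CFRelease(sampleBuffer);
}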
Just for clarity: you should rename your sampleBuffer variable in the inner while.
Did you try passing a dictionary containing kCMSampleBufferAttachmentKey_TrimDurationAtStart when initializing your audioReaderOutput? (Not sure how it should be generated.)

Converting array of images to mp4

I tried to convert an array of images into one mp4 file, and it seems to work, but when the video player starts it only shows a black view. I followed some other posts on here for converting images, and this is what I got. When I test it, all the pictures are converted and the video gets created, but when it plays it only shows a black view. Any help or clarification would be appreciated.
- (void) createVideoPlayer:(NSArray *)imagesArray
{
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
//NSLog(@"-->videoOutputPath= %@", videoOutputPath);
// get rid of existing mp4 if exists...
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
    NSLog(@"Unable to delete file: %@", [error localizedDescription]);
CGSize imageSize = CGSizeMake(213, 320);
NSUInteger fps = 3;
////////////// end setup ///////////////////////////////////
NSLog(@"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:320], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
//for(VideoFrame * frm in imageArray)
NSLog(@"**************************************************");
for (UIImage *img in imagesArray)
{
    //UIImage * img = frm._imageFrame;
    buffer = [self pixelBufferFromCGImage:[img CGImage]];
    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30)
    {
        if (adaptor.assetWriterInput.readyForMoreMediaData)
        {
            //print out status:
            NSLog(@"Processing video frame (%d,%lu)", frameCount, (unsigned long)[imagesArray count]);
            CMTime frameTime = CMTimeMake(frameCount*frameDuration, (int32_t)fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if (!append_ok)
            {
                NSError *error = videoWriter.error;
                if (error != nil)
                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
            }
        }
        else
        {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok)
        printf("error appending image %d times %d\n, with error.", frameCount, j);
    frameCount++;
}
NSLog(@"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
}];
NSLog(@"Write Ended");
AVMutableComposition *mixComposition = [AVMutableComposition composition];
// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = @"public.mpeg-4";
//NSLog(@"support file types= %@", [_assetExport supportedFileTypes]);
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    // [self SaveVideo:outputFilePath];
    moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
    [moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
    [moviePlayer prepareToPlay];
    // And other options you can look through the documentation.
    [self.view addSubview:moviePlayer.view];
    [moviePlayer play];
}];
NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);
}
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
CGSize size = CGSizeMake(300, 300);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess) {
    NSLog(@"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaPremultipliedLast);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
All UI updates must be performed on the main thread. Double-check that the completionHandler is executing on the main thread by calling:
NSLog(@"%d", [NSThread isMainThread]);
To execute on the main thread, use GCD:
dispatch_async(dispatch_get_main_queue(), ^{
// your UI update
});
Also, you should always create a weak reference when capturing self, to avoid a strong reference cycle, by creating a local instance:
__weak typeof(self) weakSelf = self; // now use weakSelf.doSomething
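Applied to the exportAsynchronouslyWithCompletionHandler: block from the question, both fixes together might look like this sketch (it assumes moviePlayer is exposed as a property on self, so the block doesn't capture self strongly):
__weak typeof(self) weakSelf = self;
[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    // hop back to the main thread before touching any UI
    dispatch_async(dispatch_get_main_queue(), ^{
        weakSelf.moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
        [weakSelf.moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
        [weakSelf.moviePlayer prepareToPlay];
        [weakSelf.view addSubview:weakSelf.moviePlayer.view];
        [weakSelf.moviePlayer play];
    });
}];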

Adding Audio to AVAsset

I've been attempting to use AVAssetWriter to crop and export a video in an iOS app. Everything had been going swimmingly up until I attempted to add audio to the output. The current code (see below) returns the following error:
NSInvalidArgumentException', reason: '*** -[AVAssetReader startReading] cannot be called again after reading has already started'
From what I've written below, this should be using a different reader than the one that was already started, so I don't understand the issue. It may simply be a case of needing another pair of eyes on it, so here's the code.
-(void)cropVideoAsset:(NSURL *)fileURL
{
NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"croppedVideo.mov"];
toUrl = [NSURL fileURLWithPath:outputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath])
    {[[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];}
AVAsset *asset = [AVAsset assetWithURL:fileURL];
NSLog(@"asset: %f", CMTimeGetSeconds(asset.duration));
NSDictionary* compressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoProfileLevelH264Main31, AVVideoProfileLevelKey,
[NSNumber numberWithInt:2500000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt: 5], AVVideoMaxKeyFrameIntervalKey,
nil];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:500], AVVideoWidthKey,
[NSNumber numberWithInt:500], AVVideoHeightKey,
AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
compressionSettings, AVVideoCompressionPropertiesKey,
nil];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
writerInput.expectsMediaDataInRealTime = true;
NSError *error;
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:toUrl fileType:AVFileTypeMPEG4 error:&error];
[assetWriter addInput:writerInput];
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
writerInput.transform = videoTrack.preferredTransform;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *asset_reader_output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:videoOptions];
[assetReader addOutput:asset_reader_output];
//audio setup
NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInt:1], AVNumberOfChannelsKey,
[NSNumber numberWithFloat:44100.0], AVSampleRateKey,
[NSNumber numberWithInt:64000], AVEncoderBitRateKey,
nil];
AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:readerOutput];
audioWriterInput.expectsMediaDataInRealTime = true;
[assetWriter addInput:audioWriterInput];
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
[assetReader startReading];
dispatch_queue_t _processingQueue = dispatch_queue_create("assetAudioWriterQueue", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:^{
    while (writerInput.readyForMoreMediaData)
    {
        switch (assetReader.status)
        {
            case AVAssetReaderStatusReading:
            {
                CMSampleBufferRef sampleBuffer = [asset_reader_output copyNextSampleBuffer];
                if (sampleBuffer)
                {
                    BOOL result = [writerInput appendSampleBuffer:sampleBuffer];
                    CFRelease(sampleBuffer);
                    if (!result)
                    {
                        [SVProgressHUD dismiss];
                        [self enableView];
                        NSString *errorMessage = [NSString stringWithFormat:@"We're sorry, an error occurred during the video capture.\n\n%@\n\nPlease try again.", assetWriter.error.localizedDescription];
                        [UIAlertView simpleAlertWithTitle:@"Video Error" andMessage:errorMessage];
                        [assetReader cancelReading];
                        break;
                    }
                }
            }
                break;
            case AVAssetReaderStatusCompleted:
            {
                [self handleAudioWithReader:audioReader writer:assetWriter input:audioWriterInput andOutput:readerOutput];
            }
                break;
            case AVAssetReaderStatusFailed:
                [SVProgressHUD dismiss];
                [self enableView];
                [UIAlertView simpleAlertWithTitle:@"Video Error" andMessage:@"We're sorry, an error occurred during the video capture. Please try again."];
                [assetWriter cancelWriting];
                break;
        }
    }
}];
}
And the requisite call:
-(void)handleAudioWithReader:(AVAssetReader *)audioReader writer:(AVAssetWriter *)videoWriter input:(AVAssetWriterInput *)audioWriterInput andOutput:(AVAssetReaderOutput *)readerOutput
{
[audioReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
    NSLog(@"Asset Writer ready :%d", audioWriterInput.readyForMoreMediaData);
    while (audioWriterInput.readyForMoreMediaData)
    {
        CMSampleBufferRef nextBuffer;
        if ([audioReader status] == AVAssetReaderStatusReading && (nextBuffer = [readerOutput copyNextSampleBuffer]))
        {
            if (nextBuffer)
            {
                NSLog(@"NextBuffer");
                [audioWriterInput appendSampleBuffer:nextBuffer];
            }
        }
        else
        {
            [audioWriterInput markAsFinished];
            switch ([audioReader status])
            {
                case AVAssetReaderStatusCompleted:
                    [videoWriter finishWriting];
                    [audioWriterInput markAsFinished];
                    dispatch_async(dispatch_get_main_queue(), ^{[self saveAssetToLibraryAtURL:toUrl];});
                    break;
            }
        }
    }
}];
}

iOS green border when creating mov from UIImages

I'm using the code below to take a series of UIImages and convert them into a .mov. For some reason I keep getting a green border at the bottom or on the right side, depending on whether it's a landscape or portrait photo. The images are 215x320.
How can I remove the green borders? Is there a better way of creating a .mov from UIImages?
- (void)createMov
{
UIImage *first = [self.videoFrames objectAtIndex:0];
CGSize frameSize = first.size;
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:fileName] fileType:AVFileTypeQuickTimeMovie
error:&error];
if (error) {
    NSLog(@"error creating AssetWriter: %@", [error description]);
}
int numPixels = first.size.width * first.size.height;
int bitsPerSecond = numPixels * 11.04;
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInteger:bitsPerSecond], AVVideoAverageBitRateKey,
[NSNumber numberWithInteger:30], AVVideoMaxKeyFrameIntervalKey,
nil], AVVideoCompressionPropertiesKey,
nil];
AVAssetWriterInput* writerInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey: (NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
[videoWriter addInput:writerInput];
// fixes all errors
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
BOOL start = [videoWriter startWriting];
NSLog(@"Session started? %d", start);
[videoWriter startSessionAtSourceTime:kCMTimeZero];
// Writing.
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[first CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
if (result == NO) // fails on 3GS, but works on iPhone 4
    NSLog(@"failed to append buffer");
if (buffer)
    CVBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.05];
int fps = [Utils frameRate];
int i = 0;
for (UIImage *imgFrame in self.videoFrames) {
i = [self addFrame:adaptor videoWriter:videoWriter buffer:buffer imgFrame:imgFrame i:i fps:fps];
}
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWriting];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
[videoWriter release];
[writerInput release];
}
- (int)addFrame:(AVAssetWriterInputPixelBufferAdaptor *)adaptor videoWriter:(AVAssetWriter *)videoWriter buffer:(CVPixelBufferRef)buffer imgFrame:(UIImage *)imgFrame i:(int)i fps:(float)fps
{
    if (adaptor.assetWriterInput.readyForMoreMediaData) {
        i++;
        NSLog(@"inside for loop %d", i);
        CMTime frameTime = CMTimeMake(1, fps);
        CMTime lastTime = CMTimeMake(i, fps);
        CMTime presentTime = CMTimeAdd(lastTime, frameTime);
        buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
        BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
        if (result == NO) { // fails on 3GS, but works on iPhone 4
            NSLog(@"failed to append buffer");
            NSLog(@"The error is %@", [videoWriter error]);
        }
        if (buffer) {
            CVBufferRelease(buffer);
        }
        [NSThread sleepForTimeInterval:0.05];
    } else {
        NSLog(@"error");
        i--;
    }
    [NSThread sleepForTimeInterval:0.02];
    return i;
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (CFDictionaryRef) options,
&pxbuffer);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
size_t rowBytes = CVPixelBufferGetBytesPerRow(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
CGImageGetHeight(image), 8, rowBytes, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
It seems I had a similar issue, and an idea came to me after reading the two posts below.
http://forum.videohelp.com/threads/314973-green-line-at-bottom-of-video-window
http://en.wikipedia.org/wiki/H.264/MPEG-4_AVC#Features
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithUnsignedLong:sourceWidth], AVVideoWidthKey,
[NSNumber numberWithUnsignedLong:sourceHeight], AVVideoHeightKey,
nil];
AVAssetWriterInput *writerInput =
[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
Before starting to write data with the AVAssetWriter, reset sourceWidth and sourceHeight with this method:
size_t fixedLineValue(size_t sourceValue) {
    size_t divide = sourceValue % 4;
    size_t finalValue = sourceValue;
    if (divide) {
        finalValue = (sourceValue / 4 + 1) * 4;
    }
    return finalValue;
}
I can't figure out exactly why this works, but after that, the ugly green borders were removed. Please comment if there's a better answer and let us know. Thanks.
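If it helps, my guess is that the encoder pads frames whose dimensions aren't a multiple of its block size, and the uninitialized padding renders as green; rounding the dimensions yourself avoids the padding. A sketch of applying the helper to the question's 215x320 frames (frameSize comes from the question's createMov):
size_t sourceWidth = fixedLineValue((size_t)frameSize.width);   // 215 -> 216
size_t sourceHeight = fixedLineValue((size_t)frameSize.height); // 320 stays 320
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithUnsignedLong:sourceWidth], AVVideoWidthKey,
                               [NSNumber numberWithUnsignedLong:sourceHeight], AVVideoHeightKey,
                               nil];
The pixel buffers handed to the adaptor should presumably be created at the same rounded size, so nothing gets rescaled or padded.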

How to save video programmatically on iPad 1, iOS SDK

I have an array of images and I converted them to a movie, and now I want to know how to save that converted video to the iPad.
Can I save the converted video to the iPad photo library?
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
// NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
CFDataRef imgData = (CFDataRef)[array objectAtIndex:0];
CGDataProviderRef imgDataProvider = CGDataProviderCreateWithCFData (imgData);
CGImageRef image1 = CGImageCreateWithPNGDataProvider(imgDataProvider, NULL, true, kCGRenderingIntentDefault);
buffer = [self pixelBufferFromCGImage:image1];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
//Write samples:......
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWriting];
Try this open-source component:
https://www.cocoacontrols.com/controls/iqprojectvideo
This might help you. Use it according to your requirement.
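For the photo-library half of the question, a minimal sketch using the AssetsLibrary framework of that era; videoPath here is a hypothetical path standing in for wherever your writer saved the finished movie:
#import <AssetsLibrary/AssetsLibrary.h>
NSURL *videoURL = [NSURL fileURLWithPath:videoPath]; // hypothetical path to the finished .mov
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:videoURL
                            completionBlock:^(NSURL *assetURL, NSError *error) {
    if (error) {
        NSLog(@"Failed to save to photo library: %@", error);
    } else {
        NSLog(@"Saved to photo library: %@", assetURL);
    }
    [library release]; // pre-ARC: the copied block keeps the library alive until here
}];
You can call videoAtPathIsCompatibleWithSavedPhotosAlbum: first if you want to verify the file before writing.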
Try the following code:
[_operationQueue addOperationWithBlock:^{
    NSInteger i = 0;
    NSString *path = [NSTemporaryDirectory() stringByAppendingFormat:@"%d.png", i];
    UIImage *image;
    NSDate *startDate;
    while ((image = [UIImage imageWithContentsOfFile:path]))
    {
        while (1)
        {
            if (writerInput.readyForMoreMediaData == NO)
            {
                sleep(0.01);
                continue;
            }
            else
            {
                //First time only
                if (buffer == NULL)
                {
                    CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
                    startDate = [_dates objectAtIndex:i];
                }
                buffer = [IQProjectVideo pixelBufferFromCGImage:image.CGImage];
                if (buffer)
                {
                    if (i < _dates.count) {
                        NSDate *currentDate = [_dates objectAtIndex:i];
                        Float64 interval = [currentDate timeIntervalSinceDate:startDate];
                        int32_t timeScale;
                        if (i == 0)
                        {
                            timeScale = 1.0/([[_dates objectAtIndex:i+1] timeIntervalSinceDate:currentDate]);
                        }
                        else
                        {
                            timeScale = 1.0/([currentDate timeIntervalSinceDate:[_dates objectAtIndex:i-1]]);
                        }
                        /**/
                        CMTime presentTime = CMTimeMakeWithSeconds(interval, MAX(33, timeScale));
                        // NSLog(@"presentTime:%@", (__bridge NSString *)CMTimeCopyDescription(kCFAllocatorDefault, presentTime));
                        if (_progressBlock != NULL)
                        {
                            dispatch_sync(dispatch_get_main_queue(), ^{
                                _progressBlock((CGFloat)i/(CGFloat)currentIndex);
                            });
                        }
                        // append buffer
                        [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
                        CVPixelBufferRelease(buffer);
                    }
                }
                break;
            }
        }
        [[NSFileManager defaultManager] removeItemAtPath:path error:nil];
        path = [NSTemporaryDirectory() stringByAppendingFormat:@"%d.png", ++i];
    }
    //Finish the session:
    [writerInput markAsFinished];
    if ([videoWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)])
    {
        [videoWriter finishWritingWithCompletionHandler:^{
            CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
        }];
    }
    else
    {
        [videoWriter finishWriting];
        CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    }
    NSDictionary *fileAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:_path error:nil];
    NSDictionary *dictInfo = [NSDictionary dictionaryWithObjectsAndKeys:
                              _path, IQFilePathKey,
                              [fileAttributes objectForKey:NSFileSize], IQFileSizeKey,
                              [fileAttributes objectForKey:NSFileCreationDate], IQFileCreateDateKey,
                              nil];
    if (_completionBlock != NULL)
    {
        dispatch_sync(dispatch_get_main_queue(), ^{
            _completionBlock(dictInfo, videoWriter.error);
        });
    }
    NSString *openCommand = [NSString stringWithFormat:@"/usr/bin/open \"%@\"", NSTemporaryDirectory()];
    system([openCommand fileSystemRepresentation]);
}];
