Create video from images of camera roll - iOS SDK

I have used the following code to create a video from images.
The code works fine when I select images from the camera roll that were downloaded from the web or taken as screenshots, but images taken with the camera appear zoomed in in the resulting movie.
I don't know what is wrong with the camera images.
Can anyone please help me resolve this issue?
-(IBAction)createV:(id)sender
{
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    CGSize imageSize = [DatabaseAccess getusersetsize];
    double nospf = [[[NSUserDefaults standardUserDefaults] valueForKey:@"duration"] intValue];
    NSUInteger fps = 10;
    NSMutableArray *imageArray;
    NSArray *imagePaths = [DatabaseAccess getimagelist:@"select imgname,strftime('%d-%m-%Y', tdate) as tdate from tbl_userimage"];
    imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
    int i = 0;
    for (NSString *path in [imagePaths valueForKey:@"image"])
    {
        if ([[NSUserDefaults standardUserDefaults] boolForKey:@"disdate"])
        {
            CGSize imgsize = [DatabaseAccess getusersetsize];
            [imageArray addObject:[DatabaseAccess drawText:[[imagePaths valueForKey:@"date"] objectAtIndex:i]
                                                   inImage:[UIImage imageWithContentsOfFile:[DatabaseAccess documentsPathForFileName:path]]
                                                   atPoint:CGPointMake(imgsize.width - 250, imgsize.height - 60)]];
        }
        else
        {
            [imageArray addObject:[UIImage imageWithContentsOfFile:[DatabaseAccess documentsPathForFileName:path]]];
            NSLog(@"%@", path);
        }
        i++;
    }
    [self exportImages:imageArray asVideoToPath:videoOutputPath withFrameSize:imageSize framesPerSecond:fps numberOfSecondsPerFrame:nospf];
}
- (void)exportImages:(NSMutableArray *)imageArray asVideoToPath:(NSString *)videoOutputPath withFrameSize:(CGSize)imageSize framesPerSecond:(NSUInteger)fps numberOfSecondsPerFrame:(double)numberOfSecondsPerFrame
{
    NSError *error = nil;
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file: %@", [error localizedDescription]);
    ////////////// end setup ///////////////////////////////////
    NSLog(@"Start building video from defined frames.");
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath]
                                                           fileType:AVFileTypeMPEG4
                                                              error:&error];
    NSParameterAssert(videoWriter);
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];
    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    CVPixelBufferRef buffer = NULL;
    int frameCount = 0;
    NSLog(@"**************************************************");
    for (UIImage *img in imageArray)
    {
        buffer = [self pixelBufferFromCGImage:[img CGImage]];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                // print out status:
                NSLog(@"Processing video frame (%d,%lu)", frameCount, (unsigned long)[imageArray count]);
                CMTime frameTime = CMTimeMake(frameCount, (int32_t)numberOfSecondsPerFrame);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (!append_ok) {
                    NSError *error = videoWriter.error;
                    if (error != nil) {
                        NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                    }
                }
            }
            else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d, with error.\n", frameCount, j);
        }
        frameCount++;
    }
    NSLog(@"**************************************************");
    // Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    NSLog(@"Write Ended");
    [self playMovie:videoOutputPath];
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    CGSize size = [DatabaseAccess getusersetsize];
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess) {
        NSLog(@"Failed to create pixel buffer");
    }
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
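A note on the likely cause of the zoom: the pixel buffer and bitmap context above are created at [DatabaseAccess getusersetsize], but CGContextDrawImage draws the image at its own CGImageGetWidth/CGImageGetHeight. A camera photo is typically much larger than a web image or screenshot, so only its lower-left corner lands inside the buffer, which looks like a zoom. A hedged one-line alternative is to draw into the buffer's own rect instead (this stretches when aspect ratios differ):

// Draw scaled to the pixel buffer's size rather than the image's own size.
CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);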

I have solved the zoomed-in image issue using the code below.
-(UIImage *)scaleImage:(UIImage *)image toSize:(CGSize)newSize
{
    UIGraphicsBeginImageContext(newSize);
    [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}
Resize the UIImage before converting it to a CGImage, and make sure the resized width is a multiple of 16.
CGSize your_size = CGSizeMake(1600, 800);
UIImage *tempImg = [self scaleImage:img toSize:your_size];
buffer = [self pixelBufferFromCGImage:[tempImg CGImage]];
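If your target size is not already a multiple of 16, a small helper can round the dimensions up before scaling. This is a sketch; the helper name and the choice to round up rather than down are my assumptions:

// Round both dimensions up to the next multiple of 16 to avoid the
// row-padding artifacts described above.
static CGSize sizeAlignedTo16(CGSize size)
{
    return CGSizeMake(ceil(size.width / 16.0) * 16.0,
                      ceil(size.height / 16.0) * 16.0);
}

// Usage before scaling:
CGSize alignedSize = sizeAlignedTo16(CGSizeMake(1600, 800));
UIImage *tempImg = [self scaleImage:img toSize:alignedSize];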

Related

How to Animate Images while converting to Video

I want to animate images smoothly while converting them to video. Despite searching SO, I am unable to understand how to achieve it. I tried changing the rotation angle (CGAffineTransformRotation), translations, and scaling, but didn't find a way to get smooth animations. Here's how I am converting the array of photos to video:
- (void)createVideoWithArrayImages:(NSMutableArray *)images size:(CGSize)size time:(float)time output:(NSURL *)output
{
    NSError *error;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:output fileType:AVFileTypeMPEG4 error:&error];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:nil];
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    CVPixelBufferRef buffer = NULL;
    NSInteger fps = 30;
    int frameCount = 0;
    for (UIImage *img in images) {
        NSLog(@"**************************************************");
        buffer = [self videoPixelBufferFromCGImage:[img CGImage] andSize:size andAngle:(int)[images indexOfObject:img]];
        double numberOfSecondsPerFrame = time / images.count;
        double frameDuration = fps * numberOfSecondsPerFrame;
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < fps) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                // print out status:
                NSLog(@"Processing video frame (%d,%d)", frameCount, (int)[images count]);
                CMTime frameTime = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
                NSLog(@"Frame Time : %f", CMTimeGetSeconds(frameTime));
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (!append_ok) {
                    NSError *error = videoWriter.error;
                    if (error != nil) {
                        NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                    }
                }
            }
            else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d, with error.\n", frameCount, j);
        }
        frameCount++;
        NSLog(@"**************************************************");
    }
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    videoWriter = nil;
    if (buffer != NULL)
        CVPixelBufferRelease(buffer);
    NSLog(@"************ write standard video successful ************");
}
Here the CVPixelBufferRef is returned as follows:
- (CVPixelBufferRef)videoPixelBufferFromCGImage:(CGImageRef)image andSize:(CGSize)size andAngle:(int)angle
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4 * size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
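One thing worth noting: the angle parameter is passed in above but never used. If the goal is per-frame motion inside this pipeline, one option is to apply a transform to the context before CGContextDrawImage. A sketch, assuming a rotation about the buffer's center is the effect you want:

// Rotate each frame about the center of the buffer by `angle` degrees.
CGContextTranslateCTM(context, size.width / 2.0, size.height / 2.0);
CGContextRotateCTM(context, angle * M_PI / 180.0);
CGContextTranslateCTM(context, -size.width / 2.0, -size.height / 2.0);
CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);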
I tried adding translations to the CVPixelBufferRef, but things didn't work out for me. Any guide, any help would be very useful.
You don't specify exactly what you want to achieve, but I've used AVVideoCompositionCoreAnimationTool to include animated CALayers in an AVMutableVideoComposition.
Here is the reference to that class.
Hope that helps!
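For reference, a minimal sketch of that approach (the render size, timing, and the spinning overlay are placeholder choices; the AVMutableVideoCompositionInstruction that a real composition also needs is omitted):

AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = CGSizeMake(640, 480);
videoComposition.frameDuration = CMTimeMake(1, 30);

// The tool composites the video frames into videoLayer, then renders parentLayer.
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, 640, 480);
videoLayer.frame = parentLayer.frame;
[parentLayer addSublayer:videoLayer];

// Any Core Animation attached here is baked into the exported video.
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 0, 200, 200);
CABasicAnimation *spin = [CABasicAnimation animationWithKeyPath:@"transform.rotation"];
spin.fromValue = @0;
spin.toValue = @(2 * M_PI);
spin.duration = 2.0;
spin.repeatCount = HUGE_VALF;
spin.beginTime = AVCoreAnimationBeginTimeAtZero; // beginTime 0 would be treated as CACurrentMediaTime()
spin.removedOnCompletion = NO;
[overlayLayer addAnimation:spin forKey:@"spin"];
[parentLayer addSublayer:overlayLayer];

videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool
    videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                            inLayer:parentLayer];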

Converting array of images to mp4

I tried to convert an array of images to a single mp4 file, and it seems to work, but when the video player starts playing it only shows a black view. I followed some other posts on here on converting images, and this is what I got. When I test it, all the pictures are converted and the video gets created, but when the video plays it only shows a black view. Any help or clarification will be appreciated.
- (void)createVideoPlayer:(NSArray *)imagesArray
{
    NSError *error = nil;
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    // get rid of existing mp4 if it exists...
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file: %@", [error localizedDescription]);
    CGSize imageSize = CGSizeMake(213, 320);
    NSUInteger fps = 3;
    ////////////// end setup ///////////////////////////////////
    NSLog(@"Start building video from defined frames.");
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:320], AVVideoWidthKey,
                                   [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];
    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    CVPixelBufferRef buffer = NULL;
    int frameCount = 0;
    double numberOfSecondsPerFrame = 6;
    double frameDuration = fps * numberOfSecondsPerFrame;
    NSLog(@"**************************************************");
    for (UIImage *img in imagesArray)
    {
        buffer = [self pixelBufferFromCGImage:[img CGImage]];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30)
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData)
            {
                // print out status:
                NSLog(@"Processing video frame (%d,%lu)", frameCount, (unsigned long)[imagesArray count]);
                CMTime frameTime = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (!append_ok)
                {
                    NSError *error = videoWriter.error;
                    if (error != nil)
                        NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }
            else
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok)
            printf("error appending image %d times %d, with error.\n", frameCount, j);
        frameCount++;
    }
    NSLog(@"**************************************************");
    // Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
    }];
    NSLog(@"Write Ended");
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    // this is the video file that was just written above; the full path to the file is in --> videoOutputPath
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
    // create the final video output file as a MOV file - may need to be MP4, but this works so far...
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    CMTime nextClipStartTime = kCMTimeZero;
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"public.mpeg-4";
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
        [moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
        [moviePlayer prepareToPlay];
        [self.view addSubview:moviePlayer.view];
        [moviePlayer play];
    }];
    NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);
}
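One likely culprit, for what it's worth: finishWritingWithCompletionHandler: is asynchronous, but the code above reads videoOutputPath into an AVURLAsset immediately afterwards, possibly before the writer has finished producing a playable file, which would explain a black view. A sketch of deferring that work, assuming the composition and export code is factored into a hypothetical composeAndPlayFrom: helper:

[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
    // Only touch the written file once the writer reports completion.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self composeAndPlayFrom:videoOutputPath]; // hypothetical wrapper for the composition/export code above
    });
}];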
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    CGSize size = CGSizeMake(300, 300);
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess) {
        NSLog(@"Failed to create pixel buffer");
    }
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedLast);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
All UI updates must happen on the main thread. Double-check that the completionHandler is executing on the main thread by calling:
NSLog(@"%d", [NSThread isMainThread]);
To execute on the main thread, use GCD:
dispatch_async(dispatch_get_main_queue(), ^{
    // your UI update
});
Also, you should always create a weak reference when capturing self, to avoid a strong reference cycle, by creating a local instance:
__weak typeof(self) weakSelf = self; // now use weakSelf.doSomething
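Applied to the export completion handler above, that might look like the following sketch (the player setup is copied from the question; the weak/strong pattern assumes moviePlayer is an instance variable):

__weak typeof(self) weakSelf = self;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (strongSelf == nil) return;
        // All UI work now runs on the main thread.
        strongSelf->moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
        [strongSelf->moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
        [strongSelf->moviePlayer prepareToPlay];
        [strongSelf.view addSubview:strongSelf->moviePlayer.view];
        [strongSelf->moviePlayer play];
    });
}];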

Images are cropped while converting array of images to video

In my application, I am converting an array of images to video using the code below.
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
// get rid of existing mp4 if it exists...
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
    NSLog(@"Unable to delete file: %@", [error localizedDescription]);
NSUInteger fps = 30;
////////////// end setup ///////////////////////////////////
NSLog(@"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath]
                                                       fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:288], AVVideoWidthKey,
                               [NSNumber numberWithInt:352], AVVideoHeightKey,
                               nil];
AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                          outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                 sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
// Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
int frameCount = 0;
NSLog(@"**************************************************");
for (UIImage *img in finalArrayVal)
{
    buffer = [self pixelBufferFromCGImage:[img CGImage]];
    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30) {
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            // print out status:
            NSLog(@"Processing video frame (%d,%lu)", frameCount, (unsigned long)[finalArrayVal count]);
            CMTime frameTime = CMTimeMake(frameCount, (int32_t)fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if (!append_ok) {
                NSError *error = videoWriter.error;
                if (error != nil) {
                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }
        }
        else {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok) {
        printf("error appending image %d times %d, with error.\n", frameCount, j);
    }
    frameCount++;
}
NSLog(@"**************************************************");
// Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
It converts the array of images to a video file, but it crops part of the image when converting, exactly as shown below.
Input image:
Output video screenshot:
You can see that the upper portion of the image is largely cut off. Note that I have set AVCaptureSessionPreset352x288 as the defaultAVCaptureSessionPreset for the camera. I need the full image converted into video frames. Can anyone provide a solution to the issue stated above?
After some study, I replaced the line below so it passes an additional parameter (the size of the image):
buffer = [myController pixelBufferFromImage:[img CGImage] andSize:imageSize];
instead of this:
buffer = [self pixelBufferFromCGImage:[img CGImage]];
And added this method to myController:
+ (CVPixelBufferRef)pixelBufferFromImage:(CGImageRef)image andSize:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
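If the source image and the target buffer can still differ in size, an aspect-fit draw rect inside this method avoids both cropping and stretching. A sketch (letterboxing with empty bars is an assumption about the desired look):

// Compute an aspect-fit rect so the whole image stays visible,
// centered inside the pixel buffer instead of cropped or stretched.
CGFloat imageWidth = CGImageGetWidth(image);
CGFloat imageHeight = CGImageGetHeight(image);
CGFloat scale = MIN(size.width / imageWidth, size.height / imageHeight);
CGSize scaledSize = CGSizeMake(imageWidth * scale, imageHeight * scale);
CGRect drawRect = CGRectMake((size.width - scaledSize.width) / 2.0,
                             (size.height - scaledSize.height) / 2.0,
                             scaledSize.width, scaledSize.height);
CGContextDrawImage(context, drawRect, image);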

iOS MOV rendering producing odd results (obj-c)

I'm creating an extension for an AS3 AIR application that takes in pixel data as ARGB32, renders it to a .mov container using the H264 codec, and then sends it to the user's camera roll. I'm currently running a test harness which takes a .jpeg image from a local path (on my desktop), gets the pixel data out, and renders it 50 times over into a short clip. The problem I'm having is that when I render at 400x200 it works fine, but with an image of any other size the frames come out very weirdly (images splayed diagonally, sometimes with a little black bar at the bottom right, as if there were missing pixels).
I have a feeling it's to do with the height and width, as those are the only things that change from what I can see. Here is my code:
- (void)initFunction:(NSString *)context width:(int)widthData height:(int)heightData fps:(int)fpsData
{
    NSLog(@"Initializing...");
    error = nil;
    frameCount = 0;                                 // Used for iterating
    fps = fpsData;                                  // FPS from AS3
    width = widthData;                              // Width from AS3
    height = heightData;                            // Height from AS3
    numberOfSecondsPerFrame = 1.0f / (float)fps;    // Seconds per frame
    frameDuration = fps * numberOfSecondsPerFrame;  // Frame showing time
    imageSize = CGSizeMake(width, height);          // imageSize from AS3
    // Set up and remove any pre-existing "render.mov"
    fileMgr = [NSFileManager defaultManager];
    documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"render.mov"];
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
    {
        NSLog(@"This is the first VS render on this device: %@", [error localizedDescription]);
    }
    else
    {
        NSLog(@"Removed previous render file in save path.");
    }
    NSLog(@"Starting render.");
    videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);
    videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                     AVVideoCodecH264, AVVideoCodecKey,
                     [NSNumber numberWithInt:width], AVVideoWidthKey,
                     [NSNumber numberWithInt:height], AVVideoHeightKey,
                     nil];
    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    adaptor = [AVAssetWriterInputPixelBufferAdaptor
               assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
               sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];
    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    buffer = NULL;
    NSLog(@"**************************************************");
}
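Worth noting: bufferAttributes above is built but never handed to the adaptor, which is created with sourcePixelBufferAttributes:nil. If the intent was to request 32ARGB buffers from the adaptor's pool, the dictionary presumably needs to be passed through:

adaptor = [AVAssetWriterInputPixelBufferAdaptor
           assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
           sourcePixelBufferAttributes:bufferAttributes];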
-(void)sendToCameraRoll:(NSString *)videoPath
{
    UISaveVideoAtPathToSavedPhotosAlbum(videoPath, nil, NULL, nil);
}

// Take in pixel data from AIR
- (void)addFrameFromBytes:(CGImageRef *)FrameBytes
{
    UIImage *image = [UIImage imageWithCGImage:*FrameBytes];
    CGImageRef cgImage = [image CGImage];
    buffer = [self pixelBufferFromCGImage:cgImage];
    append_ok = NO;
    int j = 0;
    while (!append_ok) {
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            frameCount = frameCount + 1;
            NSLog(@"Processing video frame %d", frameCount);
            frameTime = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            [NSThread sleepForTimeInterval:0.1]; // Prevents system overload but causes lag.
            if (!append_ok) {
                error = videoWriter.error;
                if (error != nil) {
                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }
        } else {
            printf("Adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1]; // Prevents system overload but causes lag.
        }
        j++;
    }
    if (!append_ok) {
        printf("Error appending image %d times %d, with error.\n", frameCount, j);
    }
}

// Take in a file path from AIR
- (void)addFrameFromURL:(NSString *)FramePath
{
    UIImage *image = [UIImage imageWithContentsOfFile:FramePath];
    CGImageRef cgImage = [image CGImage];
    buffer = [self pixelBufferFromCGImage:cgImage];
    append_ok = NO;
    int j = 0;
    while (!append_ok) {
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            frameCount = frameCount + 1;
            NSLog(@"Processing video frame %d", frameCount);
            frameTime = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            [NSThread sleepForTimeInterval:0.1]; // Prevents system overload but causes lag.
            if (!append_ok) {
                error = videoWriter.error;
                if (error != nil) {
                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }
        } else {
            printf("Adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1]; // Prevents system overload but causes lag.
        }
        j++;
    }
    if (!append_ok) {
        printf("Error appending image %d times %d, with error.\n", frameCount, j);
    }
}
- (void)saveVideoWithTrack:(NSString *)AudioTrackPath
{
    NSLog(@"**************************************************");
    // Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
        NSLog(@"Render complete."); // Used to satisfy 64-bit devices
    }];
    sleep(1); // Required to avoid crash due to overload
    NSLog(@"Render complete.");
    // Audio file addition
    NSLog(@"Singing Tracks...");
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:AudioTrackPath];
    // Get the final video path
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
    // Create the final video and export as MOV
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_output.mov"];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    }
    CMTime nextClipStartTime = kCMTimeZero;
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^{
        NSString *exported = [[NSString alloc] initWithString:[outputFileUrl path]];
        [self sendToCameraRoll:exported];
    }];
    NSLog(@"Saved to camera roll. %@", outputFilePath);
}

- (void)saveVideo
{
    NSLog(@"**************************************************");
    // Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
        NSLog(@"Finished"); // Used to satisfy 64-bit systems.
    }];
    sleep(1); // Required to avoid system crash
    NSLog(@"Render complete.");
    [self sendToCameraRoll:videoOutputPath];
    NSLog(@"Saved to camera roll. %@", videoOutputPath);
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          width,
                                          height,
                                          kCVPixelFormatType_32ARGB,
                                          (CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess) {
        NSLog(@"Failed to create pixel buffer");
    }
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    size_t bytesPerRow = 4 * width; // BytesPerRow: 4 bytes per pixel.
    CGContextRef imageContext = CGBitmapContextCreate(pxdata, width,
                                                      height, 8, bytesPerRow, rgbColorSpace,
                                                      kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(imageContext, CGRectMake(0, 0, width, height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(imageContext);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    NSLog(@"bytesPerRow: %zu", bytesPerRow);
    return pxbuffer;
}
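The diagonal splay described in the question is consistent with a bytes-per-row mismatch: Core Video may pad each row of the pixel buffer for alignment, while the bitmap context above assumes a tightly packed 4*width. Querying the buffer's actual stride is the usual fix (a sketch of the two changed lines):

// Use the pixel buffer's real row stride instead of assuming 4 bytes * width.
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);
CGContextRef imageContext = CGBitmapContextCreate(pxdata, width, height, 8,
                                                  bytesPerRow, rgbColorSpace,
                                                  kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipFirst);

This would also line up with the size observations below: widths whose 4*width already matches the buffer's row alignment show no artifact.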
- (void)viewDidLoad
{
    [super viewDidLoad];
    //[self initFunction:nil width:1138 height:640 fps:25];
    //[self initFunction:nil width:910 height:512 fps:25];
    //[self initFunction:nil width:400 height:200 fps:25];
    [self initFunction:nil width:50 height:50 fps:25];
    // find url...
    // get pixel data...
    // loop through, writing frames from the pixel data
    for (int i = 1; i <= 50; i++) {
        //NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/sequence/%06d.jpg", i];
        //NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/ThisIsATest/ThisIsATest/image1.jpg"];
        //NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/ThisIsATest/ThisIsATest/image2.jpg"];
        //NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/mini.jpg"];
        NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/50x50.jpg"];
        NSLog(@"%@", string);
        [self addFrameFromURL:string];
    }
    [self saveVideo];
}
Edit: All images work if they are 400x200.
They also all work at 800x400 and 1600x800.
600x300 does not work.
Possibly aspect ratios?
Solved this issue using specific dimensions. AVAssetWriter only supports specific "valid" aspect ratios.
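For what it's worth, the working sizes above (400x200, 800x400, 1600x800) all have dimensions that are multiples of 16, while 600x300 does not (and 600x300 has the same 2:1 ratio as 400x200), so dimension alignment rather than the ratio itself may be the underlying constraint. A sketch of rounding up before configuring the writer (the helper name is illustrative):

// Round a dimension up to the next multiple of 16 before creating the writer.
static int alignedDimension(int value)
{
    return ((value + 15) / 16) * 16;
}

// e.g. [self initFunction:nil width:alignedDimension(600) height:alignedDimension(300) fps:25];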

iOS green border when creating mov from UIImages

I'm using the code below to take a series of UIImages and convert them into a .mov. For some reason I keep getting a green border at the bottom or on the right side, depending on whether it's a landscape or portrait photo. The images are 215x320.
How can I remove the green borders? Is there a better way of creating a .mov from UIImages?
- (void)createMov
{
    UIImage *first = [self.videoFrames objectAtIndex:0];
    CGSize frameSize = first.size;
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:fileName]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    if (error) {
        NSLog(@"error creating AssetWriter: %@", [error description]);
    }
    int numPixels = first.size.width * first.size.height;
    int bitsPerSecond = numPixels * 11.04;
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
                                   [NSDictionary dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithInteger:bitsPerSecond], AVVideoAverageBitRateKey,
                                    [NSNumber numberWithInteger:30], AVVideoMaxKeyFrameIntervalKey,
                                    nil], AVVideoCompressionPropertiesKey,
                                   nil];
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings] retain];
    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString *)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString *)kCVPixelBufferHeightKey];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:attributes];
    [videoWriter addInput:writerInput];
    // fixes all errors
    writerInput.expectsMediaDataInRealTime = YES;
    // Start a session:
    BOOL start = [videoWriter startWriting];
    NSLog(@"Session started? %d", start);
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    // Writing.
    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[first CGImage]];
    BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    if (result == NO) // fails on 3GS, but works on iPhone 4
        NSLog(@"failed to append buffer");
    if (buffer)
        CVBufferRelease(buffer);
    [NSThread sleepForTimeInterval:0.05];
    int fps = [Utils frameRate];
    int i = 0;
    for (UIImage *imgFrame in self.videoFrames) {
        i = [self addFrame:adaptor videoWriter:videoWriter buffer:buffer imgFrame:imgFrame i:i fps:fps];
    }
    // Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];
    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    [videoWriter release];
    [writerInput release];
}
- (int)addFrame:(AVAssetWriterInputPixelBufferAdaptor *)adaptor videoWriter:(AVAssetWriter *)videoWriter buffer:(CVPixelBufferRef)buffer imgFrame:(UIImage *)imgFrame i:(int)i fps:(float)fps
{
    if (adaptor.assetWriterInput.readyForMoreMediaData) {
        i++;
        NSLog(@"inside for loop %d", i);
        CMTime frameTime = CMTimeMake(1, fps);
        CMTime lastTime = CMTimeMake(i, fps);
        CMTime presentTime = CMTimeAdd(lastTime, frameTime);
        buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
        BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
        if (result == NO) { // fails on 3GS, but works on iPhone 4
            NSLog(@"failed to append buffer");
            NSLog(@"The error is %@", [videoWriter error]);
        }
        if (buffer) {
            CVBufferRelease(buffer);
        }
        [NSThread sleepForTimeInterval:0.05];
    } else {
        NSLog(@"error");
        i--;
    }
    [NSThread sleepForTimeInterval:0.02];
    return i;
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
                        CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (CFDictionaryRef)options,
                        &pxbuffer);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    size_t rowBytes = CVPixelBufferGetBytesPerRow(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
                                                 CGImageGetHeight(image), 8, rowBytes, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
It seems I had a similar issue, and an idea came to me after reading the two posts below:
http://forum.videohelp.com/threads/314973-green-line-at-bottom-of-video-window
http://en.wikipedia.org/wiki/H.264/MPEG-4_AVC#Features
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithUnsignedLong:sourceWidth], AVVideoWidthKey,
                               [NSNumber numberWithUnsignedLong:sourceHeight], AVVideoHeightKey,
                               nil];
AVAssetWriterInput *writerInput =
    [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
Before starting to write data with the AVAssetWriter, reset sourceWidth and sourceHeight with this method:
size_t fixedLineValue(size_t sourceValue) {
    size_t divide = sourceValue % 4;
    size_t finalValue = sourceValue;
    if (divide) {
        finalValue = (sourceValue / 4 + 1) * 4;
    }
    return finalValue;
}
I can't figure out exactly why this works, but after doing it the ugly green borders were removed. Please comment if there's a better answer and let us know. Thanks.
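For clarity, a usage sketch (the 215x320 frame size comes from the question above):

// Align both source dimensions to a multiple of 4 before building videoSettings.
size_t sourceWidth  = fixedLineValue(215); // rounds up to 216
size_t sourceHeight = fixedLineValue(320); // already a multiple of 4, stays 320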
