iOS MOV rendering producing odd results (Objective-C)

I'm creating an extension for an AS3 AIR application that takes in pixel data as ARGB32, renders it into a .mov container with the H264 codec, and then saves it to the user's camera roll. I'm currently running a test harness which takes a .jpeg from a local path (on my desktop), extracts the pixel data, and renders it 50 times over into a short clip. The problem I'm having is that a 400x200 image renders fine, but with an image of any other size the frames come out wrong: the image is sheared diagonally, sometimes with a small black bar at the bottom right as if pixels are missing.
I have a feeling it's to do with the width and height, as those are the only things that change from what I can see. Here is my code below:
- (void)initFunction:(NSString *)context width:(int)widthData height:(int)heightData fps:(int)fpsData
{
NSLog(@"Initializing...");
error = nil;
frameCount = 0; //Used for iterating
fps = fpsData; //FPS from AS3
width = widthData; //Width from AS3
height = heightData; //Height from AS3
numberOfSecondsPerFrame = 1.0f/(float)fps; //Seconds per frame
frameDuration = fps * numberOfSecondsPerFrame; //Frame showing time (1 with these values)
imageSize = CGSizeMake(width, height); //imageSize from AS3
// Setup and remove pre-existing "render.mov"
fileMgr = [NSFileManager defaultManager];
documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"render.mov"];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
{
NSLog(@"This is the first VS render on this device: %@", [error localizedDescription]);
}
else
{
NSLog(@"Removed previous render file in save path.");
}
NSLog(@"Starting render.");
videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:width], AVVideoWidthKey,
[NSNumber numberWithInt:height], AVVideoHeightKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil]; //Note: built but never passed to the adaptor below
adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
buffer = NULL;
NSLog(@"**************************************************");
}
-(void) sendToCameraRoll:(NSString *)videoPath {
UISaveVideoAtPathToSavedPhotosAlbum(videoPath, nil, NULL, nil);
}
//Take in pixel data from AIR
- (void)addFrameFromBytes:(CGImageRef *)FrameBytes {
UIImage *image = [UIImage imageWithCGImage:*FrameBytes];
CGImageRef cgImage = [image CGImage];
buffer = [self pixelBufferFromCGImage:cgImage];
append_ok = NO;
int j = 0;
while (!append_ok) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
frameCount = frameCount+1;
NSLog(@"Processing video frame %d",frameCount);
frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
[NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
if(!append_ok){
error = videoWriter.error;
if(error!=nil) {
NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
}
}
} else {
printf("Adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
}
j++;
}
if (!append_ok) {
printf("Error appending image %d times %d\n, with error.", frameCount, j);
}
}
//Take in Path from AIR
- (void)addFrameFromURL:(NSString *)FramePath {
UIImage *image = [UIImage imageWithContentsOfFile:FramePath];
CGImageRef cgImage = [image CGImage];
buffer = [self pixelBufferFromCGImage:cgImage];
append_ok = NO;
int j = 0;
while (!append_ok) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
frameCount = frameCount+1;
NSLog(@"Processing video frame %d",frameCount);
frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
[NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
if(!append_ok){
error = videoWriter.error;
if(error!=nil) {
NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
}
}
} else {
printf("Adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
}
j++;
}
if (!append_ok) {
printf("Error appending image %d times %d\n, with error.", frameCount, j);
}
}
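Both addFrame... methods above run the same append loop; as a sketch (using only the ivars already present in this class), the duplication could be pulled into one shared helper that each method calls after building its pixel buffer:

- (void)appendFrameBuffer:(CVPixelBufferRef)frameBuffer {
    BOOL ok = NO;
    while (!ok) {
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            frameCount = frameCount + 1;
            //Same timing maths as the loops above
            CMTime time = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
            ok = [adaptor appendPixelBuffer:frameBuffer withPresentationTime:time];
            if (!ok && videoWriter.error != nil) {
                NSLog(@"Unresolved error %@.", videoWriter.error);
            }
        } else {
            [NSThread sleepForTimeInterval:0.1]; //back off until the input is ready
        }
    }
}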
- (void)saveVideoWithTrack:(NSString *)AudioTrackPath {
NSLog(@"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(@"Render complete."); //Used to satisfy 64-bit devices
}];
sleep(1); //Required to avoid crash due to overload
NSLog(@"Render complete.");
//Audio File Addition
NSLog(@"Singing Tracks...");
AVMutableComposition *mixComposition = [AVMutableComposition composition];
//NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
//NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:AudioTrackPath];
//Get the final video path
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
//Create the final video and export as MOV
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_output.mov"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) {
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
}
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset *audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
//Supported file types include...
//NSLog(@"Supported file types are %@", [_assetExport supportedFileTypes]);
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:^(){
NSString *exported = [[NSString alloc] initWithString:[outputFileUrl path]];
[self sendToCameraRoll:exported];
}];
NSLog(@"Saved to camera roll. %@", outputFilePath);
}
- (void)saveVideo {
NSLog(@"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(@"Finished"); //Used to satisfy 64-bit systems.
}];
sleep(1); //Required to avoid system crash
NSLog(@"Render complete.");
[self sendToCameraRoll:videoOutputPath];
NSLog(@"Saved to camera roll. %@", videoOutputPath);
}
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
width,
height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(@"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
size_t bytesPerRow = 4*width; //Assumes 4 bytes per pixel with no row padding.
CGContextRef imageContext = CGBitmapContextCreate(pxdata, width,
height, 8, bytesPerRow, rgbColorSpace,
kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipFirst);
CGContextDrawImage(imageContext, CGRectMake(0, 0, width, height), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(imageContext);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
NSLog(@"bPR: %zu", CVPixelBufferGetBytesPerRow(pxbuffer)); //Log the buffer's actual bytes per row
return pxbuffer;
}
- (void)viewDidLoad {
[super viewDidLoad];
//[self initFunction:nil width:1138 height:640 fps:25];
//[self initFunction:nil width:910 height:512 fps:25];
//[self initFunction:nil width:400 height:200 fps:25];
[self initFunction:nil width:50 height:50 fps:25];
//find url...
//get pixel data...
//loop through writing it from the pixel data
for (int i = 1; i <= 50; i++) {
//NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/sequence/%06d.jpg", i];
//NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/ThisIsATest/ThisIsATest/image1.jpg"];
//NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/ThisIsATest/ThisIsATest/image2.jpg"];
//NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/mini.jpg"];
NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/50x50.jpg"];
NSLog(@"%@", string);
[self addFrameFromURL:string];
}
[self saveVideo];
}
Edit: all images work if they are 400 x 200.
They also work at 800 x 400 and 1600 x 800.
600 x 300 does not work.
Possibly aspect ratios?

Solved this issue by using specific dimensions. AVAssetWriter only supports certain "valid" frame sizes; this matches the advice further down this page that the frame width should be a multiple of 16.
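For what it's worth, the diagonal shearing described in the question is the classic symptom of a bytes-per-row (stride) mismatch between the CGBitmapContext and the pixel buffer. A way to sidestep "valid" dimensions entirely, sketched here on the assumption that the adaptor is created with sourcePixelBufferAttributes describing kCVPixelFormatType_32ARGB (the bufferAttributes dictionary the question builds but never passes in), is to draw into buffers vended by the adaptor's pool and honour their actual stride:

//Sketch, not verified on-device: allocate from the adaptor's pool and ask the
//buffer for its real bytes-per-row instead of assuming 4 * width.
- (CVPixelBufferRef)pooledPixelBufferFromCGImage:(CGImageRef)image {
    CVPixelBufferRef pxbuffer = NULL;
    CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, adaptor.pixelBufferPool, &pxbuffer);
    if (pxbuffer == NULL) return NULL; //the pool only exists once writing has started
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer); //may be padded beyond 4 * width
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef imageContext = CGBitmapContextCreate(pxdata, width, height, 8, bytesPerRow,
        rgbColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(imageContext, CGRectMake(0, 0, width, height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(imageContext);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}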

Related

create video from images of camera roll - iOS sdk

I have used the following code to create a video from images.
It works fine when I select images from the camera roll that were downloaded from the web, or screenshots, but images taken with the camera appear zoomed in in the movie.
I don't know what is wrong with the camera images.
Can anyone please help me resolve this issue?
-(IBAction)createV:(id)sender
{
NSString *documentsDirectory = [NSHomeDirectory()
stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
CGSize imageSize = [DatabaseAccess getusersetsize];
double nospf = [[[NSUserDefaults standardUserDefaults] valueForKey:@"duration"] intValue];
NSUInteger fps = 10;
NSMutableArray *imageArray;// = [DatabaseAccess getimagelist:@"select imgname from tbl_userimage"];
NSArray* imagePaths = [DatabaseAccess getimagelist:@"select imgname,strftime('%d-%m-%Y', tdate) as tdate from tbl_userimage"];
imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
int i=0;
for (NSString* path in [imagePaths valueForKey:@"image"] )
{
if ([[NSUserDefaults standardUserDefaults] boolForKey:@"disdate"])
{
CGSize imgsize = [DatabaseAccess getusersetsize];
//[imageArray addObject:[[DatabaseAccess drawText:[[imagePaths valueForKey:@"date"] objectAtIndex:i] inImage:[UIImage imageWithContentsOfFile:[DatabaseAccess documentsPathForFileName:path]] atPoint:CGPointMake(imgsize.width-250,imgsize.height-60) ] fixOrientation]];
[imageArray addObject:[DatabaseAccess drawText:[[imagePaths valueForKey:@"date"] objectAtIndex:i] inImage:[UIImage imageWithContentsOfFile:[DatabaseAccess documentsPathForFileName:path]] atPoint:CGPointMake(imgsize.width-250,imgsize.height-60) ]];
}
else
{
[imageArray addObject:[UIImage imageWithContentsOfFile:[DatabaseAccess documentsPathForFileName:path]]];
NSLog(@"%@",path);
// [imageArray addObject:[UIImage imageNamed:path]];
}
i++;
}
[self exportImages:imageArray asVideoToPath:videoOutputPath withFrameSize:imageSize framesPerSecond:fps numberOfSecondsPerFrame:nospf];
}
- (void)exportImages:(NSMutableArray *)imageArray asVideoToPath:(NSString *)videoOutputPath withFrameSize:(CGSize)imageSize framesPerSecond:(NSUInteger)fps numberOfSecondsPerFrame:(double)numberOfSecondsPerFrame {
NSError *error = nil;
NSString *documentsDirectory = [NSHomeDirectory()
stringByAppendingPathComponent:#"Documents"];
NSFileManager *fileMgr = [NSFileManager defaultManager];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(@"Unable to delete file: %@", [error localizedDescription]);
////////////// end setup ///////////////////////////////////
NSLog(@"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeMPEG4 error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
//double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
for(UIImage * img in imageArray)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
//print out status:
NSLog(#"Processing video frame (%d,%lu)",frameCount,(unsigned long)[imageArray count]);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) numberOfSecondsPerFrame);
//CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
// NSLog(#"%#",frameTime);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
}
else {
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(#"Write Ended");
[self playMovie:videoOutputPath];
}
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
// CGSize size = CGSizeMake(400, 200);
CGSize size = [DatabaseAccess getusersetsize];
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(#"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaPremultipliedFirst);
//kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
//CGContextConcatCTM(context, frameTransform);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
I have solved the zoomed-in image issue using this code:
-(UIImage*)scaleImage:(UIImage*)image toSize:(CGSize)newSize
{
UIGraphicsBeginImageContext(newSize);
[image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return newImage;
}
Resize the UIImage before converting it to a CGImage, and
make sure the resized image width is a multiple of 16.
CGSize your_size = CGSizeMake(1600, 800);
UIImage *tempImg = [self scaleImage:img toSize:your_size];
buffer = [self pixelBufferFromCGImage:[tempImg CGImage]];
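If the source sizes vary, a small helper (hypothetical, not part of the answer) can snap any size to a multiple-of-16 width before calling scaleImage:toSize::

//Hypothetical helper: round the width to a multiple of 16 and scale the
//height to roughly preserve the aspect ratio.
static CGSize alignSizeToSixteen(CGSize size) {
    CGFloat width = MAX(16.0, round(size.width / 16.0) * 16.0);
    CGFloat height = round(size.height * (width / size.width));
    return CGSizeMake(width, height);
}

With that, the call above becomes [self scaleImage:img toSize:alignSizeToSixteen(img.size)].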

Converting array of images to mp4

I tried to convert an array of images into one mp4 file, and it seems to work, but when the video player starts playing it only shows a black view. I followed some other posts on here to convert the images, and this is what I got. When I test it, all the pictures are converted and the video gets created, but playback still only shows a black view. Any help or clarification will be appreciated.
- (void) createVideoPlayer:(NSArray *)imagesArray
{
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
//NSLog(@"-->videoOutputPath= %@", videoOutputPath);
// get rid of existing mp4 if exists...
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(@"Unable to delete file: %@", [error localizedDescription]);
CGSize imageSize = CGSizeMake(213, 320);
NSUInteger fps = 3;
////////////// end setup ///////////////////////////////////
NSLog(@"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:320], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
for(UIImage * img in imagesArray)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
//print out status:
NSLog(#"Processing video frame (%d,%lu)",frameCount,(unsigned long)[imagesArray count]);
CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(!append_ok)
{
NSError *error = videoWriter.error;
if(error!=nil)
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok)
printf("error appending image %d times %d\n, with error.", frameCount, j);
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
}];
NSLog(#"Write Ended");
AVMutableComposition* mixComposition = [AVMutableComposition composition];
// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:#"final_video.mp4"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = #"public.mpeg-4";
//NSLog(#"support file types= %#", [_assetExport supportedFileTypes]);
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler: ^(void ) {
// [self SaveVideo:outputFilePath];
moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
[moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
[moviePlayer prepareToPlay];
// And other options you can look through the documentation.
[self.view addSubview:moviePlayer.view];
[moviePlayer play];
}];
NSLog(#"DONE.....outputFilePath--->%#", outputFilePath);
}
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
CGSize size = CGSizeMake(300, 300);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(#"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaPremultipliedLast);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
All UI updates must be on the main thread. Double-check that the completionHandler is executing on the main thread by calling:
NSLog(@"%d", [NSThread isMainThread]);
To execute on the main thread, use GCD:
dispatch_async(dispatch_get_main_queue(), ^{
// your UI update
});
Also, you should always create a weak reference when capturing self, to avoid a strong reference cycle:
__weak typeof(self) weakSelf = self; // now use weakSelf.doSomething
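Putting both points together for the export handler in the question (a sketch; it assumes moviePlayer is a strong property on the view controller):

__weak typeof(self) weakSelf = self;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        //Hop to the main queue before touching any views
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (strongSelf == nil) return; //the controller went away; nothing to update
        strongSelf.moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:outputFileUrl];
        [strongSelf.moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
        [strongSelf.moviePlayer prepareToPlay];
        [strongSelf.view addSubview:strongSelf.moviePlayer.view];
        [strongSelf.moviePlayer play];
    });
}];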

Exported video using AVMutableComposition and AVAssetExportSession does not overlap video and audio

I'm trying to add a 1s audio track ("dummy_recording.m4a") to the beginning of my 3s video. But what I'm getting as a result right now is a 6s long video. It starts with the recording with a black background, then shows only the black background, then shows the video at the end. What am I doing wrong here? I just want the audio to overlap my video starting at the beginning.
-(void) addAudioToFileAtPath:(NSString *) filePath toPath:(NSString *)outFilePath completion:( void ( ^ ) () )completion
{
NSString *audioFilePath = [[NSBundle mainBundle] pathForResource:@"dummy_recording"
ofType:@"m4a"];
NSDictionary *audioInfoDictionary = @{@"audioFilePath": audioFilePath, @"audioDuration": [NSNumber numberWithFloat:1.0]};
NSArray *audioInfoArray = @[audioInfoDictionary];
NSError * error = nil;
AVMutableComposition * composition = [AVMutableComposition composition];
AVURLAsset * videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:filePath] options:nil];
AVAssetTrack * videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID: kCMPersistentTrackID_Invalid];
NSLog(#"videoAsset.duration... value: %lld, timescale: %d, seconds: %lld", videoAsset.duration.value, videoAsset.duration.timescale, videoAsset.duration.value / videoAsset.duration.timescale);
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero
error:&error];
CMTime audioStartTime = kCMTimeZero;
for (NSDictionary * audioInfo in audioInfoArray)
{
NSString * pathString = [audioInfo objectForKey:#"audioFilePath"];
AVURLAsset * urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:pathString] options:nil];
AVAssetTrack * audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
NSLog(#"urlAsset.duration... value: %lld, timescale: %d, seconds: %lld", urlAsset.duration.value, urlAsset.duration.timescale, urlAsset.duration.value / urlAsset.duration.timescale);
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,urlAsset.duration) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
audioStartTime = CMTimeAdd(audioStartTime, CMTimeMake((int) (([[audioInfo objectForKey:#"audioDuration"] floatValue] * RECORDING_FPS) + 0.5), RECORDING_FPS));
}
NSLog(#"composition.duration... value: %lld, timescale: %d, seconds: %lld", composition.duration.value, composition.duration.timescale, composition.duration.value / composition.duration.timescale);
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
assetExport.outputFileType = AVFileTypeMPEG4;
assetExport.outputURL = [NSURL fileURLWithPath:outFilePath];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
NSLog(#"Export Complete");
completion();
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// export cancelled
break;
}
}];
}
Here's the output of the statements that log the durations. The composition is 3s long, which is what I want, but it still exports wrongly at 6s:
videoAsset.duration... value: 1840, timescale: 600, seconds: 3
urlAsset.duration... value: 87040, timescale: 44100, seconds: 1
composition.duration... value: 1840, timescale: 600, seconds: 3
I created the 3s video file from a still image. Here's the code:
NSString *documentsFolder = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)
objectAtIndex:0];
NSString *path = [documentsFolder stringByAppendingPathComponent:@"test_video.mp4"];
DDLogInfo(@"path: %@", path);
NSFileManager *fileManager = [NSFileManager defaultManager];
NSError *removeItemError;
BOOL success = [fileManager removeItemAtPath:path error:&removeItemError];
if (success) {
NSLog(@"removed file");
}
else
{
NSLog(@"Could not delete file -:%@ ",[removeItemError localizedDescription]);
}
NSString *path2 = [documentsFolder stringByAppendingPathComponent:@"test_video_with_audio.mp4"];
DDLogInfo(@"path2: %@", path2);
NSError *removeItemError2;
BOOL success2 = [fileManager removeItemAtPath:path2 error:&removeItemError2];
if (success2) {
NSLog(@"removed file");
}
else
{
NSLog(@"Could not delete file -:%@ ",[removeItemError2 localizedDescription]);
}
//1. Wire the writer.
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
self.videoWriter = videoWriter;
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:640], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings]; //retain should be removed if ARC
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:640] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:640] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
//2. Start a session
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero]; //use kCMTimeZero if unsure
UIImage *image = [UIImage imageNamed:@"dummy_square.jpg"];
CGImageRef cgImage = image.CGImage;
//3. Write some samples
//CVPixelBufferRef pixelBuffer = [self newPixelBufferFromCGImage:cgImage];
CVPixelBufferRef pixelBuffer = [self pixelBufferFromCGImage:cgImage];
BOOL result = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:CMTimeMakeWithSeconds(3.0, RECORDING_FPS)];
if (result == NO)
NSLog(#"failed to append buffer");
else
NSLog(#"appended buffer!");
if(pixelBuffer)
{
CVBufferRelease(pixelBuffer);
}
//4. Finish the session
[writerInput markAsFinished];
//[videoWriter endSessionAtSourceTime:…]; //optional; can call finishWriting without specifying endTime
self.library = [[ALAssetsLibrary alloc] init];
__weak ALAssetsLibrary *lib = self.library;
[videoWriter finishWritingWithCompletionHandler:^{
[self addAudioToFileAtPath:path toPath:path2 completion:^{
NSString *albumName = @"Test Album";
NSURL *pathUrl = [[NSURL alloc] initWithString:path2];
[lib addAssetsGroupAlbumWithName:albumName resultBlock:^(ALAssetsGroup *group) {
///checks if group previously created
if(group == nil){
//enumerate albums
[lib enumerateGroupsWithTypes:ALAssetsGroupAlbum
usingBlock:^(ALAssetsGroup *g, BOOL *stop)
{
//if the album is equal to our album
if ([[g valueForProperty:ALAssetsGroupPropertyName] isEqualToString:albumName]) {
[lib writeVideoAtPathToSavedPhotosAlbum:pathUrl completionBlock:^(NSURL *assetURL, NSError *error) {
//then get the image asseturl
[lib assetForURL:assetURL
resultBlock:^(ALAsset *asset) {
//put it into our album
[g addAsset:asset];
} failureBlock:^(NSError *error) {
}];
}];
}
}failureBlock:^(NSError *error){
}];
}else{
[lib writeVideoAtPathToSavedPhotosAlbum:pathUrl completionBlock:^(NSURL *assetURL, NSError *error) {
//then get the image asseturl
[lib assetForURL:assetURL
resultBlock:^(ALAsset *asset) {
//put it into our album
[group addAsset:asset];
} failureBlock:^(NSError *error) {
}];
}];
}
} failureBlock:^(NSError *error) {
}];
}];
}];
Here I show you how to convert an array of images into a video, and also how to add music to it:
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory12 = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory12 stringByAppendingPathComponent:@"test_output.mp4"];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(@"Unable to delete file: %@", [error localizedDescription]);
CGSize imageSize = CGSizeMake(480, 320);
NSUInteger fps = 1;
NSLog(@"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
//NSLog(#"fps :%f",(60.0-finalSec)/(float)[photoAlbumImages count]);
float numberOfSecondsPerFrame =60.0/(float)[photoAlbumImages count];
//NSLog(#"total fps :%f",numberOfSecondsPerFrame);
float frameDuration = fps * numberOfSecondsPerFrame;
NSLog(#"frame duration :%f",frameDuration);
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
for(UIImage * img12 in photoAlbumImages)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img12 CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
//print out status:
NSLog(#"Processing video frame (%d,%d)",frameCount,[photoAlbumImages count]);
CMTime frameTime12 = CMTimeMake(frameCount*frameDuration,fps);
// NSLog(#"%#",frameTime12);
NSLog(#"seconds = %f", CMTimeGetSeconds(frameTime12));
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime12];
CVPixelBufferRelease(buffer);
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
}
else {
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.3];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^(){
NSLog (#"finished writing");
}];
NSLog(#"Write Ended");
////////////////////////////////////////////////////////////////////////////
////////////// OK now add an audio file to move file /////////////////////
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
// audio input file...
AVURLAsset* audioAsset;
if(pathURL==NULL){
NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
}
// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *str=[NSString stringWithFormat:@"project1/imgtovid%@.mov",[self getCurrentDateTimeAsNSString]];
NSString *outputFilePath = [documentsDirectory12 stringByAppendingPathComponent:str];
// NSString* webStringURL = [outputFilePath stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
NSURL *outputFileUrl1;
if(outputFileUrl!=nil){
NSString* webStringURL = [outputFilePath stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
outputFileUrl1 = [NSURL URLWithString:webStringURL];
[self.project1Array addObject:[NSString stringWithFormat:@"file://localhost%@",outputFileUrl1]];
[positionArray addObject:[NSString stringWithFormat:@"file://localhost%@",outputFileUrl1]];
}
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
// NSLog(#"duration - %f",videoAsset.duration);
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack12 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
if(pathURL==NULL){
[b_compositionAudioTrack12 insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
}else{
[b_compositionAudioTrack12 insertTimeRange:audio_timeRange ofTrack:[[addAudioAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
}
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = #"public.mpeg-4";
_assetExport.outputURL = outputFileUrl;
NSLog(#"duration = %f", CMTimeGetSeconds(videoAsset.duration));
_assetExport.timeRange=CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
}
Here is the pixel-buffer-from-image code:
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef)photimage {
CGSize size = CGSizeMake(480, 320);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(#"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaPremultipliedFirst);
//kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, size.width,
size.height), photimage);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
If you want any more help, please feel free to get in touch.
I had the same requirement of making a movie from images and sounds. I used this method to add the movie and audio.
-(void)CompileFilesToMakeMovie
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
//AUDIO FILE PATH
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString* path = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"final_%@.m4a",appDelegate.storyName]];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:path];
//VIDEO FILE PATH
NSString* path1 = [documentsDirectory stringByAppendingPathComponent:@"temp.mp4"];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:path1];
//FINAL VIDEO PATH
NSString* path2 = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"outputFile_%@.mp4",appDelegate.storyName]];
NSURL* outputFileUrl = [NSURL fileURLWithPath:path2];
NSLog(@"%@",path2);
if ([[NSFileManager defaultManager] fileExistsAtPath:path2])
[[NSFileManager defaultManager] removeItemAtPath:path2 error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, video_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:CMTimeMake(2,1) error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset640x480];
_assetExport.outputFileType = #"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
//VIDEO COMPLETED
}
];
}
Sorry, the code is a little bit messy, but I think it will work. Please let me know if I need to explain it more.
MOVIE MAKING CODE EDITED
#pragma mark - Movie Making Code
-(void)makeMovie{
//making movie and audio code ll be here
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString* path = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"temp.mp4"]];
NSLog(@"%@",path);
if ([[NSFileManager defaultManager] fileExistsAtPath:path])
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
CGSize sz = CGSizeMake(480, 320);
[self writeImageAsMovie:appDelegate.imageArray toPath:path size:sz];
}
- (void)writeImageAsMovie:(NSMutableArray *)image toPath:(NSString*)path size:(CGSize)size
{
//last screen adding
UIImage *tempImg=[UIImage imageNamed:@"powered_by.png"];
[image addObject:tempImg];
//last screen adding end
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Write samples:
/* CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage size:size];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(duration-1, 2)];
*/
float nxttime=0;
float time=0;
CVPixelBufferRef buffer=nil;
@autoreleasepool {
for(int i=0;i<image.count;i++)
{
NSLog(@"%lu",(unsigned long)image.count);
if([writerInput isReadyForMoreMediaData])
{
if(i!=0 && i!=4)
{
NSLog(@"AUDIO DURATION:%@",appDelegate.audioDuration);
nxttime=nxttime+time;
time=([[appDelegate.audioDuration objectAtIndex:i-1] floatValue])+1;
}
else if(i==0)
{
nxttime=0;
time=3;
}
else if(i==4)
{
nxttime=nxttime+5;
}
buffer = [self pixelBufferFromCGImage:[[image objectAtIndex:i] CGImage] size:size];
CMTime frameTime = CMTimeMake(time, 1 );
CMTime lastTime=CMTimeMake(nxttime, 1);
CMTime presentTime=CMTimeAdd(lastTime, frameTime);
// NSLog(#"%d: ft:%# Lt:%# PT:%#",i,frameTime,lastTime,presentTime);
[adaptor appendPixelBuffer:buffer withPresentationTime:lastTime];
}
else
{
i=i-1;
}
}
}
//Finish the session:
[writerInput markAsFinished];
[self latestCombineVoices];
[videoWriter finishWriting];
}
- (CVPixelBufferRef) pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options,
&pxbuffer);
status=status; //Silence the unused-variable warning.
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
// CGContextTranslateCTM(context, 0, CGImageGetHeight(image));
//CGContextScaleCTM(context, 1.0, -1.0);//Flip vertically to account for different origin
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
In my case, for example: I have img1, img2, img3 and sound1, sound2, sound3, so I make a movie where img1 is shown for sound1's duration, and so on.
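The timing bookkeeping above boils down to accumulating each sound's duration into the next frame's presentation time. A condensed sketch with hypothetical images and durations arrays (ready-for-data checks omitted for brevity):

CMTime cursor = kCMTimeZero;
for (NSUInteger i = 0; i < images.count; i++) {
    CVPixelBufferRef buf = [self pixelBufferFromCGImage:[images[i] CGImage] size:size];
    [adaptor appendPixelBuffer:buf withPresentationTime:cursor]; //frame i starts here
    CVPixelBufferRelease(buf);
    //advance by sound i's length; 600 is a common video timescale
    cursor = CMTimeAdd(cursor, CMTimeMakeWithSeconds([durations[i] doubleValue], 600));
}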

ios AVAssetWriter output invalid video format

This code works for creating a video from images. AVAssetWriter runs without error and exports the output video (.mov), but it fails for certain image sizes.
120 x 160 - fail
180 x 640, 240 x 320, 160 x 213.333, 96 x 128 - pass
P.S.: Tested on an iPod 4th gen, iOS 5.1.
This is a screenshot http://postimg.org/image/gih1ofvbn/ of the failed video at size 120 x 160. Please help me solve this issue; any recommendation for another video encoder library is also welcome. Thanks
This is my code
- (void) writeImagesAsMovie:(NSArray *)images withDelaySec:(float)delaySec toPath:(NSString*)path {
//delete file
NSFileManager *fileMgr = [[NSFileManager alloc] init];
NSError *error2;
if ([fileMgr removeItemAtPath:path error:&error2] != YES)
NSLog(#"Unable to delete file: %#", [error2 localizedDescription]);
UIImage *first = [images objectAtIndex:0];
CGSize frameSize = first.size;
NSLog(#"__create video: %f %f", frameSize.width, frameSize.height);
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
if(error) {
NSLog(#"error creating AssetWriter: %#",[error description]);
}
int numPixels = frameSize.width * frameSize.height;
int bitsPerSecond;
float bitsPerPixel;
// Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
if ( numPixels < (640 * 480) )
bitsPerPixel = 4.05; // This bitrate matches the quality produced by AVCaptureSessionPresetMedium or Low.
else
bitsPerPixel = 11.4; // This bitrate matches the quality produced by AVCaptureSessionPresetHigh.
bitsPerSecond = numPixels * bitsPerPixel;
NSDictionary *videoCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInteger:frameSize.width], AVVideoWidthKey,
[NSNumber numberWithInteger:frameSize.height], AVVideoHeightKey,
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInteger:bitsPerSecond], AVVideoAverageBitRateKey,
[NSNumber numberWithInteger:30], AVVideoMaxKeyFrameIntervalKey,
nil], AVVideoCompressionPropertiesKey,
nil];
AVAssetWriterInput *writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoCompressionSettings];
writerInput.expectsMediaDataInRealTime = YES;
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
[videoWriter addInput:writerInput];
// fixes all errors
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[first CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
if (result == NO) //fails on 3GS, but works on iPhone 4
NSLog(@"failed to append buffer");
if(buffer) {
CVBufferRelease(buffer);
}
float delayMili = delaySec * 25;
int fps = 30;
int i = 0;
for (UIImage *imgFrame in images)
{
if (adaptor.assetWriterInput.readyForMoreMediaData) {
i++;
CMTime frameTime = CMTimeMake(delayMili, fps);
CMTime lastTime = CMTimeMake(delayMili * i, fps);
CMTime presentTime = CMTimeAdd(lastTime, frameTime);
NSLog(#"__presentTime: %f", CMTimeGetSeconds(presentTime));
buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
if (result == NO) //failes on 3GS, but works on iphone 4
{
NSLog(#"failed to append buffer");
NSLog(#"The error is %#", [videoWriter error]);
}
if(buffer) {
CVBufferRelease(buffer);
}
} else {
NSLog(#"error");
i--;
}
}
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWriting];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
[videoWriter release];
[writerInput release];
NSLog(#"Movie created successfully");
}
The width of the video should be divisible by 16.
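A quick way to confirm that a given size hits this problem is a diagnostic check right after CVPixelBufferCreate, inside a pixelBufferFromCGImage-style helper like the ones elsewhere on this page (a sketch; variable names assumed): if the buffer's real stride differs from the 4 * width the bitmap context assumes, the frames come out corrupted like the screenshot.

size_t assumedBytesPerRow = 4 * (size_t)frameSize.width;
size_t actualBytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);
if (actualBytesPerRow != assumedBytesPerRow) {
    //CoreVideo padded the rows; drawing with 4 * width will shear the image
    NSLog(@"Stride mismatch: assumed %zu, actual %zu", assumedBytesPerRow, actualBytesPerRow);
}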

create video from images and save to photo album

I was making a video out of an array of images and an audio file,
but I can't seem to save it to the camera roll.
Sometimes it saves, and sometimes it doesn't, giving me a strange error:
outputFile.mp4 cannot be saved to the saved photos album: Error Domain=NSOSStatusErrorDomain Code=-12848 "Movie could not be played." UserInfo=0x7bd7370 {NSLocalizedDescription=Movie could not be played.}
This function creates video from array of images :
-(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
{
NSLog(#"Write Started");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 16;
int kRecordingFPS = 30;
UIImage * im = [UIImage imageNamed:@"IMG_0103.JPG"];
NSArray * imageArray = [[NSArray alloc]initWithObjects:im,im,im,im,im,im,im,im,im,im,im,im,im,im,im,im, nil];
for(UIImage * img in imageArray)
{
buffer = [self pixelBufferFromCGImage:[img CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) kRecordingFPS);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(buffer)
CVBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
}
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(#"Write Ended");
[videoWriterInput release];
[videoWriter release];
[imageArray release];
}
This function adds the audio :
-(void)CompileFilesToMakeMovie{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString* audio_inputFileName = @"Rihanna-Take a Bow.mp3";
NSString* audio_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,audio_inputFileName];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
NSString* video_inputFileName = @"a.mp4";
NSString* video_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,video_inputFileName];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
NSString* outputFileName = @"outputFile.mp4";
NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,outputFileName];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = outputFileUrl;
[_assetExport setTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)];
[_assetExport exportAsynchronouslyWithCompletionHandler:nil];
[audioAsset release];
[videoAsset release];
[_assetExport release];
}
The function calls :
- (void)viewDidLoad
{
[super viewDidLoad];
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
UIImage * i = [UIImage imageNamed:@"IMG_0103.JPG"];
[self writeImagesToMovieAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,@"a.mp4"] withSize:CGSizeMake(i.size.width, i.size.height)];
NSString* exportVideoPath1 = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,@"a.mp4"];
NSString* exportVideoPath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,@"outputFile.mp4"];
UISaveVideoAtPathToSavedPhotosAlbum (exportVideoPath,self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}
In my case, the reason was that AVAssetWriter hadn't finished writing. I solved it with the following piece of code:
__block BOOL _finished = NO;
[assetWriter finishWritingWithCompletionHandler:^{
_finished = YES;
}];
while (!_finished) {
[NSThread sleepForTimeInterval:0.1];
}
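The same wait can be written without polling by blocking on a semaphore until the handler fires (a GCD variant of the same idea; safe as long as the caller is not running on the queue the handler uses):

dispatch_semaphore_t done = dispatch_semaphore_create(0);
[assetWriter finishWritingWithCompletionHandler:^{
    dispatch_semaphore_signal(done); //the writer is fully finished here
}];
dispatch_semaphore_wait(done, DISPATCH_TIME_FOREVER);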