I am trying to extract 2 frames per second from a video using generateCGImagesAsynchronouslyForTimes, but my app crashes. I am monitoring memory usage, but it never goes above 14 MB.
Here is the code:
- (void) createImagesFromVideoURL:(NSURL *) videoUrl atFPS: (int) requiredFPS completionBlock: (void(^) (NSMutableArray *frames, CGSize frameSize)) block
{
NSMutableArray *requiredFrames = [[NSMutableArray alloc] init];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generator.requestedTimeToleranceAfter = kCMTimeZero;
generator.requestedTimeToleranceBefore = kCMTimeZero;
generator.appliesPreferredTrackTransform = YES;
UIImage *sampleGeneratedImage;
for (Float64 i = 0; i < CMTimeGetSeconds(asset.duration) * requiredFPS; i++)
{
CMTime time = CMTimeMake(i, requiredFPS);
NSError *err;
CMTime actualTime;
CGImageRef image = [generator copyCGImageAtTime:time actualTime:&actualTime error:&err];
if (! err)
{
sampleGeneratedImage = [[UIImage alloc] initWithCGImage:image];
CGImageRelease(image); //copyCGImageAtTime returns an owned reference, so release it
break;
}
}
//Get the maximum size from 1 frame
generator.maximumSize = [self getMultipleOf16AspectRatioForCurrentFrameSize:sampleGeneratedImage.size];
NSMutableArray *requestedFrameTimes = [[NSMutableArray alloc] init];
for (Float64 i = 0; i < CMTimeGetSeconds(asset.duration) * requiredFPS; i++)
{
CMTime time = CMTimeMake(i, requiredFPS);
[requestedFrameTimes addObject:[NSValue valueWithCMTime:time]];
}
[generator generateCGImagesAsynchronouslyForTimes:[requestedFrameTimes copy] completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
if (image)
{
UIImage *generatedImage = [UIImage imageWithCGImage:image];
[requiredFrames addObject:generatedImage];
}
if (CMTimeCompare(requestedTime, [[requestedFrameTimes lastObject] CMTimeValue]) == 0)
{
NSLog(#"Image processing complete");
dispatch_async(dispatch_get_main_queue(), ^{
block(requiredFrames, generator.maximumSize);
});
}
else
{
NSLog(#"Getting frame at %lld", actualTime.value/actualTime.timescale);
}
}];
}
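One pattern that may help keep memory bounded here is to request the times in small batches instead of all at once, so only a few decoded frames are in flight at a time. A rough sketch (the helper name and the batch size are my own, not from the original code):
- (void) generateImagesInBatches:(NSArray *)times generator:(AVAssetImageGenerator *)generator output:(NSMutableArray *)output completion:(void(^)(void))completion
{
static const NSUInteger kBatchSize = 10; //arbitrary; tune for your assets
if (times.count == 0) { completion(); return; }
NSUInteger count = MIN(kBatchSize, times.count);
NSArray *batch = [times subarrayWithRange:NSMakeRange(0, count)];
NSArray *rest = [times subarrayWithRange:NSMakeRange(count, times.count - count)];
__block NSUInteger handled = 0;
[generator generateCGImagesAsynchronouslyForTimes:batch completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
if (result == AVAssetImageGeneratorSucceeded && image)
{
[output addObject:[UIImage imageWithCGImage:image]];
}
//The handler fires once per requested time, including failures.
if (++handled == batch.count)
{
//Recurse on the remaining times once this batch has drained.
[self generateImagesInBatches:rest generator:generator output:output completion:completion];
}
}];
}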
I want to display video thumbnails using ALAssetLibrary, and to show the gallery's videos in my app I filtered for videos with ALAssetsFilter.
But I am still getting a null value in the asset of type ALAsset.
Please tell me what I am doing wrong with my code.
I appreciate the help.
-(void)loadAssets{
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library enumerateGroupsWithTypes:ALAssetsGroupAll usingBlock:^(ALAssetsGroup *group, BOOL *stop) {
[group setAssetsFilter:[ALAssetsFilter allVideos]];
[group enumerateAssetsWithOptions:NSEnumerationReverse usingBlock:^(ALAsset *asset, NSUInteger index, BOOL *innerStop) {
if (asset)
{
dic = [[NSMutableDictionary alloc] init];
ALAssetRepresentation *defaultRepresentation = [asset defaultRepresentation];
NSString *uti = [defaultRepresentation UTI];
videoURL = [[asset valueForProperty:ALAssetPropertyURLs] valueForKey:uti];
NSString *title = [NSString stringWithFormat:@"video %d", arc4random()%100];
UIImage *image = [self imageFromVideoURL:videoURL];
[dic setValue:image forKey:@"image"];
[dic setValue:title forKey:@"name"];
[dic setValue:videoURL forKey:@"url"];
[allVideos addObject:asset];
}
}];
[_collectionView reloadData];
}
failureBlock:^(NSError *error)
{
NSLog(#"error enumerating AssetLibrary groups %#\n", error);
}];
}
- (UIImage *)imageFromVideoURL:(NSURL*)videoURL
{
UIImage *image = nil;
AVAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
imageGenerator.appliesPreferredTrackTransform = YES;
// calc midpoint time of video
Float64 durationSeconds = CMTimeGetSeconds([asset duration]);
CMTime midpoint = CMTimeMakeWithSeconds(durationSeconds/2.0, 600);
// get the image from
NSError *error = nil;
CMTime actualTime;
CGImageRef halfWayImage = [imageGenerator copyCGImageAtTime:midpoint actualTime:&actualTime error:&error];
if (halfWayImage != NULL)
{
// cgimage to uiimage
image = [[UIImage alloc] initWithCGImage:halfWayImage];
[dic setValue:image forKey:@"ImageThumbnail"];//kImage
NSLog(@"Values of dictionary ==> %@", dic);
NSLog(@"Videos Are: %@", allVideos);
CGImageRelease(halfWayImage);
}
return image;
}
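One more thing worth checking before enumerating: if the app was denied Photos access, the enumeration blocks silently see nothing, which looks exactly like a null ALAsset. A quick guard (iOS 6 and later):
if ([ALAssetsLibrary authorizationStatus] != ALAuthorizationStatusAuthorized)
{
NSLog(@"Photo library access not granted (status = %d)", (int)[ALAssetsLibrary authorizationStatus]);
return;
}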
Use this code; it may help you generate the thumbnail image:
-(UIImage*)loadImage {
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:self.videoURL options:nil];
AVAssetImageGenerator *generate = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generate.appliesPreferredTrackTransform = YES;
NSError *err = NULL;
CMTime time = CMTimeMake(1, 60);
CGImageRef imgRef = [generate copyCGImageAtTime:time actualTime:NULL error:&err];
UIImage *thumbnail = [[UIImage alloc] initWithCGImage:imgRef];
CGImageRelease(imgRef); //copyCGImageAtTime returns an owned reference
return thumbnail;
}
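An example call site, assuming a thumbnailView outlet (my own name, for illustration): generate the thumbnail off the main thread, then assign it on the main thread.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
UIImage *thumb = [self loadImage];
dispatch_async(dispatch_get_main_queue(), ^{
self.thumbnailView.image = thumb;
});
});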
I have created the thumbnail image from a video; it works for me, and the code is below.
NSURL *videoURL = [info objectForKey:UIImagePickerControllerMediaURL];
NSLog(#"store url %#",videoURL);
AVAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
if ([[avAsset tracksWithMediaType:AVMediaTypeVideo] count] > 0)
{
AVAssetImageGenerator *imageGenerator =[AVAssetImageGenerator assetImageGeneratorWithAsset:avAsset];
Float64 durationSeconds = CMTimeGetSeconds([avAsset duration]);
CMTime midpoint = CMTimeMakeWithSeconds(durationSeconds/2.0, 600);
NSError *error;
CMTime actualTime;
CGImageRef halfWayImage = [imageGenerator copyCGImageAtTime:midpoint actualTime:&actualTime error:&error]; //use the computed midpoint, not kCMTimeZero
if (halfWayImage != NULL)
{
NSString *actualTimeString = (NSString *)CFBridgingRelease(CMTimeCopyDescription(NULL, actualTime));
NSString *requestedTimeString = (NSString *)CFBridgingRelease(CMTimeCopyDescription(NULL, midpoint));
NSLog(#"Got halfWayImage: Asked for %#, got %#", requestedTimeString, actualTimeString);
UIImage *img = [UIImage imageWithCGImage:halfWayImage];
CGImageRelease(halfWayImage); //release the owned CGImage
playButton.hidden = NO;
self.myimageView.image = img; //[self scaleImage:img maxWidth:(img.size.width/5) maxHeight:(img.size.height/5)];
}
}
Finally I got the thumbnail image; hope it's helpful.
I tried to convert an array of images to one mp4 file, and it seems to work: when I test it, all the pictures are converted and the video gets created. But when the video player starts to play it, it only shows a black view. I followed some other posts on here to convert the images, and this is what I have. Any help or clarification will be appreciated.
- (void) createVideoPlayer:(NSArray *)imagesArray
{
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
//NSLog(@"-->videoOutputPath= %@", videoOutputPath);
// get rid of existing mp4 if exists...
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(#"Unable to delete file: %#", [error localizedDescription]);
CGSize imageSize = CGSizeMake(213, 320);
NSUInteger fps = 3;
////////////// end setup ///////////////////////////////////
NSLog(#"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:(int)imageSize.width], AVVideoWidthKey, //was hard-coded 320; should match the frame size
[NSNumber numberWithInt:(int)imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
for(UIImage * img in imagesArray)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
//print out status:
NSLog(#"Processing video frame (%d,%lu)",frameCount,(unsigned long)[imagesArray count]);
CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(!append_ok)
{
NSError *error = videoWriter.error;
if(error!=nil)
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok)
printf("error appending image %d after %d attempts\n", frameCount, j);
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(@"Write Ended");
}];
//Note: execution continues here immediately, before the writer has finished;
//the composition below may read an incomplete file (see the sketch after this code).
AVMutableComposition* mixComposition = [AVMutableComposition composition];
// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:#"final_video.mp4"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = @"public.mpeg-4";
//NSLog(@"support file types= %@", [_assetExport supportedFileTypes]);
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler: ^(void ) {
// [self SaveVideo:outputFilePath];
moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
[moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
[moviePlayer prepareToPlay];
// And other options you can look through the documentation.
[self.view addSubview:moviePlayer.view];
[moviePlayer play];
}];
NSLog(#"DONE.....outputFilePath--->%#", outputFilePath);
}
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
CGSize size = CGSizeMake(300, 300); //note: this should match the writer's AVVideoWidthKey/AVVideoHeightKey
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(#"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaPremultipliedLast);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
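One likely contributor to the black view: the composition and export run immediately after finishWritingWithCompletionHandler: returns, i.e. before the writer has actually finished, so test_output.mp4 may still be incomplete when it is read back. A minimal sketch of the safer ordering (composeAndPlayVideoAtPath: is a hypothetical helper wrapping the composition code above):
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
//The file is complete only once this handler fires.
dispatch_async(dispatch_get_main_queue(), ^{
[self composeAndPlayVideoAtPath:videoOutputPath]; //hypothetical helper
});
}];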
All UI updates must be on the main thread. Double-check that the completionHandler is executing on the main thread by calling:
NSLog(@"%d", [NSThread isMainThread]);
To execute on the main thread, use GCD:
dispatch_async(dispatch_get_main_queue(), ^{
// your UI update
});
Also, you should always create a weak reference when capturing self in a block, to avoid a strong reference cycle:
__weak typeof(self) weakSelf = self; // now use weakSelf.doSomething
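Putting both together for the export above, a sketch (assuming moviePlayer is a property on self, and _assetExport and outputFilePath as in the question):
__weak typeof(self) weakSelf = self;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
__strong typeof(weakSelf) strongSelf = weakSelf;
if (!strongSelf) return;
strongSelf.moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
[strongSelf.moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
[strongSelf.moviePlayer prepareToPlay];
[strongSelf.view addSubview:strongSelf.moviePlayer.view];
[strongSelf.moviePlayer play];
});
}];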
I'm creating an extension to an AS3 AIR application that takes in pixel data as ARGB32; I want to render it into a .mov container using the H264 codec and then save it to the user's camera roll. I'm currently running a test harness that takes a .jpeg from a local path on my desktop, pulls the pixel data out, and renders it 50 times over into a short clip. The problem I'm having: when I render at 400x200 it works fine, but with an image of any other size the frames come out wrong (images splayed diagonally, sometimes with a little black bar at the bottom right, as if pixels are missing).
I have a feeling it's to do with the height and width, as those are the only things that change from what I can see. Here is my code below:
- (void)initFunction:(NSString *)context width:(int)widthData height:(int)heightData fps:(int)fpsData
{
NSLog(#"Initializing...");
error = nil;
frameCount = 0; //Used for iterating
fps = fpsData; //FPS from AS3
width = widthData; //Width from AS3
height = heightData; //Height from AS3
numberOfSecondsPerFrame = 1.0f/(float)fps; //Seconds per frame
frameDuration = fps * numberOfSecondsPerFrame; //Frame showing time
imageSize = CGSizeMake(width, height); //imageSize from AS3
// Setup and remove pre-existing "render.mov"
fileMgr = [NSFileManager defaultManager];
documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"render.mov"];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
{
NSLog(#"This is the first VS render on this device: %#", [error localizedDescription]);
}
else
{
NSLog(#"Removed previous render file in save path.");
}
NSLog(#"Starting render.");
videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:width], AVVideoWidthKey,
[NSNumber numberWithInt:height], AVVideoHeightKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:bufferAttributes]; //was nil, which left bufferAttributes unused
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
buffer = NULL;
NSLog(#"**************************************************");
}
-(void) sendToCameraRoll:(NSString *)videoPath {
UISaveVideoAtPathToSavedPhotosAlbum(videoPath, nil, NULL, nil);
}
//Take in URL from AIR
- (void)addFrameFromBytes:(CGImageRef *)FrameBytes {
UIImage *image = [UIImage imageWithCGImage:*FrameBytes];
CGImageRef cgImage = [image CGImage];
buffer = [self pixelBufferFromCGImage:cgImage];
append_ok = NO;
int j = 0;
while (!append_ok) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
frameCount = frameCount+1;
NSLog(#"Processing video frame %d",frameCount);
frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
[NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
if(!append_ok){
error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
} else {
printf("Adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
}
j++;
}
if (!append_ok) {
printf("Error appending image %d after %d attempts\n", frameCount, j);
}
}
//Take in Path from AIR
- (void)addFrameFromURL:(NSString *)FramePath {
UIImage *image = [UIImage imageWithContentsOfFile:FramePath];
CGImageRef cgImage = [image CGImage];
buffer = [self pixelBufferFromCGImage:cgImage];
append_ok = NO;
int j = 0;
while (!append_ok) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
frameCount = frameCount+1;
NSLog(#"Processing video frame %d",frameCount);
frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
[NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
if(!append_ok){
error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
} else {
printf("Adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1]; //Prevents system overload but causes lag.
}
j++;
}
if (!append_ok) {
printf("Error appending image %d after %d attempts\n", frameCount, j);
}
}
- (void)saveVideoWithTrack:(NSString *)AudioTrackPath {
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(@"Render complete."); ///Used to satisfy 64-bit devices
}];
sleep(1); //Fragile: a fixed delay does not guarantee the writer has finished;
//the composition below should really run inside the completion handler.
NSLog(@"Render complete.");
//Audio File Addition
NSLog(#"Singing Tracks...");
AVMutableComposition *mixComposition = [AVMutableComposition composition];
//NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
//NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:AudioTrackPath];
//Get the final video path
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
//Create the final video amd export as MOV
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_output.mov"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) {
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
}
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset *audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
//Supported file types include...
//NSLog(#"Supported file types are %#", [_assetExport supportedFileTypes]);
_assetExport.outputFileType = #"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:^(){
NSString *exported = [[NSString alloc] initWithString:[outputFileUrl path]];
[self sendToCameraRoll:exported];
}];
NSLog(#"Saved to camera roll. %#", outputFilePath);
}
- (void)saveVideo {
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(#"Finished"); //Used to satisfy 64-bit systems.
}];
sleep(1); //Fragile: a fixed delay does not guarantee the writer has finished;
//better to call sendToCameraRoll: from inside the completion handler above.
NSLog(@"Render complete.");
[self sendToCameraRoll:videoOutputPath];
NSLog(@"Saved to camera roll. %@", videoOutputPath);
}
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
width,
height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(#"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
size_t bytesPerRow = 4*width; //BytesPerRow 4 bytes each pixel. NOTE: assumes no row padding; see the note after this question.
CGContextRef imageContext = CGBitmapContextCreate(pxdata, width,
height, 8, bytesPerRow, rgbColorSpace,
kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipFirst);
CGContextDrawImage(imageContext, CGRectMake(0, 0, width, height), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(imageContext);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
NSLog(#"bPR: %zd", pxbuffer);
return pxbuffer;
}
- (void)viewDidLoad {
[super viewDidLoad];
//[self initFunction:nil width:1138 height:640 fps:25];
//[self initFunction:nil width:910 height:512 fps:25];
//[self initFunction:nil width:400 height:200 fps:25];
[self initFunction:nil width:50 height:50 fps:25];
//find url...
//get pixel data...
//loop through writing it from the pixel data
for (int i = 1; i <= 50; i++) {
//NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/sequence/%06d.jpg", i];
//NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/ThisIsATest/ThisIsATest/image1.jpg"];
//NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/ThisIsATest/ThisIsATest/image2.jpg"];
//NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/mini.jpg"];
NSString *string = [NSString stringWithFormat:@"/Users/Lewis/Desktop/50x50.jpg"];
NSLog(@"%@", string);
[self addFrameFromURL:string];
}
[self saveVideo];
}
Edit: all images work if they are 400 x 200.
They also all work at 800 x 400 and 1600 x 800.
600 x 300 does not work.
Possibly aspect ratios?
Solved this issue by sticking to specific dimensions; AVAssetWriter appears to accept only certain "valid" sizes.
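For what it's worth, the "valid dimensions" behaviour lines up with pixel buffer row padding: CVPixelBufferCreate may allocate each row with extra alignment bytes, so a bitmap context built with a hard-coded 4*width stride writes pixels at the wrong offsets and the frames shear diagonally. Widths like 400, 800, and 1600 happen to produce strides that need no padding, which is why they work. A sketch of the drawing part of pixelBufferFromCGImage: that asks the buffer for its real stride instead:
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer); //actual stride, padding included
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef imageContext = CGBitmapContextCreate(pxdata, width, height, 8, bytesPerRow, rgbColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipFirst);
CGContextDrawImage(imageContext, CGRectMake(0, 0, width, height), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(imageContext);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);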
I'm trying to add a 1s audio track ("dummy_recording.m4a") to the beginning of my 3s video. But what I'm getting as a result right now is a 6s long video. It starts with the recording with a black background, then shows only the black background, then shows the video at the end. What am I doing wrong here? I just want the audio to overlap my video starting at the beginning.
-(void) addAudioToFileAtPath:(NSString *) filePath toPath:(NSString *)outFilePath completion:( void ( ^ ) () )completion
{
NSString *audioFilePath = [[NSBundle mainBundle] pathForResource:@"dummy_recording"
ofType:@"m4a"];
NSDictionary *audioInfoDictionary = @{@"audioFilePath": audioFilePath, @"audioDuration": [NSNumber numberWithFloat:1.0]};
NSArray *audioInfoArray = @[audioInfoDictionary];
NSError * error = nil;
AVMutableComposition * composition = [AVMutableComposition composition];
AVURLAsset * videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:filePath] options:nil];
AVAssetTrack * videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID: kCMPersistentTrackID_Invalid];
NSLog(#"videoAsset.duration... value: %lld, timescale: %d, seconds: %lld", videoAsset.duration.value, videoAsset.duration.timescale, videoAsset.duration.value / videoAsset.duration.timescale);
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero
error:&error];
CMTime audioStartTime = kCMTimeZero;
for (NSDictionary * audioInfo in audioInfoArray)
{
NSString * pathString = [audioInfo objectForKey:@"audioFilePath"];
AVURLAsset * urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:pathString] options:nil];
AVAssetTrack * audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
NSLog(#"urlAsset.duration... value: %lld, timescale: %d, seconds: %lld", urlAsset.duration.value, urlAsset.duration.timescale, urlAsset.duration.value / urlAsset.duration.timescale);
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,urlAsset.duration) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
audioStartTime = CMTimeAdd(audioStartTime, CMTimeMake((int) (([[audioInfo objectForKey:@"audioDuration"] floatValue] * RECORDING_FPS) + 0.5), RECORDING_FPS));
}
NSLog(#"composition.duration... value: %lld, timescale: %d, seconds: %lld", composition.duration.value, composition.duration.timescale, composition.duration.value / composition.duration.timescale);
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
assetExport.outputFileType = AVFileTypeMPEG4;
assetExport.outputURL = [NSURL fileURLWithPath:outFilePath];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
NSLog(#"Export Complete");
completion();
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// export cancelled
break;
}
}];
}
Here's the output of the statements that log the durations. The composition is 3 s long, which is what I want, but it still wrongly exports to 6 s:
videoAsset.duration... value: 1840, timescale: 600, seconds: 3
urlAsset.duration... value: 87040, timescale: 44100, seconds: 1
composition.duration... value: 1840, timescale: 600, seconds: 3
I created the 3s video file from a still image. Here's the code:
NSString *documentsFolder = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)
objectAtIndex:0];
NSString *path = [documentsFolder stringByAppendingPathComponent:@"test_video.mp4"];
DDLogInfo(@"path: %@", path);
NSFileManager *fileManager = [NSFileManager defaultManager];
NSError *removeItemError;
BOOL success = [fileManager removeItemAtPath:path error:&removeItemError];
if (success) {
NSLog(#"removed file");
}
else
{
NSLog(#"Could not delete file -:%# ",[removeItemError localizedDescription]);
}
NSString *path2 = [documentsFolder stringByAppendingPathComponent:@"test_video_with_audio.mp4"];
DDLogInfo(@"path2: %@", path2); //was logging `path` instead of `path2`
NSError *removeItemError2;
BOOL success2 = [fileManager removeItemAtPath:path2 error:&removeItemError2];
if (success2) {
NSLog(#"removed file");
}
else
{
NSLog(#"Could not delete file -:%# ",[removeItemError2 localizedDescription]);
}
//1. Wire the writer.
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
self.videoWriter = videoWriter;
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:640], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings]; //retain should be removed if ARC
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:640] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:640] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
//2. Start a session
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero]; //use kCMTimeZero if unsure
UIImage *image = [UIImage imageNamed:@"dummy_square.jpg"];
CGImageRef cgImage = image.CGImage;
//3. Write some samples
//CVPixelBufferRef pixelBuffer = [self newPixelBufferFromCGImage:cgImage];
CVPixelBufferRef pixelBuffer = [self pixelBufferFromCGImage:cgImage];
BOOL result = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:CMTimeMakeWithSeconds(3.0, RECORDING_FPS)];
if (result == NO)
NSLog(@"failed to append buffer");
else
NSLog(@"appended buffer!");
if(pixelBuffer)
{
CVBufferRelease(pixelBuffer);
}
//4. Finish the session
[writerInput markAsFinished];
//[videoWriter endSessionAtSourceTime:…]; //optional can call finishWriting without specifiying endTime
self.library = [[ALAssetsLibrary alloc] init];
__weak ALAssetsLibrary *lib = self.library;
[videoWriter finishWritingWithCompletionHandler:^{
[self addAudioToFileAtPath:path toPath:path2 completion:^{
NSString *albumName = @"Test Album";
NSURL *pathUrl = [NSURL fileURLWithPath:path2]; //initWithString: would not produce a valid file URL
[lib addAssetsGroupAlbumWithName:albumName resultBlock:^(ALAssetsGroup *group) {
///checks if group previously created
if(group == nil){
//enumerate albums
[lib enumerateGroupsWithTypes:ALAssetsGroupAlbum
usingBlock:^(ALAssetsGroup *g, BOOL *stop)
{
//if the album is equal to our album
if ([[g valueForProperty:ALAssetsGroupPropertyName] isEqualToString:albumName]) {
[lib writeVideoAtPathToSavedPhotosAlbum:pathUrl completionBlock:^(NSURL *assetURL, NSError *error) {
//then get the image asseturl
[lib assetForURL:assetURL
resultBlock:^(ALAsset *asset) {
//put it into our album
[g addAsset:asset];
} failureBlock:^(NSError *error) {
}];
}];
}
}failureBlock:^(NSError *error){
}];
}else{
[lib writeVideoAtPathToSavedPhotosAlbum:pathUrl completionBlock:^(NSURL *assetURL, NSError *error) {
//then get the image asseturl
[lib assetForURL:assetURL
resultBlock:^(ALAsset *asset) {
//put it into our album
[group addAsset:asset];
} failureBlock:^(NSError *error) {
}];
}];
}
} failureBlock:^(NSError *error) {
}];
}];
}];
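One suspect for the 6 s export: the still frame is appended only once, at presentation time 3.0, and the session's end is never pinned, so the writer has to infer the movie's extent. A sketch of the usual still-image pattern instead: append the frame at time zero and close the session at the intended duration (RECORDING_FPS as defined in the question):
CMTime videoDuration = CMTimeMakeWithSeconds(3.0, RECORDING_FPS);
//Append the frame at the start of the timeline, not at its end.
BOOL ok = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:kCMTimeZero];
NSLog(@"append %@", ok ? @"succeeded" : @"failed");
[writerInput markAsFinished];
//Pin the movie length explicitly instead of letting the writer infer it.
[videoWriter endSessionAtSourceTime:videoDuration];
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(@"still-image video written");
}];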
Here I show you how to convert an array of images into a video, and also how to add music to it.
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory12 = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory12 stringByAppendingPathComponent:@"test_output.mp4"];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(@"Unable to delete file: %@", [error localizedDescription]);
CGSize imageSize = CGSizeMake(480, 320);
NSUInteger fps = 1;
NSLog(#"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
//NSLog(#"fps :%f",(60.0-finalSec)/(float)[photoAlbumImages count]);
float numberOfSecondsPerFrame =60.0/(float)[photoAlbumImages count];
//NSLog(#"total fps :%f",numberOfSecondsPerFrame);
float frameDuration = fps * numberOfSecondsPerFrame;
NSLog(#"frame duration :%f",frameDuration);
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
for(UIImage * img12 in photoAlbumImages)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img12 CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
//print out status:
NSLog(#"Processing video frame (%d,%d)",frameCount,[photoAlbumImages count]);
CMTime frameTime12 = CMTimeMake(frameCount*frameDuration,fps);
// NSLog(#"%#",frameTime12);
NSLog(#"seconds = %f", CMTimeGetSeconds(frameTime12));
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime12];
CVPixelBufferRelease(buffer);
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
}
else {
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.3];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^(){
NSLog(@"finished writing");
}];
NSLog(@"Write Ended");
//Note: the composition below runs immediately; strictly it should wait for the completion handler above.
////////////////////////////////////////////////////////////////////////////
////////////// OK now add an audio file to move file /////////////////////
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
// audio input file...
AVURLAsset* audioAsset;
if(pathURL==NULL){
NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
}
// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *str=[NSString stringWithFormat:@"project1/imgtovid%@.mov",[self getCurrentDateTimeAsNSString]];
NSString *outputFilePath = [documentsDirectory12 stringByAppendingPathComponent:str]; //documentsDirectory12 is the variable defined above
// NSString* webStringURL = [outputFilePath stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
NSURL *outputFileUrl1;
if(outputFileUrl!=nil){
NSString* webStringURL = [outputFilePath stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
outputFileUrl1 = [NSURL URLWithString:webStringURL];
[self.project1Array addObject:[NSString stringWithFormat:@"file://localhost%@",outputFileUrl1]];
[positionArray addObject:[NSString stringWithFormat:@"file://localhost%@",outputFileUrl1]];
}
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
// NSLog(#"duration - %f",videoAsset.duration);
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack12 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
if(pathURL==NULL){
[b_compositionAudioTrack12 insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
}else{
[b_compositionAudioTrack12 insertTimeRange:audio_timeRange ofTrack:[[addAudioAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
}
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = @"public.mpeg-4";
_assetExport.outputURL = outputFileUrl;
NSLog(#"duration = %f", CMTimeGetSeconds(videoAsset.duration));
_assetExport.timeRange=CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
}]; //the closing bracket was missing here; see the status-check sketch below
Here is the pixel-buffer-from-image code:
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef)photimage {
CGSize size = CGSizeMake(480, 320);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(#"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaPremultipliedFirst);
//kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, size.width,
size.height), photimage);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
If you want any more help, please feel free to ask.
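One small addition to the export above: the exportAsynchronouslyWithCompletionHandler: block is empty, so failures are silent. A minimal status check inside the handler:
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
if (_assetExport.status == AVAssetExportSessionStatusCompleted)
{
NSLog(@"Export finished: %@", outputFileUrl);
}
else
{
NSLog(@"Export failed (%ld): %@", (long)_assetExport.status, _assetExport.error);
}
}];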
I had the same requirement of making a movie from images and sounds, and I used this method to combine the movie and audio.
-(void)CompileFilesToMakeMovie
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
//AUDIO FILE PATH
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString* path = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"final_%@.m4a",appDelegate.storyName]];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:path];
//VIDEO FILE PATH
NSString* path1 = [documentsDirectory stringByAppendingPathComponent:@"temp.mp4"];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:path1];
//FINAL VIDEO PATH
NSString* path2 = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"outputFile_%@.mp4",appDelegate.storyName]];
NSURL* outputFileUrl = [NSURL fileURLWithPath:path2];
NSLog(#"%#",path2);
if ([[NSFileManager defaultManager] fileExistsAtPath:path2])
[[NSFileManager defaultManager] removeItemAtPath:path2 error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, video_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:CMTimeMake(2,1) error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset640x480];
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
//VIDEO COMPLETED
}
];
}
Sorry, the code is a little bit messy, but I think it will work. Please let me know if I need to explain it more.
MOVIE MAKING CODE EDITED
#pragma mark - Movie Making Code
-(void)makeMovie{
//making movie and audio code ll be here
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString* path = [documentsDirectory stringByAppendingPathComponent:@"temp.mp4"];
NSLog(@"%@",path);
if ([[NSFileManager defaultManager] fileExistsAtPath:path])
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
CGSize sz = CGSizeMake(480, 320);
[self writeImageAsMovie:appDelegate.imageArray toPath:path size:sz];
}
- (void)writeImageAsMovie:(NSMutableArray *)image toPath:(NSString*)path size:(CGSize)size
{
//last screen adding
UIImage *tempImg=[UIImage imageNamed:@"powered_by.png"];
[image addObject:tempImg];
//last screen adding end
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Write samples:
/* CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage size:size];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(duration-1, 2)];
*/
float nxttime=0;
float time=0;
CVPixelBufferRef buffer=nil;
#autoreleasepool {
for(int i=0;i<image.count;i++)
{
NSLog(#"%d",image.count);
if([writerInput isReadyForMoreMediaData])
{
if(i!=0 && i!=4)
{
NSLog(#"AUDIO DURATION:%#",appDelegate.audioDuration);
nxttime=nxttime+time;
time=([[appDelegate.audioDuration objectAtIndex:i-1] floatValue])+1;
}
else if(i==0)
{
nxttime=0;
time=3;
}
else if(i==4)
{
nxttime=nxttime+5;
}
buffer = [self pixelBufferFromCGImage:[[image objectAtIndex:i] CGImage] size:size];
CMTime frameTime = CMTimeMake(time, 1 );
CMTime lastTime=CMTimeMake(nxttime, 1);
CMTime presentTime=CMTimeAdd(lastTime, frameTime);
// NSLog(#"%d: ft:%# Lt:%# PT:%#",i,frameTime,lastTime,presentTime);
[adaptor appendPixelBuffer:buffer withPresentationTime:lastTime];
}
else
{
i=i-1;
}
}
}
//Finish the session:
[writerInput markAsFinished];
[self latestCombineVoices];
[videoWriter finishWriting];
}
- (CVPixelBufferRef) pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options,
&pxbuffer);
(void)status; //silence the unused-variable warning when asserts are compiled out
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
// CGContextTranslateCTM(context, 0, CGImageGetHeight(image));
//CGContextScaleCTM(context, 1.0, -1.0);//Flip vertically to account for different origin
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
In my case, for example: I have img1, img2, img3 and sound1, sound2, sound3, so I make a movie where img1 shows for sound1's duration, and so on.
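A sketch of how that per-image timing can be written more directly (assuming images is the image array and durations holds each image's display time in seconds, e.g. the matching sound's length; both names are mine):
CMTime cursor = kCMTimeZero;
const int32_t timescale = 600; //common video timescale
for (NSUInteger i = 0; i < images.count; i++)
{
CVPixelBufferRef buf = [self pixelBufferFromCGImage:[[images objectAtIndex:i] CGImage] size:size];
while (!writerInput.readyForMoreMediaData)
{
[NSThread sleepForTimeInterval:0.05]; //crude back-off; fine for a sketch
}
[adaptor appendPixelBuffer:buf withPresentationTime:cursor];
CVPixelBufferRelease(buf);
cursor = CMTimeAdd(cursor, CMTimeMakeWithSeconds([[durations objectAtIndex:i] doubleValue], timescale));
}
[writerInput markAsFinished];
[videoWriter endSessionAtSourceTime:cursor]; //total length = sum of the durations
[videoWriter finishWritingWithCompletionHandler:^{ NSLog(@"movie written"); }];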