create video from images and save to photo album - ios

I was making a video out of an array of images and an audio file, but I can't seem to save it to the camera roll. Sometimes it saves, but sometimes it doesn't and gives me a strange error:
outputFile.mp4 cannot be saved to the saved photos album: Error Domain=NSOSStatusErrorDomain Code=-12848 "Movie could not be played." UserInfo=0x7bd7370 {NSLocalizedDescription=Movie could not be played.}
This function creates the video from an array of images:
-(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
{
NSLog(#"Write Started");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 16;
int kRecordingFPS = 30;
UIImage * im = [UIImage imageNamed:@"IMG_0103.JPG"];
NSArray * imageArray = [[NSArray alloc]initWithObjects:im,im,im,im,im,im,im,im,im,im,im,im,im,im,im,im, nil];
for(UIImage * img in imageArray)
{
buffer = [self pixelBufferFromCGImage:[img CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) kRecordingFPS);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(buffer)
CVBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
}
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(#"Write Ended");
[videoWriterInput release];
[videoWriter release];
[imageArray release];
}
This function adds the audio:
-(void)CompileFilesToMakeMovie{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString* audio_inputFileName = @"Rihanna-Take a Bow.mp3";
NSString* audio_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,audio_inputFileName];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
NSString* video_inputFileName = @"a.mp4";
NSString* video_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,video_inputFileName];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
NSString* outputFileName = @"outputFile.mp4";
NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,outputFileName];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = outputFileUrl;
[_assetExport setTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)];
[_assetExport exportAsynchronouslyWithCompletionHandler:nil];
[audioAsset release];
[videoAsset release];
[_assetExport release];
}
The function calls:
- (void)viewDidLoad
{
[super viewDidLoad];
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
UIImage * i = [UIImage imageNamed:@"IMG_0103.JPG"];
[self writeImagesToMovieAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,@"a.mp4"] withSize:CGSizeMake(i.size.width, i.size.height)];
NSString* exportVideoPath1 = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,@"a.mp4"];
NSString* exportVideoPath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,@"outputFile.mp4"];
UISaveVideoAtPathToSavedPhotosAlbum (exportVideoPath,self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}
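Note that both writeImagesToMovieAtPath and CompileFilesToMakeMovie finish asynchronously, so by the time UISaveVideoAtPathToSavedPhotosAlbum runs in viewDidLoad, outputFile.mp4 may not exist yet or may be half-written, which matches the -12848 "Movie could not be played" error. A minimal sketch of one fix (an assumption, not the poster's exact code: CompileFilesToMakeMovie would be refactored to save inside the export callback instead of passing nil):
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    if (_assetExport.status == AVAssetExportSessionStatusCompleted) {
        // Save only once the file has been fully written to disk.
        dispatch_async(dispatch_get_main_queue(), ^{
            UISaveVideoAtPathToSavedPhotosAlbum(outputFilePath, self,
                @selector(video:didFinishSavingWithError:contextInfo:), nil);
        });
    } else {
        NSLog(@"Export failed: %@", _assetExport.error);
    }
}];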

In my case, the reason was that AVAssetWriter hadn't finished writing. I solved it with the following piece of code:
__block BOOL _finished = NO;
[assetWriter finishWritingWithCompletionHandler:^{
_finished = YES;
}];
while (!_finished) {
[NSThread sleepForTimeInterval:0.1];
}
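The polling loop works, but it blocks whichever thread it runs on. An alternative sketch that avoids polling is to block on a semaphore (safe only off the main thread), or better, to simply move the follow-up work into the completion handler itself:
dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
[assetWriter finishWritingWithCompletionHandler:^{
    // Signal once the writer has flushed the file to disk.
    dispatch_semaphore_signal(semaphore);
}];
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);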

Related

Unsupported Header Format while converting a file from .mov to .wav; it happens only on devices below iOS 11 (like iOS 9, 10)

Below is my code for the .mov to .wav conversion:
- (void)mp4ForURL:(NSURL *)videoURL{
// Create the asset url with the video file
AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:avAsset];
// Check if video is supported for conversion or not
if ([compatiblePresets containsObject:AVAssetExportPresetMediumQuality])////************************************////
{
//Create Export session
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]initWithAsset:avAsset presetName:AVAssetExportPresetMediumQuality];////************************************////
//Creating temp path to save the converted video
NSString* documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString* myDocumentPath = [documentsDirectory stringByAppendingPathComponent:@"temp.mp4"];
NSURL *url = [[NSURL alloc] initFileURLWithPath:myDocumentPath];
//Check if the file already exists then remove the previous file
if ([[NSFileManager defaultManager]fileExistsAtPath:myDocumentPath])
{
[[NSFileManager defaultManager]removeItemAtPath:myDocumentPath error:nil];
}
exportSession.outputURL = url;
//set the output file format if you want to make it in other file format (ex .3gp)
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.shouldOptimizeForNetworkUse = YES;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusFailed:
NSLog(#"Export session failed");
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
case AVAssetExportSessionStatusCompleted:
{
//Video conversion finished
NSLog(#"Successful!");
[self convertMP4toMP3withFile:myDocumentPath];
}
break;
default:
break;
}
}];
}
else
{
NSLog(#"Video file not supported!");
}
}
-(void)convertMP4toCAFwithFile:(NSString*)dstPath //Converted to Core Audio Format .caf
{
NSURL *dstURL = [NSURL fileURLWithPath:dstPath];
AVMutableComposition* newAudioAsset = [AVMutableComposition composition];
AVMutableCompositionTrack* dstCompositionTrack;
dstCompositionTrack = [newAudioAsset addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAsset* srcAsset = [AVURLAsset URLAssetWithURL:dstURL options:nil];
AVAssetTrack* srcTrack = [[srcAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
CMTimeRange timeRange = srcTrack.timeRange;
NSError* error;
if(NO == [dstCompositionTrack insertTimeRange:timeRange ofTrack:srcTrack atTime:kCMTimeZero error:&error]) {
NSLog(#"track insert failed: %#\n", error);
return;
}
AVAssetExportSession* exportSesh = [[AVAssetExportSession alloc] initWithAsset:newAudioAsset presetName:AVAssetExportPresetPassthrough];
exportSesh.outputFileType = AVFileTypeCoreAudioFormat;
exportSesh.outputURL = dstURL;
[[NSFileManager defaultManager] removeItemAtURL:dstURL error:nil];
[exportSesh exportAsynchronouslyWithCompletionHandler:^{
AVAssetExportSessionStatus status = exportSesh.status;
NSLog(#"exportAsynchronouslyWithCompletionHandler: %li\n", (long)status);
if(AVAssetExportSessionStatusFailed == status) {
NSLog(#"FAILURE: %#\n", exportSesh.error);
} else if(AVAssetExportSessionStatusCompleted == status) {
NSLog(#"SUCCESS!\n");
NSError *error;
//append the name of the file in jpg form
//check if the file exists (completely unnecessary).
NSString *onlyPath = [dstPath stringByDeletingLastPathComponent];
NSInteger randomNumber = arc4random() % 100000;
strDateAndTime = [self getCurrentDateAndTime];
strAudioName = [NSString stringWithFormat:@"%@_%ld_%@.%@", @"Audio", (long)randomNumber, strDateAndTime, @"caf"];
NSString *toPathString = [NSString stringWithFormat:@"%@/%@", onlyPath, strAudioName];
[[NSFileManager defaultManager] moveItemAtPath:dstPath toPath:toPathString error:&error];
//[self uploadAudioOnAWSFromPath:toPathString];
[self convertToWavForFilePath:toPathString];
}
}];
}
- (void)convertToWavForFilePath:(NSString *)cafFilePath
{
// set up an AVAssetReader to read from the iPod Library
// NSString *cafFilePath=[[NSBundle mainBundle]pathForResource:@"test" ofType:@"caf"];
NSURL *assetURL = [NSURL fileURLWithPath:cafFilePath];
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
NSError *assetError = nil;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
error:&assetError];
if (assetError) {
NSLog (#"error: %#", assetError);
return;
}
AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput
assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
audioSettings: nil];
if (! [assetReader canAddOutput: assetReaderOutput]) {
NSLog (#"can't add reader output... die!");
return;
}
[assetReader addOutput: assetReaderOutput];
NSInteger randomNumber = arc4random() % 100000;
_finalAudioName = [NSString stringWithFormat:@"%@_%ld_%@", @"Audio", (long)randomNumber, strDateAndTime];
// NSString *title = @"MyRec";
NSArray *docDirs = NSSearchPathForDirectoriesInDomains (NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docDir = [docDirs objectAtIndex: 0];
__block NSString *wavFilePath = [[docDir stringByAppendingPathComponent :_finalAudioName]
stringByAppendingPathExtension:@"wav"];
_finalAudioName = [wavFilePath lastPathComponent];
if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath])
{
[[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
}
NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
fileType:AVFileTypeWAVE
error:&assetError];
if (assetError)
{
NSLog (#"error: %#", assetError);
return;
}
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
[NSNumber numberWithFloat:44100.0], AVSampleRateKey,
[NSNumber numberWithInt:1], AVNumberOfChannelsKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
[NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
[NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
nil];
AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:outputSettings];
if ([assetWriter canAddInput:assetWriterInput])
{
[assetWriter addInput:assetWriterInput];
}
else
{
NSLog (#"can't add asset writer input... die!");
return;
}
assetWriterInput.expectsMediaDataInRealTime = NO;
[assetWriter startWriting];
[assetReader startReading];
AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
[assetWriter startSessionAtSourceTime: startTime];
__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
usingBlock: ^
{
while (assetWriterInput.readyForMoreMediaData)
{
CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
if (nextBuffer)
{
// append buffer
[assetWriterInput appendSampleBuffer: nextBuffer];
convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(nextBuffer);
CMTime sampleDuration = CMSampleBufferGetDuration(nextBuffer);
if (CMTIME_IS_NUMERIC(sampleDuration))
progressTime= CMTimeAdd(progressTime, sampleDuration);
float dProgress= CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(songAsset.duration);
NSLog(#"%f",dProgress);
}
else
{
[assetWriterInput markAsFinished];
[assetReader cancelReading];
[assetWriter finishWritingWithCompletionHandler:^{
[self uploadAudioOnAWSFromPath:wavFilePath];
}];
}
}
}];
}
In the above code, the converted final file is in .wav format, which I have to send to an AWS server to get text from it. For the text conversion we used the Google Speech API, and whenever I send a file converted on a device below iOS 11 (like iOS 9, 10), it shows me this error:
Error: WAV header indicates an unsupported format.
The above error is shown by the Google Speech API while converting an audio file (.wav) to text.
The above code works perfectly on the latest iOS 11 devices; only devices below iOS 11 (like iOS 9, 10) are not working properly.
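Google's error suggests the PCM description in the WAV header written on iOS 9/10 differs from what the Speech API accepts (16-bit linear PCM). As a diagnostic sketch (assuming AudioToolbox is linked; LogWavFormat is a hypothetical helper, not part of the code above), you can dump the on-disk format of both outputs and compare them:
#import <AudioToolbox/AudioToolbox.h>

static void LogWavFormat(NSString *wavFilePath) {
    ExtAudioFileRef file = NULL;
    NSURL *url = [NSURL fileURLWithPath:wavFilePath];
    if (ExtAudioFileOpenURL((__bridge CFURLRef)url, &file) != noErr) {
        NSLog(@"could not open %@", wavFilePath);
        return;
    }
    AudioStreamBasicDescription asbd = {0};
    UInt32 size = sizeof(asbd);
    if (ExtAudioFileGetProperty(file, kExtAudioFileProperty_FileDataFormat, &size, &asbd) == noErr) {
        // Expect linear PCM, 16 bits, mono, matching the writer settings above.
        NSLog(@"formatID: %u, sampleRate: %.0f, bits: %u, channels: %u, flags: %u",
              (unsigned)asbd.mFormatID, asbd.mSampleRate,
              (unsigned)asbd.mBitsPerChannel, (unsigned)asbd.mChannelsPerFrame,
              (unsigned)asbd.mFormatFlags);
    }
    ExtAudioFileDispose(file);
}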

Getting a saved video from document directory to view in iOS

I'm generating a video and saving it to the camera roll. Here I need to fetch the video, which was generated and saved in the Camera Roll, into my next view. Hope someone helps; I'm not finding the correct approach to follow. Actually, I'm creating and saving the video using this code:
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory()
stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(#"Unable to delete file: %#", [error localizedDescription]);
CGSize imageSize = CGSizeMake(400, 200);
NSUInteger fps = 30;
NSArray* imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"jpg" inDirectory:nil];
self.chosenImages = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
NSLog(@"-->imageArray.count= %lu", (unsigned long)self.chosenImages.count);
for (NSString* path in imagePaths)
{
[self.chosenImages addObject:[UIImage imageWithContentsOfFile:path]];
}
NSLog(#"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
for(UIImage * img in self.chosenImages)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
//print out status:
NSLog(#"Processing video frame (%d,%lu)",frameCount,(unsigned long) [self.chosenImages count]);
CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
}
else {
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(#"Write Ended");
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
// audio input file...
NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack: [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
//_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputFileType = @"public.mpeg-4";
//NSLog(@"support file types= %@", [_assetExport supportedFileTypes]);
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
ALAssetsLibrary *assetLibrary = [[ALAssetsLibrary alloc] init];
[assetLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileUrl completionBlock:^(NSURL *assetURL, NSError *error){
if(error == nil){
NSLog(#"Saved Successfully");
}
}
];
NSLog(#"DONE.....outputFilePath--->%#", outputFilePath);

Getting generated video to next view and saving it to Camera roll in iOS

Here I need to generate a video from multiple picked images and selected music. I can pick multiple images and generate the video from them by saving the selected images in an array and generating the video from that. But my issue is that the generated video should be presented in the next view and also saved to the camera roll. Instead, the generated video is going to Documents and being saved there. Someone help me.
I'm following this link
https://github.com/caferrara/img-to-video
My code:
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory()
stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(#"Unable to delete file: %#", [error localizedDescription]);
CGSize imageSize = CGSizeMake(400, 200);
NSUInteger fps = 30;
NSArray* imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"jpg" inDirectory:nil];
self.chosenImages = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
NSLog(@"-->imageArray.count= %lu", (unsigned long)self.chosenImages.count);
for (NSString* path in imagePaths)
{
[self.chosenImages addObject:[UIImage imageWithContentsOfFile:path]];
}
NSLog(#"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
for(UIImage * img in self.chosenImages)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
//print out status:
NSLog(#"Processing video frame (%d,%lu)",frameCount,(unsigned long)[self.chosenImages count]);
CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
}
else {
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(#"Write Ended");
////////////////////////////////////////////////////////////////////////////
////////////// OK now add an audio file to move file /////////////////////
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
// audio input file...
NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack: [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
//_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputFileType = @"public.mpeg-4";
//NSLog(@"support file types= %@", [_assetExport supportedFileTypes]);
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
//[self saveVideoToAlbum:outputFilePath];
}
];
NSLog(#"DONE.....outputFilePath--->%#", outputFilePath);
You can also fetch your video on the next screen via its document path.
You can move your video from the Documents folder to the photo library by executing the code below.
ALAssetsLibrary *assetLibrary = [[ALAssetsLibrary alloc] init];
[assetLibrary writeVideoAtPathToSavedPhotosAlbum:DocumentPath completionBlock:^(NSURL *assetURL, NSError *error){
if(error == nil){
// Saved successfully
}
}];
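For showing the generated video in the next view, a minimal sketch (assumptions: the same final_video.mp4 Documents path as above, and a moviePlayer property to keep the player alive; MPMoviePlayerController matches the API used elsewhere in this thread):
NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) {
    // Keep a strong reference (e.g. a property), or playback will stop.
    self.moviePlayer = [[MPMoviePlayerController alloc]
        initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
    [self.moviePlayer.view setFrame:self.view.bounds];
    [self.view addSubview:self.moviePlayer.view];
    [self.moviePlayer prepareToPlay];
    [self.moviePlayer play];
}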

Converting array of images to mp4

I tried to convert an array of images to one mp4 file, and it seems to work, but when the video player starts to play it only shows a black view. I followed some other posts on here to convert the images, and this is what I got. When I test it, all the pictures are converted and the video gets created, but when the video plays it only shows a black view. Any help or clarification will be appreciated.
- (void) createVideoPlayer:(NSArray *)imagesArray
{
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
//NSLog(@"-->videoOutputPath= %@", videoOutputPath);
// get rid of existing mp4 if exists...
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(#"Unable to delete file: %#", [error localizedDescription]);
CGSize imageSize = CGSizeMake(213, 320);
NSUInteger fps = 3;
////////////// end setup ///////////////////////////////////
NSLog(#"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:320], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
for(UIImage * img in imagesArray)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
//print out status:
NSLog(#"Processing video frame (%d,%lu)",frameCount,(unsigned long)[imagesArray count]);
CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(!append_ok)
{
NSError *error = videoWriter.error;
if(error!=nil)
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok)
printf("error appending image %d times %d\n, with error.", frameCount, j);
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
}];
NSLog(#"Write Ended");
AVMutableComposition* mixComposition = [AVMutableComposition composition];
// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = #"public.mpeg-4";
//NSLog(#"support file types= %#", [_assetExport supportedFileTypes]);
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler: ^(void ) {
// [self SaveVideo:outputFilePath];
moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
[moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
[moviePlayer prepareToPlay];
// And other options you can look through the documentation.
[self.view addSubview:moviePlayer.view];
[moviePlayer play];
}];
NSLog(#"DONE.....outputFilePath--->%#", outputFilePath);
}
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
CGSize size = CGSizeMake(300, 300);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(#"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaPremultipliedLast);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
All UI updates must be on the main thread. Double-check that the completionHandler is executing on the main thread by calling:
NSLog(@"%d", [NSThread isMainThread]);
To execute on the main thread, use GCD:
dispatch_async(dispatch_get_main_queue(), ^{
// your UI update
});
Also, you should always capture a weak reference to self to avoid a strong reference cycle, by creating a local instance:
__weak typeof(self) weakSelf = self; // now use weakSelf.doSomething
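Putting both pieces together, a sketch (assuming moviePlayer is a property on the view controller):
__weak typeof(self) weakSelf = self;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    // Hop back to the main queue before touching any views.
    dispatch_async(dispatch_get_main_queue(), ^{
        [weakSelf.view addSubview:weakSelf.moviePlayer.view];
        [weakSelf.moviePlayer play];
    });
}];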

Exported video using AVMutableComposition and AVAssetExportSession does not overlap video and audio

I'm trying to add a 1s audio track ("dummy_recording.m4a") to the beginning of my 3s video, but what I'm getting right now is a 6s long video. It starts with the recording over a black background, then shows only the black background, then shows the video at the end. What am I doing wrong here? I just want the audio to overlap my video starting at the beginning.
-(void) addAudioToFileAtPath:(NSString *) filePath toPath:(NSString *)outFilePath completion:( void ( ^ ) () )completion
{
NSString *audioFilePath = [[NSBundle mainBundle] pathForResource:@"dummy_recording"
ofType:@"m4a"];
NSDictionary *audioInfoDictionary = @{@"audioFilePath": audioFilePath, @"audioDuration": [NSNumber numberWithFloat:1.0]};
NSArray *audioInfoArray = @[audioInfoDictionary];
NSError * error = nil;
AVMutableComposition * composition = [AVMutableComposition composition];
AVURLAsset * videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:filePath] options:nil];
AVAssetTrack * videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID: kCMPersistentTrackID_Invalid];
NSLog(#"videoAsset.duration... value: %lld, timescale: %d, seconds: %lld", videoAsset.duration.value, videoAsset.duration.timescale, videoAsset.duration.value / videoAsset.duration.timescale);
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero
error:&error];
CMTime audioStartTime = kCMTimeZero;
for (NSDictionary * audioInfo in audioInfoArray)
{
NSString * pathString = [audioInfo objectForKey:@"audioFilePath"];
AVURLAsset * urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:pathString] options:nil];
AVAssetTrack * audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
NSLog(#"urlAsset.duration... value: %lld, timescale: %d, seconds: %lld", urlAsset.duration.value, urlAsset.duration.timescale, urlAsset.duration.value / urlAsset.duration.timescale);
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,urlAsset.duration) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
audioStartTime = CMTimeAdd(audioStartTime, CMTimeMake((int) (([[audioInfo objectForKey:@"audioDuration"] floatValue] * RECORDING_FPS) + 0.5), RECORDING_FPS));
}
NSLog(#"composition.duration... value: %lld, timescale: %d, seconds: %lld", composition.duration.value, composition.duration.timescale, composition.duration.value / composition.duration.timescale);
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
assetExport.outputFileType = AVFileTypeMPEG4;
assetExport.outputURL = [NSURL fileURLWithPath:outFilePath];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
NSLog(#"Export Complete");
completion();
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// export cancelled
break;
}
}];
}
Here's the output of the statements that log the durations. The composition is 3s long, which is what I want, but it still wrongly exports to 6s:
videoAsset.duration... value: 1840, timescale: 600, seconds: 3
urlAsset.duration... value: 87040, timescale: 44100, seconds: 1
composition.duration... value: 1840, timescale: 600, seconds: 3
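One thing worth trying (an assumption, not a confirmed fix): AVAssetExportSession exports the full track timeline unless you constrain it, so pinning the export to the intended range, as other snippets in this thread do with the timeRange property, may remove the trailing section:
// Sketch: constrain the export to the 3s the composition is supposed to be.
assetExport.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);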
I created the 3s video file from a still image. Here's the code:
NSString *documentsFolder = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)
objectAtIndex:0];
NSString *path = [documentsFolder stringByAppendingPathComponent:@"test_video.mp4"];
DDLogInfo(@"path: %@", path);
NSFileManager *fileManager = [NSFileManager defaultManager];
NSError *removeItemError;
BOOL success = [fileManager removeItemAtPath:path error:&removeItemError];
if (success) {
NSLog(#"removed file");
}
else
{
NSLog(#"Could not delete file -:%# ",[removeItemError localizedDescription]);
}
NSString *path2 = [documentsFolder stringByAppendingPathComponent:@"test_video_with_audio.mp4"];
DDLogInfo(@"path2: %@", path2);
NSError *removeItemError2;
BOOL success2 = [fileManager removeItemAtPath:path2 error:&removeItemError2];
if (success2) {
NSLog(#"removed file");
}
else
{
NSLog(#"Could not delete file -:%# ",[removeItemError2 localizedDescription]);
}
//1. Wire the writer.
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
self.videoWriter = videoWriter;
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:640], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings]; //retain should be removed if ARC
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:640] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:640] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
//2. Start a session
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero]; //use kCMTimeZero if unsure
UIImage *image = [UIImage imageNamed:@"dummy_square.jpg"];
CGImageRef cgImage = image.CGImage;
//3. Write some samples
//CVPixelBufferRef pixelBuffer = [self newPixelBufferFromCGImage:cgImage];
CVPixelBufferRef pixelBuffer = [self pixelBufferFromCGImage:cgImage];
BOOL result = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:CMTimeMakeWithSeconds(3.0, RECORDING_FPS)];
if (result == NO)
NSLog(#"failed to append buffer");
else
NSLog(#"appended buffer!");
if(pixelBuffer)
{
CVBufferRelease(pixelBuffer);
}
//4. Finish the session
[writerInput markAsFinished];
//[videoWriter endSessionAtSourceTime:…]; //optional, can call finishWriting without specifying endTime
self.library = [[ALAssetsLibrary alloc] init];
__weak ALAssetsLibrary *lib = self.library;
[videoWriter finishWritingWithCompletionHandler:^{
[self addAudioToFileAtPath:path toPath:path2 completion:^{
NSString *albumName = #"Test Album";
NSURL *pathUrl = [[NSURL alloc] initWithString:path2];
[lib addAssetsGroupAlbumWithName:albumName resultBlock:^(ALAssetsGroup *group) {
///checks if group previously created
if(group == nil){
//enumerate albums
[lib enumerateGroupsWithTypes:ALAssetsGroupAlbum
usingBlock:^(ALAssetsGroup *g, BOOL *stop)
{
//if the album is equal to our album
if ([[g valueForProperty:ALAssetsGroupPropertyName] isEqualToString:albumName]) {
[lib writeVideoAtPathToSavedPhotosAlbum:pathUrl completionBlock:^(NSURL *assetURL, NSError *error) {
//then get the image asseturl
[lib assetForURL:assetURL
resultBlock:^(ALAsset *asset) {
//put it into our album
[g addAsset:asset];
} failureBlock:^(NSError *error) {
}];
}];
}
}failureBlock:^(NSError *error){
}];
}else{
[lib writeVideoAtPathToSavedPhotosAlbum:pathUrl completionBlock:^(NSURL *assetURL, NSError *error) {
//then get the image asseturl
[lib assetForURL:assetURL
resultBlock:^(ALAsset *asset) {
//put it into our album
[group addAsset:asset];
} failureBlock:^(NSError *error) {
}];
}];
}
} failureBlock:^(NSError *error) {
}];
}];
}];
Here I show you how to convert an array of images into a video and also add music to it:
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory12 = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory12 stringByAppendingPathComponent:@"test_output.mp4"];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(#"Unable to delete file: %#", [error localizedDescription]);
CGSize imageSize = CGSizeMake(480, 320);
NSUInteger fps = 1;
NSLog(#"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
//NSLog(#"fps :%f",(60.0-finalSec)/(float)[photoAlbumImages count]);
float numberOfSecondsPerFrame =60.0/(float)[photoAlbumImages count];
//NSLog(#"total fps :%f",numberOfSecondsPerFrame);
float frameDuration = fps * numberOfSecondsPerFrame;
NSLog(#"frame duration :%f",frameDuration);
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
for(UIImage * img12 in photoAlbumImages)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img12 CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
//print out status:
NSLog(#"Processing video frame (%d,%d)",frameCount,[photoAlbumImages count]);
CMTime frameTime12 = CMTimeMake(frameCount*frameDuration,fps);
// NSLog(#"%#",frameTime12);
NSLog(#"seconds = %f", CMTimeGetSeconds(frameTime12));
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime12];
CVPixelBufferRelease(buffer);
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
}
else {
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.3];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^(){
NSLog (#"finished writing");
}];
NSLog(#"Write Ended");
////////////////////////////////////////////////////////////////////////////
////////////// OK now add an audio file to move file /////////////////////
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
// audio input file...
AVURLAsset* audioAsset;
if(pathURL==NULL){
NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
}
// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *str=[NSString stringWithFormat:@"project1/imgtovid%@.mov",[self getCurrentDateTimeAsNSString]];
NSString *outputFilePath = [documentsDirectory12 stringByAppendingPathComponent:str];
// NSString* webStringURL = [outputFilePath stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
NSURL *outputFileUrl1;
if(outputFileUrl!=nil){
NSString* webStringURL = [outputFilePath stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
outputFileUrl1 = [NSURL URLWithString:webStringURL];
[self.project1Array addObject:[NSString stringWithFormat:@"file://localhost%@",outputFileUrl1]];
[positionArray addObject:[NSString stringWithFormat:@"file://localhost%@",outputFileUrl1]];
}
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
// NSLog(#"duration - %f",videoAsset.duration);
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack12 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
if(pathURL==NULL){
[b_compositionAudioTrack12 insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
}else{
[b_compositionAudioTrack12 insertTimeRange:audio_timeRange ofTrack:[[addAudioAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
}
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = #"public.mpeg-4";
_assetExport.outputURL = outputFileUrl;
NSLog(#"duration = %f", CMTimeGetSeconds(videoAsset.duration));
_assetExport.timeRange=CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
}];
Here is the pixel-buffer-from-image code:
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef)photimage {
CGSize size = CGSizeMake(480, 320);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(#"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaPremultipliedFirst);
//kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, size.width,
size.height), photimage);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
If you want any more help, please feel free to ask.
I have the same requirement of making a movie from images and sounds. I used this method to add the movie and audio:
-(void)CompileFilesToMakeMovie
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
//AUDIO FILE PATH
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString* path = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"final_%@.m4a",appDelegate.storyName]];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:path];
//VIDEO FILE PATH
NSString* path1 = [documentsDirectory stringByAppendingPathComponent:@"temp.mp4"];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:path1];
//FINAL VIDEO PATH
NSString* path2 = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"outputFile_%@.mp4",appDelegate.storyName]];
NSURL* outputFileUrl = [NSURL fileURLWithPath:path2];
NSLog(#"%#",path2);
if ([[NSFileManager defaultManager] fileExistsAtPath:path2])
[[NSFileManager defaultManager] removeItemAtPath:path2 error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, video_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:CMTimeMake(2,1) error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset640x480];
_assetExport.outputFileType = #"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
//VIDEO COMPLETED
}
];
}
Sorry, the code is a little bit messy, but I think it will work. Please let me know if I need to explain it more.
MOVIE MAKING CODE EDITED
#pragma mark - Movie Making Code
-(void)makeMovie{
//making movie and audio code ll be here
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString* path = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"temp.mp4"]];
NSLog(@"%@",path);
if ([[NSFileManager defaultManager] fileExistsAtPath:path])
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
CGSize sz = CGSizeMake(480, 320);
[self writeImageAsMovie:appDelegate.imageArray toPath:path size:sz];
}
- (void)writeImageAsMovie:(NSMutableArray *)image toPath:(NSString*)path size:(CGSize)size
{
//last screen adding
UIImage *tempImg=[UIImage imageNamed:@"powered_by.png"];
[image addObject:tempImg];
//last screen adding end
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Write samples:
/* CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage size:size];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(duration-1, 2)];
*/
float nxttime=0;
float time=0;
CVPixelBufferRef buffer=nil;
@autoreleasepool {
for(int i=0;i<image.count;i++)
{
NSLog(#"%d",image.count);
if([writerInput isReadyForMoreMediaData])
{
if(i!=0 && i!=4)
{
NSLog(#"AUDIO DURATION:%#",appDelegate.audioDuration);
nxttime=nxttime+time;
time=([[appDelegate.audioDuration objectAtIndex:i-1] floatValue])+1;
}
else if(i==0)
{
nxttime=0;
time=3;
}
else if(i==4)
{
nxttime=nxttime+5;
}
buffer = [self pixelBufferFromCGImage:[[image objectAtIndex:i] CGImage] size:size];
CMTime frameTime = CMTimeMake(time, 1 );
CMTime lastTime=CMTimeMake(nxttime, 1);
CMTime presentTime=CMTimeAdd(lastTime, frameTime);
// NSLog(#"%d: ft:%# Lt:%# PT:%#",i,frameTime,lastTime,presentTime);
[adaptor appendPixelBuffer:buffer withPresentationTime:lastTime];
}
else
{
i=i-1;
}
}
}
//Finish the session:
[writerInput markAsFinished];
[self latestCombineVoices];
[videoWriter finishWriting];
}
- (CVPixelBufferRef) pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options,
&pxbuffer);
status=status;//Added to make the stupid compiler not show a stupid warning.
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
// CGContextTranslateCTM(context, 0, CGImageGetHeight(image));
//CGContextScaleCTM(context, 1.0, -1.0);//Flip vertically to account for different origin
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
In my case, for example: I have img1, img2, img3 and sound1, sound2, sound3, so I make a movie where img1 is shown for sound1's duration, and so on.
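A cleaner sketch of that timing idea: each image's presentation time is the running sum of the previous sounds' durations (here durations is a hypothetical NSArray of NSNumber seconds, standing in for appDelegate.audioDuration):
CMTime presentTime = kCMTimeZero;
for (NSUInteger i = 0; i < image.count; i++) {
    CVPixelBufferRef buf = [self pixelBufferFromCGImage:[[image objectAtIndex:i] CGImage] size:size];
    [adaptor appendPixelBuffer:buf withPresentationTime:presentTime];
    CVPixelBufferRelease(buf);
    // Advance by this image's sound duration; 600 is a common video timescale.
    Float64 seconds = [[durations objectAtIndex:i] doubleValue];
    presentTime = CMTimeAdd(presentTime, CMTimeMakeWithSeconds(seconds, 600));
}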
