This has been bothering me for a while. I have a video converter that converts videos into “.mp4” format, but it crashes on some videos and not others.
Here is the crash log:
*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:]
Cannot append sample buffer: First input buffer must have an appropriate kCMSampleBufferAttachmentKey_TrimDurationAtStart since the codec has encoder delay'
Here is my code:
NSURL *uploadURL = [NSURL fileURLWithPath:[[NSTemporaryDirectory() stringByAppendingPathComponent:[self getVideoName]] stringByAppendingString:@".mp4"]];
AVAssetTrack *videoTrack = [[self.avAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize videoSize = videoTrack.naturalSize;
NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey, [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey, nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoWriterSettings];
videoWriterInput.expectsMediaDataInRealTime = YES;
videoWriterInput.transform = videoTrack.preferredTransform;
self.assetWriter = [[AVAssetWriter alloc] initWithURL:uploadURL fileType:AVFileTypeQuickTimeMovie error:nil];
[self.assetWriter addInput:videoWriterInput];
//setup video reader
NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];
self.assetReader = [[AVAssetReader alloc] initWithAsset:self.avAsset error:nil];
[self.assetReader addOutput:videoReaderOutput];
//setup audio writer
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil];
audioWriterInput.expectsMediaDataInRealTime = NO;
[self.assetWriter addInput:audioWriterInput];
//setup audio reader
AVAssetTrack* audioTrack = [[self.avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:self.avAsset error:nil];
[audioReader addOutput:audioReaderOutput];
[self.assetWriter startWriting];
[self.assetReader startReading];
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue
usingBlock:^{
while ([videoWriterInput isReadyForMoreMediaData])
{
CMSampleBufferRef sampleBuffer = NULL;
if ([self.assetReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
[videoWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else
{
[videoWriterInput markAsFinished];
if ([self.assetReader status] == AVAssetReaderStatusCompleted)
{
//start writing from audio reader
[audioReader startReading];
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
while (audioWriterInput.readyForMoreMediaData)
{
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [audioReaderOutput copyNextSampleBuffer]))
{
if (sampleBuffer) {
[audioWriterInput appendSampleBuffer:sampleBuffer];
}
CFRelease(sampleBuffer);
}
else
{
[audioWriterInput markAsFinished];
if ([audioReader status] == AVAssetReaderStatusCompleted) {
[self.assetWriter finishWritingWithCompletionHandler:^(){
[self createLiveTrailerApiForVideoId:video.dbId];
}];
}
}
}
}];
}
}
}
}];
And this is the part that is causing the crash:
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [audioReaderOutput copyNextSampleBuffer]))
{
if (sampleBuffer) {
[audioWriterInput appendSampleBuffer:sampleBuffer];
}
CFRelease(sampleBuffer);
}
I have been searching around; it seems I need to set kCMSampleBufferAttachmentKey_TrimDurationAtStart on the first buffer, but I can't find any example of how to set this value.
Please advise. Thanks!
Just like this:
CFDictionaryRef dict = NULL;
if (firstBuffer) { // firstBuffer: a BOOL flag initialized to YES before the loop
    firstBuffer = NO;
    // 1024 priming frames at 44,100 Hz is the typical AAC encoder delay
    dict = CMTimeCopyAsDictionary(CMTimeMake(1024, 44100), kCFAllocatorDefault);
    CMSetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_TrimDurationAtStart, dict, kCMAttachmentMode_ShouldNotPropagate);
    CFRelease(dict); // CMTimeCopyAsDictionary follows the copy rule, so release it
}
According to this thread on the Apple mailing lists, I'd suggest you check the CMAttachment Reference.
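For context, here is a minimal sketch of how that attachment could be slotted into the audio copy loop from the question (a sketch, not the confirmed fix; firstBuffer is the BOOL flag from the snippet above, and the inner buffer is renamed audioBuffer to avoid the shadowing mentioned below):
__block BOOL firstBuffer = YES;
[audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
while (audioWriterInput.readyForMoreMediaData)
{
CMSampleBufferRef audioBuffer = NULL;
if ([audioReader status] == AVAssetReaderStatusReading &&
(audioBuffer = [audioReaderOutput copyNextSampleBuffer]))
{
if (firstBuffer) {
firstBuffer = NO;
// mark the encoder-delay samples at the start of the track as trimmed
CFDictionaryRef trim = CMTimeCopyAsDictionary(CMTimeMake(1024, 44100), kCFAllocatorDefault);
CMSetAttachment(audioBuffer, kCMSampleBufferAttachmentKey_TrimDurationAtStart, trim, kCMAttachmentMode_ShouldNotPropagate);
CFRelease(trim);
}
[audioWriterInput appendSampleBuffer:audioBuffer];
CFRelease(audioBuffer);
}
else
{
[audioWriterInput markAsFinished];
break;
}
}
}];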
Just for clarity: you should rename your `sampleBuffer` variable in the inner while loop.
Did you try passing a dictionary containing kCMSampleBufferAttachmentKey_TrimDurationAtStart when initializing your audioReaderOutput? (Not sure how it would need to be generated.)
I am able to record a video using AVCaptureSession, but I want to reduce its size. How can I do this? I am getting the final URL in the "captureOutput" delegate method.
VideoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (VideoDevice)
{
VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
{
[CaptureSession addInput:VideoInputDevice];
}
else
{
NSLog(@"Couldn't add video input");
}
}
else
{
NSLog(@"Couldn't create video input");
}
}
else
{
NSLog(@"Couldn't create video capture device");
}
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
I hope this method will be helpful for you:
- (void)compressingVideoWithSoundWithInputURL:(NSURL*)inputURL outputURL:(NSURL*)outputURL andResolution:(CGSize)resolution
{
self.myVideoWriter = nil;
[[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
//setup video writer
AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];
AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:resolution.width], AVVideoWidthKey, [NSNumber numberWithFloat:resolution.height], AVVideoHeightKey, nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoWriterSettings];
videoWriterInput.expectsMediaDataInRealTime = YES;
videoWriterInput.transform = videoTrack.preferredTransform;
NSError* writerError = nil;
self.myVideoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&writerError];
if (writerError) {
NSLog(@"Writer error: %@", writerError);
NSString *message = [NSString stringWithFormat:@"VideoEditor. compressingVideoWithInputURL: outputURL: andResolution:. Writer error: %@", writerError];
FLog(message);
return;
}
[self.myVideoWriter addInput:videoWriterInput];
//setup video reader
NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];
AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
[videoReader addOutput:videoReaderOutput];
//setup audio writer
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil];
audioWriterInput.expectsMediaDataInRealTime = NO;
[self.myVideoWriter addInput:audioWriterInput];
//setup audio reader
AVAssetTrack* audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
[audioReader addOutput:audioReaderOutput];
[self.myVideoWriter startWriting];
//start writing from video reader
[videoReader startReading];
[self.myVideoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", DISPATCH_QUEUE_SERIAL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:
^{
while ([videoWriterInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBuffer;
if ([videoReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
[videoWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else {
[videoWriterInput markAsFinished];
if ([videoReader status] == AVAssetReaderStatusCompleted) {
if ([audioReader status] == AVAssetReaderStatusReading || [audioReader status] == AVAssetReaderStatusCompleted) {
}
else{
//start writing from audio reader
[audioReader startReading];
[self.myVideoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
while (audioWriterInput.readyForMoreMediaData) {
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {
[audioWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else {
[audioWriterInput markAsFinished];
if ([audioReader status] == AVAssetReaderStatusCompleted)
{
[self finishWritingAction];
break;
}
}
}
}
];
}
}
}
}
}
];
}
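For reference, a hypothetical call site for the method above might look like this (the file names and target resolution are placeholders, not part of the original answer):
NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSURL *input = [NSURL fileURLWithPath:[docs stringByAppendingPathComponent:@"source.mov"]]; // placeholder file name
NSURL *output = [NSURL fileURLWithPath:[docs stringByAppendingPathComponent:@"compressed.mov"]]; // placeholder file name
[self compressingVideoWithSoundWithInputURL:input outputURL:output andResolution:CGSizeMake(640, 360)];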
You want to use an AVAssetExportSession. Your code will look something like this:
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:myAsset presetName:exportPreset];
exporter.videoComposition = _videoComposition;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = NO;
url = [url URLByAppendingPathExtension:CFBridgingRelease(UTTypeCopyPreferredTagWithClass((__bridge CFStringRef)(exporter.outputFileType), kUTTagClassFilenameExtension))];
exporter.outputURL = url;
[exporter exportAsynchronouslyWithCompletionHandler:^{
// handle the completion in a way meaningful to your app
}];
Your export preset should be one of these:
AVF_EXPORT NSString *const AVAssetExportPreset640x480 NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset960x540 NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset1280x720 NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset1920x1080 NS_AVAILABLE(10_7, 5_0);
AVF_EXPORT NSString *const AVAssetExportPreset3840x2160 NS_AVAILABLE(10_10, NA);
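If you're not sure which presets a given asset supports, you can query compatibility first; a small sketch (myAsset is the asset from the snippet above):
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:myAsset];
if ([compatiblePresets containsObject:AVAssetExportPreset1280x720]) {
// the 720p preset is safe to use for this asset
}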
I am trying to trim and then compress a video file.
For trimming, I am using AVAssetExportSession.
For compression, I am using AVAssetWriter.
If I use both pieces of code individually, everything works fine, but if I trim and then feed the trimmed output into compression, I get a compressed but corrupt video.
- (void)viewDidLoad
{
[super viewDidLoad];
[self trimVideo];
}
Trimming Code
-(void)trimVideo {
AVAsset *anAsset = [[AVURLAsset alloc]initWithURL:[self.asset valueForProperty:ALAssetPropertyAssetURL] options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:anAsset presetName:AVAssetExportPresetPassthrough];
NSString *fName = [NSString stringWithFormat:@"%@.%@", @"tempVid", @"mp4"];
saveURL = [NSURL fileURLWithPath:[[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0] stringByAppendingPathComponent:fName]];
NSString *fName1 = [NSString stringWithFormat:@"%@.%@", @"tempVid1", @"mp4"];
saveURL1 = [NSURL fileURLWithPath:[[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0] stringByAppendingPathComponent:fName1]];
exportSession.outputURL = saveURL;
exportSession.outputFileType = AVFileTypeMPEG4;
CMTime start = CMTimeMakeWithSeconds(1.0, 600);
CMTime duration = CMTimeMakeWithSeconds(180.0, 600);
CMTimeRange range = CMTimeRangeMake(start, duration);
exportSession.timeRange = range;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status]) {
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %#", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
case AVAssetExportSessionStatusCompleted:
[self convertVideoToLowQuailtyWithInputURL:saveURL outputURL:saveURL1];
break;
default:
break;
}
}];
}
Compression Code
- (void)convertVideoToLowQuailtyWithInputURL:(NSURL*)inputURL outputURL:(NSURL*)outputURL
{
AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];
AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
//CGSize videoSize = videoTrack.naturalSize;
NSDictionary* settings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:960000],AVVideoAverageBitRateKey, AVVideoProfileLevelH264Main32, AVVideoProfileLevelKey,
[NSNumber numberWithInt:24], AVVideoMaxKeyFrameIntervalKey, [NSNumber numberWithInt:0.0], AVVideoMaxKeyFrameIntervalDurationKey, nil],
AVVideoCompressionPropertiesKey, [NSNumber numberWithInt:640], AVVideoWidthKey, [NSNumber numberWithInt:320], AVVideoHeightKey, nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
videoWriterInput.expectsMediaDataInRealTime = YES;
videoWriterInput.transform = videoTrack.preferredTransform;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:nil];
[videoWriter addInput:videoWriterInput];
NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];
AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
[videoReader addOutput:videoReaderOutput];
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil];
audioWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:audioWriterInput];
AVAssetTrack* audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
[audioReader addOutput:audioReaderOutput];
[videoWriter startWriting];
[videoReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:
^{
while ([videoWriterInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBuffer;
if ([videoReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
[videoWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else {
[videoWriterInput markAsFinished];
if ([videoReader status] == AVAssetReaderStatusCompleted) {
//start writing from audio reader
[audioReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
while (audioWriterInput.readyForMoreMediaData) {
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {
[audioWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
else {
[audioWriterInput markAsFinished];
if ([audioReader status] == AVAssetReaderStatusCompleted) {
[videoWriter finishWritingWithCompletionHandler:^(){
NSLog(#"Success");
}];
}
}
}
}
];
}
}
}
}
];
}
You should use the AVMutableCompositionTrack
class; for trimming a media file, use the - (void)removeTimeRange:(CMTimeRange)timeRange method.
AVAsset *videoAsset = <#AVAsset with at least one video track#>;
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compatibleCompositionTrack = [mutableComposition mutableTrackCompatibleWithTrack:videoAssetTrack];
if (compatibleCompositionTrack) {
// Implementation continues.
[compatibleCompositionTrack removeTimeRange: yourTimeRange];
//Export Now your media file using exportSession
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = outputFileUrl;
[exportSession exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch ([exportSession status]) {
case AVAssetExportSessionStatusFailed:
NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export canceled");
break;
case AVAssetExportSessionStatusCompleted:
[self convertVideoToLowQuailtyWithInputURL:saveURL outputURL:saveURL1];
break;
default:
break;
}
}
];
}
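For completeness, yourTimeRange could be built like this (a sketch; the values are just examples, not from the original answer):
// remove the span from t = 1s to t = 2s (example values only)
CMTimeRange yourTimeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(1.0, 600), CMTimeMakeWithSeconds(1.0, 600));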
I'm trying to add a 1s audio track ("dummy_recording.m4a") to the beginning of my 3s video. But what I'm getting as a result right now is a 6s long video. It starts with the recording with a black background, then shows only the black background, then shows the video at the end. What am I doing wrong here? I just want the audio to overlap my video starting at the beginning.
-(void) addAudioToFileAtPath:(NSString *) filePath toPath:(NSString *)outFilePath completion:( void ( ^ ) () )completion
{
NSString *audioFilePath = [[NSBundle mainBundle] pathForResource:@"dummy_recording"
ofType:@"m4a"];
NSDictionary *audioInfoDictionary = @{@"audioFilePath": audioFilePath, @"audioDuration": [NSNumber numberWithFloat:1.0]};
NSArray *audioInfoArray = @[audioInfoDictionary];
NSError * error = nil;
AVMutableComposition * composition = [AVMutableComposition composition];
AVURLAsset * videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:filePath] options:nil];
AVAssetTrack * videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID: kCMPersistentTrackID_Invalid];
NSLog(#"videoAsset.duration... value: %lld, timescale: %d, seconds: %lld", videoAsset.duration.value, videoAsset.duration.timescale, videoAsset.duration.value / videoAsset.duration.timescale);
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero
error:&error];
CMTime audioStartTime = kCMTimeZero;
for (NSDictionary * audioInfo in audioInfoArray)
{
NSString * pathString = [audioInfo objectForKey:@"audioFilePath"];
AVURLAsset * urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:pathString] options:nil];
AVAssetTrack * audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
NSLog(#"urlAsset.duration... value: %lld, timescale: %d, seconds: %lld", urlAsset.duration.value, urlAsset.duration.timescale, urlAsset.duration.value / urlAsset.duration.timescale);
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,urlAsset.duration) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
audioStartTime = CMTimeAdd(audioStartTime, CMTimeMake((int) (([[audioInfo objectForKey:@"audioDuration"] floatValue] * RECORDING_FPS) + 0.5), RECORDING_FPS));
}
NSLog(#"composition.duration... value: %lld, timescale: %d, seconds: %lld", composition.duration.value, composition.duration.timescale, composition.duration.value / composition.duration.timescale);
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
assetExport.outputFileType = AVFileTypeMPEG4;
assetExport.outputURL = [NSURL fileURLWithPath:outFilePath];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
NSLog(#"Export Complete");
completion();
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// export cancelled
break;
}
}];
}
Here's the result of the statements that log the durations. The composition's duration is 3s, which is what I want, but it still wrongly exports at 6s:
videoAsset.duration... value: 1840, timescale: 600, seconds: 3
urlAsset.duration... value: 87040, timescale: 44100, seconds: 1
composition.duration... value: 1840, timescale: 600, seconds: 3
I created the 3s video file from a still image. Here's the code:
NSString *documentsFolder = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)
objectAtIndex:0];
NSString *path = [documentsFolder stringByAppendingPathComponent:@"test_video.mp4"];
DDLogInfo(@"path: %@", path);
NSFileManager *fileManager = [NSFileManager defaultManager];
NSError *removeItemError;
BOOL success = [fileManager removeItemAtPath:path error:&removeItemError];
if (success) {
NSLog(#"removed file");
}
else
{
NSLog(#"Could not delete file -:%# ",[removeItemError localizedDescription]);
}
NSString *path2 = [documentsFolder stringByAppendingPathComponent:@"test_video_with_audio.mp4"];
DDLogInfo(@"path2: %@", path2);
NSError *removeItemError2;
BOOL success2 = [fileManager removeItemAtPath:path2 error:&removeItemError2];
if (success2) {
NSLog(#"removed file");
}
else
{
NSLog(#"Could not delete file -:%# ",[removeItemError2 localizedDescription]);
}
//1. Wire the writer.
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
self.videoWriter = videoWriter;
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:640], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings]; //retain should be removed if ARC
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:640] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:640] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
//2. Start a session
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero]; //use kCMTimeZero if unsure
UIImage *image = [UIImage imageNamed:@"dummy_square.jpg"];
CGImageRef cgImage = image.CGImage;
//3. Write some samples
//CVPixelBufferRef pixelBuffer = [self newPixelBufferFromCGImage:cgImage];
CVPixelBufferRef pixelBuffer = [self pixelBufferFromCGImage:cgImage];
BOOL result = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:CMTimeMakeWithSeconds(3.0, RECORDING_FPS)];
if (result == NO)
NSLog(@"failed to append buffer");
else
NSLog(@"appended buffer!");
if(pixelBuffer)
{
CVBufferRelease(pixelBuffer);
}
//4. Finish the session
[writerInput markAsFinished];
//[videoWriter endSessionAtSourceTime:…]; //optional can call finishWriting without specifiying endTime
self.library = [[ALAssetsLibrary alloc] init];
__weak ALAssetsLibrary *lib = self.library;
[videoWriter finishWritingWithCompletionHandler:^{
[self addAudioToFileAtPath:path toPath:path2 completion:^{
NSString *albumName = @"Test Album";
NSURL *pathUrl = [NSURL fileURLWithPath:path2];
[lib addAssetsGroupAlbumWithName:albumName resultBlock:^(ALAssetsGroup *group) {
///checks if group previously created
if(group == nil){
//enumerate albums
[lib enumerateGroupsWithTypes:ALAssetsGroupAlbum
usingBlock:^(ALAssetsGroup *g, BOOL *stop)
{
//if the album is equal to our album
if ([[g valueForProperty:ALAssetsGroupPropertyName] isEqualToString:albumName]) {
[lib writeVideoAtPathToSavedPhotosAlbum:pathUrl completionBlock:^(NSURL *assetURL, NSError *error) {
//then get the image asseturl
[lib assetForURL:assetURL
resultBlock:^(ALAsset *asset) {
//put it into our album
[g addAsset:asset];
} failureBlock:^(NSError *error) {
}];
}];
}
}failureBlock:^(NSError *error){
}];
}else{
[lib writeVideoAtPathToSavedPhotosAlbum:pathUrl completionBlock:^(NSURL *assetURL, NSError *error) {
//then get the image asseturl
[lib assetForURL:assetURL
resultBlock:^(ALAsset *asset) {
//put it into our album
[group addAsset:asset];
} failureBlock:^(NSError *error) {
}];
}];
}
} failureBlock:^(NSError *error) {
}];
}];
}];
Here I show you how to make a video from an array of images and also add music to it:
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory12 = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory12 stringByAppendingPathComponent:@"test_output.mp4"];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(@"Unable to delete file: %@", [error localizedDescription]);
CGSize imageSize = CGSizeMake(480, 320);
NSUInteger fps = 1;
NSLog(#"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
//NSLog(#"fps :%f",(60.0-finalSec)/(float)[photoAlbumImages count]);
float numberOfSecondsPerFrame =60.0/(float)[photoAlbumImages count];
//NSLog(#"total fps :%f",numberOfSecondsPerFrame);
float frameDuration = fps * numberOfSecondsPerFrame;
NSLog(#"frame duration :%f",frameDuration);
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
for(UIImage * img12 in photoAlbumImages)
{
//UIImage * img = frm._imageFrame;
buffer = [self pixelBufferFromCGImage:[img12 CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
//print out status:
NSLog(#"Processing video frame (%d,%d)",frameCount,[photoAlbumImages count]);
CMTime frameTime12 = CMTimeMake(frameCount*frameDuration,fps);
// NSLog(#"%#",frameTime12);
NSLog(#"seconds = %f", CMTimeGetSeconds(frameTime12));
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime12];
CVPixelBufferRelease(buffer);
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
}
else {
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.3];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^(){
NSLog (#"finished writing");
}];
NSLog(#"Write Ended");
////////////////////////////////////////////////////////////////////////////
////////////// OK now add an audio file to move file /////////////////////
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
// audio input file...
AVURLAsset* audioAsset;
if(pathURL==NULL){
NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
}
// this is the video file that was just written above, full path to file is in --> videoOutputPath
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *str=[NSString stringWithFormat:@"project1/imgtovid%@.mov",[self getCurrentDateTimeAsNSString]];
NSString *outputFilePath = [documentsDirectory12 stringByAppendingPathComponent:str];
// NSString* webStringURL = [outputFilePath stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
NSURL *outputFileUrl1;
if(outputFileUrl!=nil){
NSString* webStringURL = [outputFilePath stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
outputFileUrl1 = [NSURL URLWithString:webStringURL];
[self.project1Array addObject:[NSString stringWithFormat:@"file://localhost%@",outputFileUrl1]];
[positionArray addObject:[NSString stringWithFormat:@"file://localhost%@",outputFileUrl1]];
}
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
// NSLog(#"duration - %f",videoAsset.duration);
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack12 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
if(pathURL==NULL){
[b_compositionAudioTrack12 insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
}else{
[b_compositionAudioTrack12 insertTimeRange:audio_timeRange ofTrack:[[addAudioAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
}
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = #"public.mpeg-4";
_assetExport.outputURL = outputFileUrl;
NSLog(#"duration = %f", CMTimeGetSeconds(videoAsset.duration));
_assetExport.timeRange=CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
}];
Here is the pixel buffer from image method:
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef)photimage {
CGSize size = CGSizeMake(480, 320);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(@"Failed to create pixel buffer");
return NULL; // bail out rather than using a NULL buffer below
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaPremultipliedFirst);
//kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, size.width,
size.height), photimage);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
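Note that the caller owns the buffer returned above (it comes straight from CVPixelBufferCreate) and must release it after appending, as the writer loop earlier does. A minimal usage sketch, assuming an adaptor and a UIImage named img are in scope:
CVPixelBufferRef buf = [self pixelBufferFromCGImage:img.CGImage];
if (buf) {
// present this frame at t = 0; later frames need increasing timestamps
[adaptor appendPixelBuffer:buf withPresentationTime:kCMTimeZero];
CVPixelBufferRelease(buf); // we own the buffer from CVPixelBufferCreate
}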
If you want any more help, please feel free to contact me.
I have the same requirement of making a movie from images and sounds. I have used this method to add the movie and audio.
-(void)CompileFilesToMakeMovie
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
//AUDIO FILE PATH
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString* path = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"final_%@.m4a",appDelegate.storyName]];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:path];
//VIDEO FILE PATH
NSString* path1 = [documentsDirectory stringByAppendingPathComponent:@"temp.mp4"];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:path1];
//FINAL VIDEO PATH
NSString* path2 = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"outputFile_%@.mp4",appDelegate.storyName]];
NSURL* outputFileUrl = [NSURL fileURLWithPath:path2];
NSLog(#"%#",path2);
if ([[NSFileManager defaultManager] fileExistsAtPath:path2])
[[NSFileManager defaultManager] removeItemAtPath:path2 error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, video_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:CMTimeMake(2,1) error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset640x480];
_assetExport.outputFileType = #"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
//VIDEO COMPLETED
}
];
}
Sorry, the code is a little bit messy, but I think it will work. Please let me know if I need to explain it more.
MOVIE MAKING CODE EDITED
#pragma mark - Movie Making Code
-(void)makeMovie{
//making movie and audio code will be here
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString* path = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"temp.mp4"]];
NSLog(@"%@",path);
if ([[NSFileManager defaultManager] fileExistsAtPath:path])
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
CGSize sz = CGSizeMake(480, 320);
[self writeImageAsMovie:appDelegate.imageArray toPath:path size:sz];
}
- (void)writeImageAsMovie:(NSMutableArray *)image toPath:(NSString*)path size:(CGSize)size
{
//last screen adding
UIImage *tempImg=[UIImage imageNamed:@"powered_by.png"];
[image addObject:tempImg];
//last screen adding end
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Write samples:
/* CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage size:size];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(duration-1, 2)];
*/
float nxttime=0;
float time=0;
CVPixelBufferRef buffer=nil;
@autoreleasepool {
for(int i=0;i<image.count;i++)
{
NSLog(#"%d",image.count);
if([writerInput isReadyForMoreMediaData])
{
if(i!=0 && i!=4)
{
NSLog(#"AUDIO DURATION:%#",appDelegate.audioDuration);
nxttime=nxttime+time;
time=([[appDelegate.audioDuration objectAtIndex:i-1] floatValue])+1;
}
else if(i==0)
{
nxttime=0;
time=3;
}
else if(i==4)
{
nxttime=nxttime+5;
}
buffer = [self pixelBufferFromCGImage:[[image objectAtIndex:i] CGImage] size:size];
CMTime frameTime = CMTimeMake(time, 1 );
CMTime lastTime=CMTimeMake(nxttime, 1);
CMTime presentTime=CMTimeAdd(lastTime, frameTime);
// NSLog(#"%d: ft:%# Lt:%# PT:%#",i,frameTime,lastTime,presentTime);
[adaptor appendPixelBuffer:buffer withPresentationTime:lastTime];
}
else
{
i=i-1;
}
}
}
//Finish the session:
[writerInput markAsFinished];
[self latestCombineVoices];
[videoWriter finishWriting];
}
- (CVPixelBufferRef) pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options,
&pxbuffer);
status=status;//Added to make the stupid compiler not show a stupid warning.
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
// CGContextTranslateCTM(context, 0, CGImageGetHeight(image));
//CGContextScaleCTM(context, 1.0, -1.0);//Flip vertically to account for different origin
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
In my case, for example, I have img1, img2, img3 and sound1, sound2, sound3, so I make a movie where img1 shows for sound1's duration, and so on.
I was making a video out of an array of images and an audio file,
but I can't seem to save it to the camera roll.
Sometimes it saves, but sometimes it doesn't, and it gives me a strange error:
outputFile.mp4 cannot be saved to the saved photos album: Error Domain=NSOSStatusErrorDomain Code=-12848 "Movie could not be played." UserInfo=0x7bd7370 {NSLocalizedDescription=Movie could not be played.}
This function creates the video from an array of images:
-(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
{
NSLog(#"Write Started");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 16;
int kRecordingFPS = 30;
UIImage * im = [UIImage imageNamed:@"IMG_0103.JPG"];
NSArray * imageArray = [[NSArray alloc]initWithObjects:im,im,im,im,im,im,im,im,im,im,im,im,im,im,im,im, nil];
for(UIImage * img in imageArray)
{
buffer = [self pixelBufferFromCGImage:[img CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) kRecordingFPS);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(buffer)
CVBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
}
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(#"Write Ended");
[videoWriterInput release];
[videoWriter release];
[imageArray release];
}
This function adds the audio:
-(void)CompileFilesToMakeMovie{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString* audio_inputFileName = @"Rihanna-Take a Bow.mp3";
NSString* audio_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,audio_inputFileName];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
NSString* video_inputFileName = @"a.mp4";
NSString* video_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,video_inputFileName];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
NSString* outputFileName = @"outputFile.mp4";
NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,outputFileName];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = outputFileUrl;
[_assetExport setTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)];
[_assetExport exportAsynchronouslyWithCompletionHandler:nil];
[audioAsset release];
[videoAsset release];
[_assetExport release];
}
The function calls:
- (void)viewDidLoad
{
[super viewDidLoad];
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
UIImage * i = [UIImage imageNamed:@"IMG_0103.JPG"];
[self writeImagesToMovieAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,@"a.mp4"] withSize:CGSizeMake(i.size.width, i.size.height)];
NSString* exportVideoPath1 = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,@"a.mp4"];
NSString* exportVideoPath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,@"outputFile.mp4"];
UISaveVideoAtPathToSavedPhotosAlbum (exportVideoPath,self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}
In my case, the reason was that AVAssetWriter hadn't finished writing. I solved it with the following piece of code:
__block BOOL _finished = NO;
[assetWriter finishWritingWithCompletionHandler:^{
_finished = YES;
}];
while (!_finished) {
[NSThread sleepForTimeInterval:0.1];
}
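If you'd rather not poll, a dispatch semaphore gives the same blocking wait (a sketch under the same caveat: don't block the main thread while the writer finishes):
dispatch_semaphore_t done = dispatch_semaphore_create(0);
[assetWriter finishWritingWithCompletionHandler:^{
dispatch_semaphore_signal(done); // wake the waiting thread once writing completes
}];
dispatch_semaphore_wait(done, DISPATCH_TIME_FOREVER);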