i have array of images and i converted them to movie video and now i want to now how to save that converted video to ipad.
can i save that converted video to iPad photo library
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
// NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
CFDataRef imgData = (CFDataRef)[array objectAtIndex:0];
CGDataProviderRef imgDataProvider = CGDataProviderCreateWithCFData (imgData);
CGImageRef image1 = CGImageCreateWithPNGDataProvider(imgDataProvider, NULL, true, kCGRenderingIntentDefault);
buffer = [self pixelBufferFromCGImage:image1];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
//Write samples:......
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWriting];
Try this open source component:-
https://www.cocoacontrols.com/controls/iqprojectvideo
This might help you. Use it according to your requirement.
Try with following code :
[_operationQueue addOperationWithBlock:^{
NSInteger i = 0;
NSString *path = [NSTemporaryDirectory() stringByAppendingFormat:#"%d.png",i];
UIImage *image;
NSDate *startDate;
while ((image = [UIImage imageWithContentsOfFile:path]))
{
while (1)
{
if (writerInput.readyForMoreMediaData == NO)
{
sleep(0.01);
continue;
}
else
{
//First time only
if (buffer == NULL)
{
CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &buffer);
startDate = [_dates objectAtIndex:i];
}
buffer = [IQProjectVideo pixelBufferFromCGImage:image.CGImage];
if (buffer)
{
if(i<_dates.count){
NSDate *currentDate = [_dates objectAtIndex:i];
Float64 interval = [currentDate timeIntervalSinceDate:startDate];
int32_t timeScale;
if (i == 0)
{
timeScale = 1.0/([[_dates objectAtIndex:i+1] timeIntervalSinceDate:currentDate]);
}
else
{
timeScale = 1.0/([currentDate timeIntervalSinceDate:[_dates objectAtIndex:i-1]]);
}
/**/
CMTime presentTime=CMTimeMakeWithSeconds(interval, MAX(33, timeScale));
// NSLog(#"presentTime:%#",(__bridge NSString *)CMTimeCopyDescription(kCFAllocatorDefault, presentTime));
if (_progressBlock != NULL)
{
dispatch_sync(dispatch_get_main_queue(), ^{
_progressBlock((CGFloat)i/(CGFloat)currentIndex);
});
}
// append buffer
[adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
CVPixelBufferRelease(buffer);
}
}
break;
}
}
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
path = [NSTemporaryDirectory() stringByAppendingFormat:#"%d.png",++i];
}
//Finish the session:
[writerInput markAsFinished];
if ([videoWriter respondsToSelector:#selector(finishWritingWithCompletionHandler:)])
{
[videoWriter finishWritingWithCompletionHandler:^{
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
}];
}
else
{
[videoWriter finishWriting];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
}
NSDictionary *fileAttrubutes = [[NSFileManager defaultManager] attributesOfItemAtPath:_path error:nil];
NSDictionary *dictInfo = [NSDictionary dictionaryWithObjectsAndKeys:
_path,IQFilePathKey,
[fileAttrubutes objectForKey:NSFileSize], IQFileSizeKey,
[fileAttrubutes objectForKey:NSFileCreationDate], IQFileCreateDateKey,
nil];
if (_completionBlock != NULL)
{
dispatch_sync(dispatch_get_main_queue(), ^{
_completionBlock(dictInfo,videoWriter.error);
});
}
NSString *openCommand = [NSString stringWithFormat:#"/usr/bin/open \"%#\"", NSTemporaryDirectory()];
system([openCommand fileSystemRepresentation]);
Related
i'm cutting video frame by frame and than give effect in frame (using aviary SDK) and than create video of that images.
well my code creates video with the expected duration but when i play this video it will play stickly (not smoothly).
Here is my code.
- (IBAction)generateVideo:(id)sender
{
NSLog(#"Enter in generateVideo Action");
NSString *videoPath = [[Util mySaveFileToPath] stringByAppendingString:#"/video2.mp4"];
UIImage * img = (UIImage *)[arrFramesOfVideo objectAtIndex:0];
[self exportImages:arrFramesOfVideo asVideoToPath:videoPath withFrameSize:img.size framesPerSecond:framePerSec numberOfSecondsPerFrame:(1/framePerSec)];
}
well fps i'm getting at the time when i'm fetching frames from video using AVAssetTrack so i think there is no prob with that.
- (void)exportImages:(NSMutableArray *)imageArray asVideoToPath:(NSString *)videoOutputPath withFrameSize:(CGSize)imageSize framesPerSecond:(float)fps numberOfSecondsPerFrame:(float)numberOfSecondsPerFrame
{
NSError *error = nil;
NSLog(#"Start building video from defined frames.");
NSLog(#"Video Path = %#",videoOutputPath);
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSLog(#"videoWriterInput == %#",videoWriterInput);
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
//double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
//for(VideoFrame * frm in imageArray)
NSLog(#"**************************************************");
//for(UIImage * img in imageArray)
for (int i = 0; i < arrFramesOfVideo.count; i++)
{
//NSLog(#"in loop");
UIImage * img = (UIImage *)[arrFramesOfVideo objectAtIndex:i];
buffer = [self pixelBufferFromCGImage:[img CGImage]];
//dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.001 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
NSLog(#"frameCount == %d",frameCount);
CMTime frameTime = CMTimeMake(i*numberOfSecondsPerFrame * 10000,10000);
//CMTime frameTime = CMTimeMakeWithSeconds(frameCount*numberOfSecondsPerFrame, fps);
CMTimeShow(frameTime);
//CMTime frameTime = CMTimeMake(frameCount*frameDuration,fps);
[adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
frameCount ++;
CVPixelBufferRelease(buffer);
}
else
{
i--;
NSLog(#"adaptor not ready.");
[NSThread sleepForTimeInterval:0.0001];
}
//});
}
NSLog(#"**************************************************");
//Finish the session:
[videoWriterInput markAsFinished];
//[videoWriter finishWriting];
[videoWriter finishWritingWithCompletionHandler:^{
if (videoWriter.status == AVAssetWriterStatusFailed)
{
NSLog(#"Movie Save Failed.");
}
else
{
NSLog(#"Movie Saved.");
}
}];
NSLog(#"Write Ended");
NSLog(#"Video Path = %#",videoOutputPath);
}
And other code is default for getting buffer i don't think it is required in this question B'coz it is common (without any changes).
If any body done this please help me i spent one day for this but still not found any solution.
Thank you!
I'm attempting to combine a number of video files into a single file with specific codec settings. I used to use the AVAssetExportSession for this, but I now need more control over the codec than the AVAssetExportSession offers.
Below I've posted the createFinalVideo: function that handles the combination of the video files.
The approach I've taken is attempting to write to the same file with an AVAssetWriter while simply starting the session at the location where the next video should be appended. I know this will not work because the AVAssetWriter apparently doesn't allow for this behavior.
Previously, I had the AVAssetWriter defined outside of the for loop, and I was attempting to add a new input for each video file (each pass of the for loop). However, it appears that AVAssetWriter doesn't allow for adding new inputs after [AVAssetWriter startWriting] has been called.
My question is how do I do what I'm trying to do the right way?
/**
* Final video creation. Merges audio-only and video-only files.
**/
-(void)createFinalVideo:(id)args
{
ENSURE_SINGLE_ARG(args, NSDictionary);
// presentation id
NSString * presID = [args objectForKey:#"presID"];
// array of video paths
NSArray * videoPathsArray = [args objectForKey:#"videoPaths"];
videoSuccessCallback = [args objectForKey:#"videoSuccess"];
videoCancelCallback = [args objectForKey:#"videoCancel"];
videoErrorCallback = [args objectForKey:#"videoError"];
NSError * error = nil;
NSFileManager * fileMgr = [NSFileManager defaultManager];
NSString * bundleDirectory = [[NSBundle mainBundle] bundlePath];
NSString * documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:#"Documents"];
/*********************************************************************/
/* BEGIN: merge all videos into a final MP4 */
/*********************************************************************/
// create the final video output file as MP4 file
NSString * finalOutputFilePath = [NSString stringWithFormat:#"%#/%#/final_video.mp4", documentsDirectory, presID];
NSURL * finalOutputFileUrl = [NSURL fileURLWithPath:finalOutputFilePath];
// delete file if it exists
if ([fileMgr fileExistsAtPath:finalOutputFilePath]) {
[fileMgr removeItemAtPath:finalOutputFilePath error:nil];
}
float renderWidth = 640, renderHeight = 480;
NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:renderWidth], AVVideoCleanApertureWidthKey,
[NSNumber numberWithInt:renderHeight], AVVideoCleanApertureHeightKey,
[NSNumber numberWithInt:10], AVVideoCleanApertureHorizontalOffsetKey,
[NSNumber numberWithInt:10], AVVideoCleanApertureVerticalOffsetKey,
nil];
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1960000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt:24],AVVideoMaxKeyFrameIntervalKey,
videoCleanApertureSettings, AVVideoCleanApertureKey,
AVVideoProfileLevelH264Baseline30, AVVideoProfileLevelKey,
nil];
NSDictionary *videoCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:renderWidth], AVVideoWidthKey,
[NSNumber numberWithInt:renderHeight], AVVideoHeightKey,
AVVideoScalingModeResizeAspect, AVVideoScalingModeKey,
nil];
NSError *aerror = nil;
// next start time for adding to the compositions
CMTime nextStartTime = kCMTimeZero;
// loop through the video paths and add videos to the composition
for (NSString * path in videoPathsArray) {
// wait for each video to finish writing before continuing
dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
// create video writer
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:finalOutputFileUrl fileType:AVFileTypeQuickTimeMovie error:nil];
NSParameterAssert(videoWriter);
NSLog(#"at the top of the for loop");
NSLog(#"%#", path);
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoCompressionSettings];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil];
NSParameterAssert(audioWriterInput);
NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
audioWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:audioWriterInput];
[videoWriter startWriting];
// video setup
AVAsset *avAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path] options:nil];
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:avAsset error:&aerror];
AVAssetTrack *videoTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo]objectAtIndex:0];
CMTime videoDuration = avAsset.duration;
// Wait until the duration is actually available
int durationAttempts = 5;
while(CMTimeGetSeconds(videoDuration) == 0 && durationAttempts > 0) {
durationAttempts--;
[NSThread sleepForTimeInterval:0.3];
videoDuration = avAsset.duration;
}
NSLog(#"[INFO] MODULE-VIDUTILS video duration in secs: %f", CMTimeGetSeconds(videoDuration));
//videoWriterInput.transform = videoTrack.preferredTransform;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *asset_reader_output = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions];
[reader addOutput:asset_reader_output];
//audio setup
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:avAsset error:nil];
AVAssetTrack* audioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:readerOutput];
NSLog(#"startSessionAtSourceTime: %f", CMTimeGetSeconds(nextStartTime));
[videoWriter startSessionAtSourceTime:nextStartTime];
// set next start time
nextStartTime = CMTimeAdd(nextStartTime, videoDuration);
[reader startReading];
dispatch_queue_t _processingQueue = dispatch_queue_create("AVAssetWriterQueue", DISPATCH_QUEUE_SERIAL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:^{
while ([videoWriterInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBuffer;
if ([reader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [asset_reader_output copyNextSampleBuffer])) {
BOOL result = [videoWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
if (!result) {
[reader cancelReading];
NSLog(#"NO RESULT");
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetWriterInputStatusFailed: %#", videoWriter.error);
if (videoErrorCallback != nil) {
[self _fireEventToListener:#"videoError" withObject:nil listener:videoErrorCallback thisObject:nil];
}
return;
break;
}
} else {
[videoWriterInput markAsFinished];
switch ([reader status]) {
case AVAssetReaderStatusReading:
// the reader has more for other tracks, even if this one is done
break;
case AVAssetReaderStatusCompleted:
[audioReader startReading];
[videoWriter startSessionAtSourceTime:nextStartTime];
NSLog(#"Request");
NSLog(#"Asset Writer ready :%d", audioWriterInput.readyForMoreMediaData);
while (audioWriterInput.readyForMoreMediaData) {
CMSampleBufferRef nextBuffer;
if ([audioReader status] == AVAssetReaderStatusReading && (nextBuffer = [readerOutput copyNextSampleBuffer])) {
NSLog(#"Ready");
if (nextBuffer) {
NSLog(#"NextBuffer");
[audioWriterInput appendSampleBuffer:nextBuffer];
}
} else {
[audioWriterInput markAsFinished];
//dictionary to hold duration
if ([audioReader status] == AVAssetReaderStatusCompleted) {
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetReaderStatusCompleted");
[videoWriter finishWritingWithCompletionHandler:^{
switch([videoWriter status]) {
case AVAssetWriterStatusCompleted:
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetWriterStatusCompleted");
dispatch_semaphore_signal(semaphore);
break;
case AVAssetWriterStatusCancelled:
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetWriterStatusCancelled");
if (videoSuccessCallback != nil) {
[self _fireEventToListener:#"videoCancel" withObject:nil listener:videoCancelCallback thisObject:nil];
}
return;
break;
case AVAssetWriterStatusFailed:
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetWriterStatusFailed");
if (videoSuccessCallback != nil) {
[self _fireEventToListener:#"videoError" withObject:nil listener:videoErrorCallback thisObject:nil];
}
return;
break;
}
}];
break;
}
}
}
break;
case AVAssetReaderStatusFailed:
NSLog (#"[INFO] MODULE-VIDUTILS createFinalVideo AVAssetReaderStatusFailed, #%", reader.error);
if (videoSuccessCallback != nil) {
[self _fireEventToListener:#"videoError" withObject:nil listener:videoErrorCallback thisObject:nil];
}
[videoWriter cancelWriting];
return;
break;
}
break;
}
}
}];
// wait for the writing to finish
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
NSLog(#"Write Ended");
}
NSLog(#"got here -- should have waited for all videos to complete first");
// call success if we got here
if (videoSuccessCallback != nil) {
[self _fireEventToListener:#"videoSuccess" withObject:nil listener:videoSuccessCallback thisObject:nil];
}
}
I found a replacement for AVAssetExportSession called SDAVAssetExportSession that allows you to specify settings instead of using presets.
I am creating a video file from an array of images. I am able to create video file on simulator, however when I try to run the same code on device it gives following error:
NSURLErrorDomain Code=-3000 "Cannot create file" UserInfo=0x200be260
{NSUnderlyingError=0x200bb030 "The operation couldn’t be completed.
(OSStatus error -12149.)", NSLocalizedDescription=Cannot create file}
I have searched a lot but couldn't find anything.
Here is the code for creating path.
NSString *path = [NSHomeDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:#"Documents/movie.mp4"]];
-(void)exportImages:(NSArray *)imageArray
asVideoToPath:(NSString *)path
withFrameSize:(CGSize)imageSize
framesPerSecond:(NSUInteger)fps {
NSLog(#"Start building video from defined frames.");
NSError *error = nil;
NSURL *url = [NSURL fileURLWithPath:path];
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
url fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
for(UIImage * img in imageArray) {
buffer = [self pixelBufferFromCGImage:[img CGImage] andSize:imageSize];
while (1) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
break;
}
}
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
NSString *border = #"**************************************************";
NSLog(#"\n%#\nProcessing video frame (%d,%d).\n%#",border,frameCount,[imageArray count],border);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
}
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(#"Write Ended");
}];
}
I had the same error, while I was trying to save movie in existing path.
Try this code to create path for your new video:
- (NSURL *)createOutputURLWithDate
{
NSDateFormatter *kDateFormatter;
kDateFormatter = [[NSDateFormatter alloc] init];
kDateFormatter.dateStyle = NSDateFormatterMediumStyle;
kDateFormatter.timeStyle = kCFDateFormatterLongStyle;
return [[[[NSFileManager defaultManager] URLForDirectory:NSDocumentDirectory
inDomain:NSUserDomainMask
appropriateForURL:nil
create:#YES error:nil] URLByAppendingPathComponent:[kDateFormatter stringFromDate:[NSDate date]]] URLByAppendingPathExtension:CFBridgingRelease(UTTypeCopyPreferredTagWithClass((CFStringRef)AVFileTypeQuickTimeMovie, kUTTagClassFilenameExtension))];
}
I've been attempting to use AVAssetWriter to crop and export a video in an iOS app. Everything has been going swimmingly up until I attempted to add audio to the output. The current code (see below) returns the following error
NSInvalidArgumentException', reason: '* -[AVAssetReader startReading] cannot be called again after reading has already started'
From what I've written below, this should be using a different reader than the one that was initialized, and so I don't understand the issue. This may be a simple issue of needing another pair of eyes on it, so here's the code.
-(void)cropVideoAsset:(NSURL *)fileURL
{
NSString *outputPath = [NSString stringWithFormat:#"%#%#", NSTemporaryDirectory(), #"croppedVideo.mov"];
toUrl = [NSURL fileURLWithPath:outputPath];
if([[NSFileManager defaultManager] fileExistsAtPath:outputPath])
{[[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];}
AVAsset *asset = [AVAsset assetWithURL:fileURL];
NSLog(#"asset: %f", CMTimeGetSeconds(asset.duration));
NSDictionary* compressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoProfileLevelH264Main31, AVVideoProfileLevelKey,
[NSNumber numberWithInt:2500000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt: 5], AVVideoMaxKeyFrameIntervalKey,
nil];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:500], AVVideoWidthKey,
[NSNumber numberWithInt:500], AVVideoHeightKey,
AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
compressionSettings, AVVideoCompressionPropertiesKey,
nil];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
writerInput.expectsMediaDataInRealTime = true;
NSError *error;
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:toUrl fileType:AVFileTypeMPEG4 error:&error];
[assetWriter addInput:writerInput];
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
writerInput.transform = videoTrack.preferredTransform;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *asset_reader_output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:videoOptions];
[assetReader addOutput:asset_reader_output];
//audio setup
NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInt:1], AVNumberOfChannelsKey,
[NSNumber numberWithFloat:44100.0], AVSampleRateKey,
[NSNumber numberWithInt:64000], AVEncoderBitRateKey,
nil];
AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:readerOutput];
audioWriterInput.expectsMediaDataInRealTime = true;
[assetWriter addInput:audioWriterInput];
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
[assetReader startReading];
dispatch_queue_t _processingQueue = dispatch_queue_create("assetAudioWriterQueue", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:
^{
while(writerInput.readyForMoreMediaData)
{
switch(assetReader.status)
{
case AVAssetReaderStatusReading:
{
CMSampleBufferRef sampleBuffer = [asset_reader_output copyNextSampleBuffer];
if (sampleBuffer)
{
BOOL result = [writerInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
if (!result)
{
[SVProgressHUD dismiss];
[self enableView];
NSString *errorMessage = [NSString stringWithFormat:#"We're sorry, an error occurred during the video capture.\n\n%#\n\nPlease try again.", assetWriter.error.localizedDescription];
[UIAlertView simpleAlertWithTitle:#"Video Error" andMessage:errorMessage];
[assetReader cancelReading];
break;
}
}
}
break;
case AVAssetReaderStatusCompleted:
{
[self handleAudioWithReader:audioReader writer:assetWriter input:audioWriterInput andOutput:readerOutput];
}
break;
case AVAssetReaderStatusFailed:
[SVProgressHUD dismiss];
[self enableView];
[UIAlertView simpleAlertWithTitle:#"Video Error" andMessage:#"We're sorry, an error occurred during the video capture. Please try again."];
[assetWriter cancelWriting];
break;
}
}
}];
}
and the requisite call
-(void)handleAudioWithReader:(AVAssetReader *)audioReader writer:(AVAssetWriter *)videoWriter input:(AVAssetWriterInput *)audioWriterInput andOutput:(AVAssetReaderOutput *)readerOutput
{
[audioReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
{
NSLog(#"Asset Writer ready :%d", audioWriterInput.readyForMoreMediaData);
while (audioWriterInput.readyForMoreMediaData)
{
CMSampleBufferRef nextBuffer;
if([audioReader status] == AVAssetReaderStatusReading && (nextBuffer = [readerOutput copyNextSampleBuffer]))
{
if (nextBuffer)
{
NSLog(#"NextBuffer");
[audioWriterInput appendSampleBuffer:nextBuffer];
}
}
else
{
[audioWriterInput markAsFinished];
switch ([audioReader status])
{
case AVAssetReaderStatusCompleted:
[videoWriter finishWriting];
[audioWriterInput markAsFinished];
dispatch_async(dispatch_get_main_queue(),^{[self saveAssetToLibraryAtURL:toUrl];});
break;
}
}
}
}];
}
I am attempting to use a combination of the sample provided for encoding asset writer and the sample provided from pixelBufferFromCGImage to overlay a UIImage atop a AVAsset that I'm exporting.
The problem is, despite the True result of this call
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
The exported avasset is corrupted, yeilding an unexpected size and a subsequent access to it failing with 'This media may be damaged'.
The export itself, if I avoid attempting to use the appendPixelBuffer call, is successful as desired. However, locating it before the dispatch queue or within it yeilds the same failure.
Hopefully not redundantly posting here, but other samples here in stack overflow don't seem to address this particular combination failure. Thanks, code below
Export code:
AVAsset *sourceAsset = [AVAsset assetWithURL:outputUrl];
NSError *error = nil;
NSString *fileName = [NSString stringWithFormat:#"non_transform_%f.mov", [[NSDate date] timeIntervalSince1970]];
NSString *combinedPath = [NSString stringWithFormat:#"file://localhost%#/%#", [[GroupDiskManager sharedManager] getFolderPath], fileName];
NSURL *outputURL = [NSURL URLWithString:combinedPath];
NSLog(#"combined path: %#", combinedPath);
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error];
AVAssetTrack *videoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:1280], AVVideoWidthKey,
[NSNumber numberWithInt:720], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:1280] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:720] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:attributes];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:videoWriterInput];
NSError *aerror = nil;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *asset_reader_output = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions];
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:sourceAsset error:&aerror];
[reader addOutput:asset_reader_output];
AVAssetWriterInput* audioWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:nil] retain];
AVAssetReader *audioReader = [[AVAssetReader assetReaderWithAsset:sourceAsset error:&error] retain];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:audioReaderOutput];
NSParameterAssert(audioWriterInput);
NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
audioWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:audioWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
[reader startReading];
CVPixelBufferRef buffer = [ImageToMovieManager pixelBufferFromCGImage:[UIImage imageNamed:#"234_1280x720_3.jpg"].CGImage size:CGSizeMake(1280, 720)];
BOOL theResult = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
if (theResult == NO) //failes on 3GS, but works on iphone 4
NSLog(#"failed to append buffer");
if(buffer) {
CVBufferRelease(buffer);
}
dispatch_queue_t _processingQueue = dispatch_queue_create("_processingQueue", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:
^{
NSLog(#"requestMediaDataWhenReadyOnQueue");
[self retain];
while ([videoWriterInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBuffer;
if ([reader status] == AVAssetReaderStatusReading &&
(sampleBuffer = [asset_reader_output copyNextSampleBuffer])) {
BOOL result = [videoWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
if (!result) {
NSLog(#" result == nil Cancel!");
NSLog(#"videoWriter.error: %#", videoWriter.error);
[reader cancelReading];
break;
}
} else {
NSLog(#"[videoWriterInput markAsFinished]");
[videoWriterInput markAsFinished];
switch ([reader status]) {
case AVAssetReaderStatusReading:
NSLog(#"reading");
// the reader has more for other tracks, even if this one is done
break;
case AVAssetReaderStatusCompleted:
NSLog(#"AVAssetReaderStatusCompleted");
[audioReader startReading];
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
{
while (audioWriterInput.readyForMoreMediaData) {
CMSampleBufferRef nextBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
(nextBuffer = [audioReaderOutput copyNextSampleBuffer])) {
if (nextBuffer) {
[audioWriterInput appendSampleBuffer:nextBuffer];
}
}else{
[audioWriterInput markAsFinished];
switch ([audioReader status]) {
case AVAssetReaderStatusCompleted:
NSLog(#"AVAssetReaderStatusCompleted!!");
[videoWriter finishWriting];
[VideoManager videoSavedWithURL:outputURL withError:(NSError *)error];
break;
}
}
}
}
];
break;
case AVAssetReaderStatusFailed:
NSLog(#"AVAssetReaderStatusFailed");
[videoWriter cancelWriting];
break;
}
break;
}
}
}
];
pixelBufferFromCGImageCode
CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:NO], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:NO], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
frameSize.height, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, (CFDictionaryRef) options,
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
kCGImageAlphaNoneSkipLast);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
At the very least, the pixelFormat should be specified as kCVPixelFormatType_32BGRA not kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange.