Capturing a movie with AVAssetWriter skips frames on iPhone 4 - iOS

I'm trying to capture a movie using AVAssetWriter. On the iPhone 5 everything works: the movie is captured and saved without a hitch.
But when I capture a movie on the iPhone 4, the sample buffer skips some frames and the movie looks bad.
This is my code:
- (void) initCaptureSession{
// open the session and set quality to 1280x720 (AVCaptureSessionPresetHigh)
session = [[AVCaptureSession alloc] init];
if([session canSetSessionPreset:AVCaptureSessionPresetHigh]) session.sessionPreset = AVCaptureSessionPresetHigh;
// get devices for audio and video
deviceVideo = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
deviceAudio = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
// create input of audio and video
inputVideo = [AVCaptureDeviceInput deviceInputWithDevice:deviceVideo error:&error];
if (!inputVideo) NSLog(@"ERROR: trying to open camera: %@", error);
inputAudio = [AVCaptureDeviceInput deviceInputWithDevice:deviceAudio error:&error];
if (!inputAudio) NSLog(@"ERROR: trying to open audio: %@", error);
// CMTime maxDuration = CMTimeMake(60, 1);
// create output audio and video
outputVideo = [[AVCaptureVideoDataOutput alloc] init];
outputVideo.alwaysDiscardsLateVideoFrames = NO;
outputVideo.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
outputAudio = [[AVCaptureAudioDataOutput alloc] init];
// add inputs and outputs in the current session
[session beginConfiguration];
if ([session canAddInput:inputVideo])[session addInput:inputVideo];
if ([session canAddInput:inputAudio])[session addInput:inputAudio];
if ([session canAddOutput:outputVideo]) [session addOutput:outputVideo];
if ([session canAddOutput:outputAudio]) [session addOutput:outputAudio];
[session commitConfiguration];
// turn off the torch
[deviceVideo lockForConfiguration:&error];
if([deviceVideo hasTorch] && [deviceVideo isTorchModeSupported:AVCaptureTorchModeOff]) [deviceVideo setTorchMode:AVCaptureTorchModeOff];
[deviceVideo unlockForConfiguration];
[self configDevice];
// create the preview view to show the video
captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[captureVideoPreviewLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
captureVideoPreviewLayer.frame = viewPreview.bounds;
[viewPreview.layer addSublayer:captureVideoPreviewLayer];
CALayer *viewLayer = viewPreview.layer;
[viewLayer setMasksToBounds:YES];
[captureVideoPreviewLayer setFrame:[viewLayer bounds]];
[viewLayer addSublayer:captureVideoPreviewLayer];
// dispatch outputs to delegate in a queue
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[outputVideo setSampleBufferDelegate:self queue:queue];
[outputAudio setSampleBufferDelegate:self queue:queue];
// dispatch_release(queue);
[session startRunning];
}
-(BOOL) setupWriter{
urlOutput = [self tempFileURL];
NSError *error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:urlOutput fileType:AVFileTypeMPEG4 error:&error];
NSParameterAssert(videoWriter);
// Add metadata
NSArray *existingMetadataArray = videoWriter.metadata;
NSMutableArray *newMetadataArray = nil;
if (existingMetadataArray) {
newMetadataArray = [existingMetadataArray mutableCopy];
} else {
newMetadataArray = [[NSMutableArray alloc] init];
}
AVMutableMetadataItem *mutableItemLocation = [[AVMutableMetadataItem alloc] init];
mutableItemLocation.keySpace = AVMetadataKeySpaceCommon;
mutableItemLocation.key = AVMetadataCommonKeyLocation;
mutableItemLocation.value = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", location.latitude, location.longitude];
AVMutableMetadataItem *mutableItemModel = [[AVMutableMetadataItem alloc] init];
mutableItemModel.keySpace = AVMetadataKeySpaceCommon;
mutableItemModel.key = AVMetadataCommonKeyModel;
mutableItemModel.value = [[UIDevice currentDevice] model];
[newMetadataArray addObject:mutableItemLocation];
[newMetadataArray addObject:mutableItemModel];
videoWriter.metadata = newMetadataArray;
// video Configuration
NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
[NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
[NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
[NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
nil];
NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1], AVVideoPixelAspectRatioHorizontalSpacingKey,
[NSNumber numberWithInt:1],AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt:90],AVVideoMaxKeyFrameIntervalKey,
videoCleanApertureSettings, AVVideoCleanApertureKey,
videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:360], AVVideoHeightKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary* audioOutputSettings = nil;
// Both types of audio input can cause the output video file to be corrupted.
// if( NO ) {
// should work from the iPhone 3GS on and from the iPod touch 3rd generation
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];
// } else {
// // should work on any device but requires more space
// audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
// [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
// [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
// [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
// [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
// [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
// nil ];
// }
audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeAudio outputSettings: audioOutputSettings];
audioWriterInput.expectsMediaDataInRealTime = YES;
// add input
[videoWriter addInput:videoWriterInput];
[videoWriter addInput:audioWriterInput];
return YES;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
if( !CMSampleBufferDataIsReady(sampleBuffer) ){
NSLog( #"sample buffer is not ready. Skipping sample" );
return;
}
if(isRecording == YES ){
lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if(videoWriter.status != AVAssetWriterStatusWriting ){
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:lastSampleTime];
}
if( captureOutput == outputVideo ){
[self newVideoSample:sampleBuffer];
} else if( captureOutput == outputAudio) {
[self newAudioSample:sampleBuffer];
}
}
}
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer{
if( isRecording ){
if( videoWriter.status > AVAssetWriterStatusWriting ) {
NSLog(#"Warning: writer status is %d", videoWriter.status);
if( videoWriter.status == AVAssetWriterStatusFailed )
NSLog(#"Error: %#", videoWriter.error);
return;
}
while (!videoWriterInput.readyForMoreMediaData) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
if( ![videoWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(#"Unable to write to video input");
}
}
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer{
if( isRecording ){
if( videoWriter.status > AVAssetWriterStatusWriting ) {
NSLog(#"Warning: writer status is %d", videoWriter.status);
if( videoWriter.status == AVAssetWriterStatusFailed )
NSLog(#"Error: %#", videoWriter.error);
return;
}
while (!audioWriterInput.readyForMoreMediaData) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
if( ![audioWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(#"Unable to write to audio input");
}
}
-(void) startVideoRecording {
if( !isRecording ){
NSLog(#"start video recording...");
if( ![self setupWriter] ) {
NSLog(#"Setup Writer Failed") ;
return;
}
isRecording = YES;
recorded = NO;
}
}
-(void) stopVideoRecording {
if( isRecording ) {
isRecording = NO;
btRecord.hidden = NO;
btRecording.hidden = YES;
[timerToRecord invalidate];
timerToRecord = nil;
// [session stopRunning];
[videoWriter finishWritingWithCompletionHandler:^{
if (videoWriter.status != AVAssetWriterStatusFailed && videoWriter.status == AVAssetWriterStatusCompleted) {
videoWriterInput = nil;
audioWriterInput = nil;
videoWriter = nil;
NSLog(#"finishWriting returned succeful");
recorded = YES;
} else {
NSLog(#"finishWriting returned unsucceful") ;
}
}];
NSLog(#"video recording stopped");
[self performSelector:#selector(openPlayer) withObject:nil afterDelay:0.5];
}
}
When I remove these lines:
while (!audioWriterInput.readyForMoreMediaData) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
I got this error:
*** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:] A sample buffer cannot be appended when readyForMoreMediaData is NO.'
On the iPhone 5 I don't use this loop.
I read some examples here, but I didn't understand how to make the movie smoother on the iPhone 4.
If anyone has a suggestion or a full example of making movies with AVAssetWriter on the iPhone 3GS, iPhone 4, iPhone 4S and iPhone 5, I would be very grateful.
Thanks

After a week of fighting with AVFoundation I arrived at a good solution.
Watching WWDC 2012 session 520 helped me put it together.
First I record the movie with AVCaptureMovieFileOutput using the session preset AVCaptureSessionPreset640x480.
After recording, the user chooses whether to save and share, just save, or delete the movie.
If the user wants to save (or save and share), I take the recorded movie and compress it in separate passes:
first I compress the video, then I compress the audio, and finally I merge the tracks.
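For reference, a minimal sketch of that first recording step (the output URL, ivar names and delegate wiring here are just placeholders, not my exact code):
movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([session canSetSessionPreset:AVCaptureSessionPreset640x480]) session.sessionPreset = AVCaptureSessionPreset640x480;
if ([session canAddOutput:movieFileOutput]) [session addOutput:movieFileOutput];
[session startRunning];
// record straight to a temporary file; stop later with -stopRecording
NSURL *tempURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.mov"]];
[movieFileOutput startRecordingToOutputFileURL:tempURL recordingDelegate:self];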
Here is my export code:
-(void)exportMediaWithURL:(NSURL *)url location:(CLLocationCoordinate2D)location mirror:(BOOL)mirror{
urlMedia = url;
locationMedia = location;
videoRecorded = NO;
audioRecorded = NO;
asset = [AVAsset assetWithURL:urlMedia];
progressVideo = 0.0;
progressAudio = 0.0;
progressMarge = 0.0;
progressFactor = 3.0;
mirrored = mirror;
limitTime = CMTimeMake(1000*60, 1000);
[asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^() {
NSError *error;
AVKeyValueStatus stats = [asset statusOfValueForKey:@"tracks" error:&error];
if(stats == AVKeyValueStatusLoaded){
if([[asset tracksWithMediaType:AVMediaTypeVideo] count] > 0) video_track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
if([[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0) audio_track = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
if(!audio_track) progressFactor = 1.0;
if(video_track){
if (CMTimeCompare(asset.duration, limitTime) > 0) {
totalTime = limitTime;
}else{
totalTime = asset.duration;
}
[self exportVideo];
}
}
}];
}
-(void)exportVideo{
NSError *error;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderOutput *videoOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:video_track outputSettings:videoSettings];
[assetReader addOutput:videoOutput];
assetReader.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
// start session to make a movie
if (assetVideoWriter.status == AVAssetWriterStatusUnknown) {
if ([self setupWriterVideo]) {
if ([assetVideoWriter startWriting]) {
[assetVideoWriter startSessionAtSourceTime:kCMTimeZero];
}
}
}
if([assetReader startReading]){
BOOL videoDone = NO;
CMSampleBufferRef bufferVideo;
while (!videoDone) {
if ([assetReader status]== AVAssetReaderStatusReading ) bufferVideo = [videoOutput copyNextSampleBuffer];
if(bufferVideo){
[self newVideoSample:bufferVideo];
CFRelease(bufferVideo);
}else{
videoDone = YES;
}
}
// finish
[videoWriterInput markAsFinished];
[assetVideoWriter finishWritingWithCompletionHandler:^{}];
// workaround to avoid the dealloc problem when using a block to signal completion
while (!videoRecorded) {
if (assetVideoWriter.status == AVAssetWriterStatusCompleted) {
videoWriterInput = nil;
assetVideoWriter = nil;
videoRecorded = YES;
if (audio_track) {
[self exportAudio];
}else{
NSMutableDictionary *infoToSend = [NSMutableDictionary new];
[infoToSend setValue:urlOutputVideo forKey:@"url_media"];
[[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_DONE object:self userInfo:infoToSend];
}
}
}
}
}
-(void)exportAudio{
NSError *error;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
NSDictionary* audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey, nil];
AVAssetReaderOutput *audioOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audio_track outputSettings:audioSettings];
[assetReader addOutput:audioOutput];
assetReader.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
// start session to make a movie
if (assetAudioWriter.status == AVAssetWriterStatusUnknown) {
if ([self setupWriterAudio]) {
if ([assetAudioWriter startWriting]) {
[assetAudioWriter startSessionAtSourceTime:kCMTimeZero];
}
}
}
if([assetReader startReading]){
BOOL audioDone = NO;
CMSampleBufferRef bufferAudio;
while (!audioDone) {
if ([assetReader status]== AVAssetReaderStatusReading ) bufferAudio = [audioOutput copyNextSampleBuffer];
if(bufferAudio){
[self newAudioSample:bufferAudio];
CFRelease(bufferAudio);
}else{
audioDone = YES;
}
}
// finish
[audioWriterInput markAsFinished];
[assetAudioWriter finishWritingWithCompletionHandler:^{}];
// workaround to avoid the dealloc problem when using a block to signal completion
while (!audioRecorded) {
if (assetAudioWriter.status == AVAssetWriterStatusCompleted) {
audioWriterInput = nil;
assetAudioWriter = nil;
audioRecorded = YES;
[self margeFile];
}
}
}
}
-(void)margeFile{
AVURLAsset *assetVideo = [AVURLAsset assetWithURL:urlOutputVideo];
AVAssetTrack *video_track_marge = [[assetVideo tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVURLAsset *assetAudio = [AVURLAsset assetWithURL:urlOutputAudio];
AVAssetTrack *audio_track_marge = [[assetAudio tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
CMTime startTime = CMTimeMake(1, 1);
CMTimeRange timeRangeVideo = CMTimeRangeMake(kCMTimeZero, assetVideo.duration);
CMTimeRange timeRangeAudio = CMTimeRangeMake(kCMTimeZero, assetAudio.duration);
AVMutableComposition * composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
if(mirrored) compositionVideoTrack.preferredTransform = CGAffineTransformMakeRotation(M_PI);
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *error;
[compositionVideoTrack insertTimeRange:timeRangeVideo ofTrack:video_track_marge atTime:startTime error:&error];
[compositionAudioTrack insertTimeRange:timeRangeAudio ofTrack:audio_track_marge atTime:startTime error:&error];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
exportSession.outputFileType = AVFileTypeAppleM4V;
exportSession.outputURL = [self tempFileURL:media_mixed];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.metadata = newMetadataArray;
exportSession.timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(1.0, 600), totalTime);
[exportSession exportAsynchronouslyWithCompletionHandler:^{
NSMutableDictionary *infoToSend = [NSMutableDictionary new];
switch (exportSession.status) {
case AVAssetExportSessionStatusCompleted:
[infoToSend setValue:exportSession.outputURL forKey:@"url_media"];
[[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_DONE object:self userInfo:infoToSend];
break;
case AVAssetExportSessionStatusExporting:
[[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_EXPORTING object:self];
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"failed");
break;
}
}];
while (exportSession.status == AVAssetExportSessionStatusExporting) {
progressMarge = exportSession.progress;
[self postProgress];
}
}
-(BOOL) setupWriterVideo{
urlOutputVideo = [self tempFileURL:media_video];
NSError *error = nil;
assetVideoWriter = [[AVAssetWriter alloc] initWithURL:urlOutputVideo fileType:AVFileTypeMPEG4 error:&error];
NSParameterAssert(assetVideoWriter);
// Add metadata
NSArray *existingMetadataArray = assetVideoWriter.metadata;
if (existingMetadataArray) {
newMetadataArray = [existingMetadataArray mutableCopy];
} else {
newMetadataArray = [[NSMutableArray alloc] init];
}
AVMutableMetadataItem *mutableItemLocation = [[AVMutableMetadataItem alloc] init];
mutableItemLocation.keySpace = AVMetadataKeySpaceCommon;
mutableItemLocation.key = AVMetadataCommonKeyLocation;
mutableItemLocation.value = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", locationMedia.latitude, locationMedia.longitude];
AVMutableMetadataItem *mutableItemModel = [[AVMutableMetadataItem alloc] init];
mutableItemModel.keySpace = AVMetadataKeySpaceCommon;
mutableItemModel.key = AVMetadataCommonKeyModel;
mutableItemModel.value = [[UIDevice currentDevice] model];
[newMetadataArray addObject:mutableItemLocation];
[newMetadataArray addObject:mutableItemModel];
assetVideoWriter.metadata = newMetadataArray;
assetVideoWriter.shouldOptimizeForNetworkUse = YES;
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:[self videoConfiguration]];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = NO;
// add input
[assetVideoWriter addInput:videoWriterInput];
return YES;
}
-(BOOL) setupWriterAudio{
urlOutputAudio = [self tempFileURL:media_audio];
NSError *error = nil;
assetAudioWriter = [[AVAssetWriter alloc] initWithURL:urlOutputAudio fileType:AVFileTypeAppleM4A error:&error];
NSParameterAssert(assetAudioWriter);
audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:[self audioConfiguration]];
audioWriterInput.expectsMediaDataInRealTime = NO;
// add input
[assetAudioWriter addInput:audioWriterInput];
return YES;
}
- (NSDictionary *)videoConfiguration{
// video Configuration
// float bitsPerPixel;
// int numPixels = 640.0 * 360.0;
// int bitsPerSecond;
//
// // Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
// if ( numPixels < (640 * 360.0) )
// bitsPerPixel = 4.05; // This bitrate matches the quality produced by AVCaptureSessionPresetMedium or Low.
// else
// bitsPerPixel = 11.4; // This bitrate matches the quality produced by AVCaptureSessionPresetHigh.
//
// bitsPerSecond = numPixels * bitsPerPixel;
NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
[NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
[NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
[NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
nil];
NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1],AVVideoPixelAspectRatioHorizontalSpacingKey,
[NSNumber numberWithInt:1],AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt:90],AVVideoMaxKeyFrameIntervalKey,
videoCleanApertureSettings, AVVideoCleanApertureKey,
videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:360], AVVideoHeightKey,
nil];
return videoSettings;
}
-(NSDictionary *)audioConfiguration{
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary* audioOutputSettings = nil;
// Both types of audio input can cause the output video file to be corrupted.
// if( NO ) {
// should work from the iPhone 3GS on and from the iPod touch 3rd generation
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 128000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];
// } else {
// // should work on any device but requires more space
// audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
// [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
// [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
// [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
// [ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
// [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
// nil ];
// }
return audioOutputSettings;
}
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer{
if( assetVideoWriter.status > AVAssetWriterStatusWriting ) {
if( assetVideoWriter.status == AVAssetWriterStatusFailed )
NSLog(#"Error: %#", assetVideoWriter.error);
return;
}
if (assetVideoWriter.status == AVAssetWriterStatusWriting ) {
while (!videoWriterInput.readyForMoreMediaData) NSLog(@"waiting for video input");
if (videoWriterInput.readyForMoreMediaData) {
CMTime presTime = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
float valueLoading = CMTimeGetSeconds(presTime); // avoid integer division on the CMTime fields
float valueTotal = CMTimeGetSeconds(totalTime);
progressVideo = valueLoading / valueTotal;
[self postProgress];
if (![videoWriterInput appendSampleBuffer:sampleBuffer]) NSLog(@"Unable to write to video input");
}
}
}
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer{
if( assetAudioWriter.status > AVAssetWriterStatusWriting ) {
if( assetAudioWriter.status == AVAssetWriterStatusFailed )
NSLog(#"Error: %#", assetAudioWriter.error);
return;
}
if (assetAudioWriter.status == AVAssetWriterStatusWriting ) {
while (!audioWriterInput.readyForMoreMediaData) NSLog(@"waiting for audio input");
if (audioWriterInput.readyForMoreMediaData) {
CMTime presTime = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
float valueLoading = CMTimeGetSeconds(presTime); // avoid integer division on the CMTime fields
float valueTotal = CMTimeGetSeconds(totalTime);
progressAudio = valueLoading / valueTotal;
[self postProgress];
if (![audioWriterInput appendSampleBuffer:sampleBuffer]) {
NSLog(#"Unable to write to audio input");
}
}
}
}
- (void)postProgress{
float totalProgress = (progressVideo + progressAudio + progressMarge) / progressFactor;
NSMutableDictionary *infoToSend = [NSMutableDictionary new];
[infoToSend setValue:[NSNumber numberWithFloat:totalProgress] forKey:@"progress"];
[[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_EXPORTING object:self userInfo:infoToSend];
}
- (NSURL *)tempFileURL:(int)typeMedia {
NSString *outputPath;
switch (typeMedia) {
case media_video:
outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output_export.mp4"];
break;
case media_audio:
outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output_export.m4a"];
break;
case media_mixed:
outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"mixed.mp4"];
break;
}
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
return outputURL;
}
- (void) dealloc {
NSLog(#"dealloc video exporter");
[[NSNotificationCenter defaultCenter] removeObserver:self];
assetVideoWriter = nil;
assetAudioWriter = nil;
videoWriterInput = nil;
audioWriterInput = nil;
urlMedia = nil;
urlOutputVideo = nil;
urlOutputAudio = nil;
urlOutputFinal = nil;
}
@end
If anyone has something to add, please post it here!

Set AVAssetWriterInput.outputSettings[AVVideoCompressionPropertiesKey][AVVideoAllowFrameReorderingKey] = @(NO) so the encoder does not reorder frames.
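A minimal sketch of where that key goes in the writer input's settings (the bitrate and dimensions are only illustrative values):
NSDictionary *compressionProps = @{ AVVideoAverageBitRateKey : @(1024000),
AVVideoAllowFrameReorderingKey : @(NO) }; // no B-frames, so presentation order matches decode order
NSDictionary *videoSettings = @{ AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : @(640),
AVVideoHeightKey : @(360),
AVVideoCompressionPropertiesKey : compressionProps };
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];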

Related

Audio missing while getting sample buffers through AVAssetReaderTrackOutput in iOS?

Here I am getting sample buffers using an asset reader and then processing each frame for customization purposes. But the audio is missing from the final video saved in Documents. I know there is another way to process each frame, like applyingCIFiltersWithHandler, but I need each sample buffer so I can render an image or a filter over it. Can you suggest a solution for this?
NSError *error;
NSString *path = [[NSBundle mainBundle] pathForResource:@"recordmovie" ofType:@"mov"];
NSURL *videoURL = [NSURL fileURLWithPath:path];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:asset error:nil];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
// add audio track here
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
NSDictionary *readerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange], kCVPixelBufferPixelFormatTypeKey, nil];
CGSize renderSize = [videoTrack naturalSize];
/*
NSDictionary *readerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264 , AVVideoCodecKey,
renderSize.width , AVVideoWidthKey,
renderSize.height , AVVideoHeightKey,
AVVideoScalingModeResizeAspectFill,AVVideoScalingModeKey, nil];
*/
AVAssetReaderTrackOutput* readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
outputSettings:readerOutputSettings];
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
nil];
NSDictionary *settings = @{ AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatLinearPCM] };
AVAssetReaderTrackOutput *audioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:settings];
[reader addOutput:readerOutput];
[reader addOutput:audioTrackOutput];
[reader startReading];
NSMutableArray *samples = [[NSMutableArray alloc] init];
CMSampleBufferRef sample;
while((sample = [readerOutput copyNextSampleBuffer])) {
[samples addObject:(__bridge id)sample];
CFRelease(sample);
}
NSString *outputPath = [self getDocumentsUrlForFilterMovie];
NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:outputURL
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
@(videoTrack.estimatedDataRate), AVVideoAverageBitRateKey,
nil];
NSDictionary *writerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:videoTrack.naturalSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:videoTrack.naturalSize.height], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
AVAssetWriterInput *writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
outputSettings:writerOutputSettings
sourceFormatHint:(__bridge CMFormatDescriptionRef)[videoTrack.formatDescriptions lastObject]];
[writerInput setExpectsMediaDataInRealTime:NO];
[writer addInput:writerInput];
AVAssetWriterInput *WriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
WriterAudioInput.expectsMediaDataInRealTime = YES;
if([writer canAddInput:WriterAudioInput]) {
[writer addInput:WriterAudioInput];
}
AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
[writer startWriting];
[writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[0])];
//NSMutableArray *audioSamples = [[NSMutableArray alloc] init];
while((sample = [audioTrackOutput copyNextSampleBuffer])) {
//[audioSamples addObject:(__bridge id)sample];
[WriterAudioInput appendSampleBuffer:sample];
while (!WriterAudioInput.readyForMoreMediaData) {
[NSThread sleepForTimeInterval:0.1];
}
CFRelease(sample);
}
CIFilter *filter = [CIFilter filterWithName:@"CISepiaTone"];
[filter setDefaults];
[filter setValue:@(1) forKey:kCIInputIntensityKey];
//CIImage *outputImage = filter.outputImage;
for(NSInteger i = 0; i < samples.count; i++) {
CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[i]);
//CVPixelBufferRef videoFrameBuffer = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[samples.count - i - 1]);
CVPixelBufferRef videoFrameBuffer = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[i]);
CIImage *frameImage = [CIImage imageWithCVPixelBuffer:videoFrameBuffer];
[filter setValue:frameImage forKey:kCIInputImageKey];
CIImage *outputImage = filter.outputImage;
//}
[self->ciContext render:outputImage toCVPixelBuffer:videoFrameBuffer bounds:outputImage.extent colorSpace:self->colorSpace];
while (!writerInput.readyForMoreMediaData) {
[NSThread sleepForTimeInterval:0.1];
}
// [writerInput appendSampleBuffer:videoFrameBuffer];
[pixelBufferAdaptor appendPixelBuffer:videoFrameBuffer withPresentationTime:presentationTime];
}
[writerInput markAsFinished];
[writer finishWritingWithCompletionHandler:^(){
//[self.delegate didFinishReverse:YES andVideoURL:outputURL withError:error];
NSLog(#"Finish video rendering");
}];
});
I had missed appending the audio sample buffers to the audio asset writer input; I fixed the issue by adding them.
Here is code that gets the audio and video sample buffers from an existing video, then writes them out and saves the result to the local Documents directory. You can apply filters and render images over the required frames.
NSError *error;
AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
AVAssetReader *assetReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
NSDictionary *videoReaderOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange], kCVPixelBufferPixelFormatTypeKey, nil];
AVAssetReaderTrackOutput* assetReaderVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:videoReaderOutputSettings];
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
nil];
NSDictionary *audioDecodesettings = @{ AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatLinearPCM] };
AVAssetReaderTrackOutput *assetReaderAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:audioDecodesettings];
[assetReader addOutput:assetReaderVideoTrackOutput];
[assetReader addOutput:assetReaderAudioTrackOutput];
[assetReader startReading];
NSMutableArray *samples = [[NSMutableArray alloc] init];
CMSampleBufferRef sample;
while((sample = [assetReaderVideoTrackOutput copyNextSampleBuffer])) {
[samples addObject:(__bridge id)sample];
CFRelease(sample);
}
NSString *outputPath = [self getDocumentsUrlForFilterMovie];
NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
@(videoTrack.estimatedDataRate), AVVideoAverageBitRateKey,
nil];
NSDictionary *writerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:videoTrack.naturalSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:videoTrack.naturalSize.height], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
outputSettings:writerOutputSettings
sourceFormatHint:(__bridge CMFormatDescriptionRef)[videoTrack.formatDescriptions lastObject]];
[videoWriterInput setExpectsMediaDataInRealTime:NO];
[assetWriter addInput:videoWriterInput];
AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
audioWriterInput.expectsMediaDataInRealTime = YES;
if([assetWriter canAddInput:audioWriterInput]) {
[assetWriter addInput:audioWriterInput];
}
AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:nil];
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[0])];
while((sample = [assetReaderAudioTrackOutput copyNextSampleBuffer])) {
[audioWriterInput appendSampleBuffer:sample];
while (!audioWriterInput.readyForMoreMediaData) {
[NSThread sleepForTimeInterval:0.1];
}
CFRelease(sample);
}
for(NSInteger i = 0; i < samples.count; i++) {
CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[i]);
CVPixelBufferRef videoFrameBuffer = nil;
if(frameRenderType == KVideoNormal) {
videoFrameBuffer = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[i]);
} else if (frameRenderType == KVideoReverse) {
videoFrameBuffer = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[samples.count - i - 1]);
}
if(self.filters.count > 0) {
CIImage *frameImage = [CIImage imageWithCVPixelBuffer:videoFrameBuffer];
for(CIFilter *filter in self.filters) {
[filter setValue:frameImage forKey:kCIInputImageKey];
frameImage = filter.outputImage;
}
[self->ciContext render:frameImage toCVPixelBuffer:videoFrameBuffer bounds:frameImage.extent colorSpace:self->colorSpace];
}
while (!videoWriterInput.readyForMoreMediaData) {
[NSThread sleepForTimeInterval:0.1];
}
[pixelBufferAdaptor appendPixelBuffer:videoFrameBuffer withPresentationTime:presentationTime];
}
[videoWriterInput markAsFinished];
[assetWriter finishWritingWithCompletionHandler:^(){
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(#"Finished video processing");
});
}];
});

Reversing an audio file

I'm using AVAssetReader and AVAssetWriter to reverse an audio file. However, the resulting reversed audio is very jerky.
What's the best practice for reversing an audio file?
Any help is much appreciated.
-(void)reverseAudio:(NSURL *)videoURL andVideoAsset:(AVURLAsset *)videoAsset{
AVAssetReader *video2AssetReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
video2AssetReader.timeRange = CMTimeRangeFromTimeToTime(kCMTimeZero, [videoAsset duration]);
NSArray *audioTracks = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
NSDictionary *outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
[NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,
nil];
AVAssetReaderTrackOutput *readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:outputSettingsDict];
[video2AssetReader addOutput:readerAudioTrackOutput];
[video2AssetReader startReading];
// read in the samples
NSMutableArray *audioSamples = [[NSMutableArray alloc] init];
CMSampleBufferRef audioSample;
while((audioSample = [readerAudioTrackOutput copyNextSampleBuffer])){
[audioSamples addObject:(__bridge id)audioSample];
CFRelease(audioSample);
}
videoReverseProcess3TotalFrames = audioSamples.count;
NSLog(#"AUDIO SAMPLES COUNT = %f", videoReverseProcess3TotalFrames);
[video2AssetReader cancelReading];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *videoPath = [documentsDirectory stringByAppendingPathComponent:@"videoReverseAudioFile.m4a"];
NSError *error = nil;
if([[NSFileManager defaultManager] fileExistsAtPath:videoPath]){
[[NSFileManager defaultManager] removeItemAtPath:videoPath error:&error];
if(error){
NSLog(#"VIDEO DELETE FAILED");
}
else{
NSLog(#"VIDEO DELETED");
}
}
NSURL *audioExportURL = [[NSURL alloc] initFileURLWithPath:videoPath];
AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:audioExportURL fileType:AVFileTypeAppleM4A error:&error];
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *audioCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithFloat:44100.0], AVSampleRateKey,
[NSNumber numberWithInt:2], AVNumberOfChannelsKey,
[NSNumber numberWithInt:128000], AVEncoderBitRateKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey, nil];
AVAssetWriterInput *writerAudioInput;
writerAudioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioCompressionSettings];
writerAudioInput.expectsMediaDataInRealTime = NO;
if([writer canAddInput:writerAudioInput]){
[writer addInput:writerAudioInput];
}
else{
NSLog(#"ERROR ADDING AUDIO");
}
[writer startWriting];
CMTime timeStamp = CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)audioSamples[0]);
[writer startSessionAtSourceTime:timeStamp];
while(audioSamples.count > 0){
if(writer && writerAudioInput.readyForMoreMediaData){
CMSampleBufferRef audioBufferRef = (__bridge CMSampleBufferRef)audioSamples[audioSamples.count - 1];
[writerAudioInput appendSampleBuffer:audioBufferRef];
[audioSamples removeObjectAtIndex:audioSamples.count - 1];
}
else{
[NSThread sleepForTimeInterval:0.2];
}
}
if(writer.status != AVAssetWriterStatusCancelled){
[writerAudioInput markAsFinished];
[writer finishWritingWithCompletionHandler:^{
}];
}
}
You are not reversing the audio, you are only reversing the order of the audio fragments (buffers).
So with the input S1, S2, S3, S4 you produce the output S4, S3, S2, S1, but inside each fragment the samples are still in their original order.
You need to reverse the buffer data too.
Update #1
Here is an example of how you can do this.
- (void)reverseAudio:(AVURLAsset *)videoAsset {
AVAssetReader *video2AssetReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];
video2AssetReader.timeRange = CMTimeRangeFromTimeToTime(kCMTimeZero, [videoAsset duration]);
NSArray *audioTracks = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
NSDictionary *outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
[NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,
nil];
AVAssetReaderTrackOutput *readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:outputSettingsDict];
[video2AssetReader addOutput:readerAudioTrackOutput];
[video2AssetReader startReading];
// read in the samples
CMTime timeStamp = kCMTimeInvalid;
NSMutableArray *audioSamples = [[NSMutableArray alloc] init];
CMSampleBufferRef audioSample;
while ((audioSample = [readerAudioTrackOutput copyNextSampleBuffer])) {
[audioSamples addObject:(__bridge id)[self reverseSampleBuffer:audioSample]];
if (CMTIME_IS_INVALID(timeStamp)) {
timeStamp = CMSampleBufferGetPresentationTimeStamp(audioSample);
}
CFRelease(audioSample);
}
NSLog(#"AUDIO SAMPLES COUNT = %d", (int)audioSamples.count);
[video2AssetReader cancelReading];
// rest of the code
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *videoPath = [documentsDirectory stringByAppendingPathComponent:@"videoReverseAudioFile.m4a"];
NSError *error = nil;
if ([[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
[[NSFileManager defaultManager] removeItemAtPath:videoPath error:&error];
if (error) {
NSLog(#"VIDEO DELETE FAILED");
} else {
NSLog(#"VIDEO DELETED");
}
}
NSURL *audioExportURL = [[NSURL alloc] initFileURLWithPath:videoPath];
AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:audioExportURL fileType:AVFileTypeAppleM4A error:&error];
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *audioCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithFloat:44100.0], AVSampleRateKey,
[NSNumber numberWithInt:2], AVNumberOfChannelsKey,
[NSNumber numberWithInt:128000], AVEncoderBitRateKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey, nil];
AVAssetWriterInput *writerAudioInput;
writerAudioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioCompressionSettings];
writerAudioInput.expectsMediaDataInRealTime = NO;
if ([writer canAddInput:writerAudioInput]) {
[writer addInput:writerAudioInput];
} else {
NSLog(#"ERROR ADDING AUDIO");
}
[writer startWriting];
[writer startSessionAtSourceTime:timeStamp];
while (audioSamples.count > 0) {
if(writer && writerAudioInput.readyForMoreMediaData) {
CMSampleBufferRef audioBufferRef = (__bridge CMSampleBufferRef)audioSamples[audioSamples.count - 1];
[writerAudioInput appendSampleBuffer:audioBufferRef];
[audioSamples removeObjectAtIndex:audioSamples.count - 1];
} else {
[NSThread sleepForTimeInterval:0.2];
}
}
if (writer.status != AVAssetWriterStatusCancelled) {
[writerAudioInput markAsFinished];
[writer finishWritingWithCompletionHandler:^{
}];
}
}
- (CMSampleBufferRef)reverseSampleBuffer:(CMSampleBufferRef)buffer {
AudioBufferList list;
CMBlockBufferRef dataBuffer = NULL;
// TODO check result code
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(buffer,
NULL,
&list,
sizeof(list),
NULL,
NULL,
kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
&dataBuffer);
CMItemCount numberOfSamples = CMSampleBufferGetNumSamples(buffer);
for (int i = 0; i < list.mNumberBuffers; i++) {
SInt16 *samples = (SInt16 *)list.mBuffers[i].mData;
for (int j = 0; j < numberOfSamples / 2; j++) {
SInt16 t = samples[j];
samples[j] = samples[numberOfSamples - 1 - j];
samples[numberOfSamples - 1 - j] = t;
}
}
CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(buffer);
CMSampleBufferRef result = NULL;
// TODO check result code
CMSampleBufferCreate(kCFAllocatorDefault, dataBuffer, true, NULL, NULL, format, 0, 0, NULL, 0, NULL, &result);
return result;
}

Convert a recorded audio file into .wav format in iOS

I want to record an audio file and upload it to a server in .wav format, but the recorder doesn't let me record directly into wav format.
For recording I have used this code:
NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
[recordSetting setValue:[NSNumber numberWithInt:kAudioFormatMPEG4AAC] forKey:AVFormatIDKey];
[recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
[recordSetting setValue:[NSNumber numberWithInt: 2] forKey:AVNumberOfChannelsKey];
// Initiate and prepare the recorder
recorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL settings:recordSetting error:nil];
recorder.delegate = self;
recorder.meteringEnabled = YES;
[recorder prepareToRecord];
Another way I found is to convert the file from CAF to WAV after recording, but that is not working for me either.
Here is the code I used to convert from CAF to WAV:
-(BOOL)exportAssetAsWaveFormat:(NSURL*)filePath
{
NSError *error = nil ;
NSDictionary *audioSetting = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithFloat:44100.0], AVSampleRateKey,
[ NSNumber numberWithInt:2], AVNumberOfChannelsKey,
[ NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
[ NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
[ NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
[ NSNumber numberWithBool:0], AVLinearPCMIsBigEndianKey,
[ NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
[ NSData data], AVChannelLayoutKey, nil ];
// NSString *audioFilePath = filePath;
AVURLAsset * URLAsset = [[AVURLAsset alloc] initWithURL:recorder.url options:nil];
if (!URLAsset) return NO ;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:URLAsset error:&error];
if (error) return NO;
NSArray *tracks = [URLAsset tracksWithMediaType:AVMediaTypeAudio];
if (![tracks count]) return NO;
AVAssetReaderAudioMixOutput *audioMixOutput = [AVAssetReaderAudioMixOutput
assetReaderAudioMixOutputWithAudioTracks:tracks
audioSettings :audioSetting];
if (![assetReader canAddOutput:audioMixOutput]) return NO ;
[assetReader addOutput :audioMixOutput];
if (![assetReader startReading]) return NO;
NSString *title = @"WavConverted";
NSArray *docDirs = NSSearchPathForDirectoriesInDomains (NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docDir = [docDirs objectAtIndex: 0];
NSString *outPath = [[docDir stringByAppendingPathComponent :title]
stringByAppendingPathExtension:@"wav"];
NSURL *outURL = [NSURL fileURLWithPath:outPath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:outURL
fileType:AVFileTypeWAVE
error:&error];
if (error) return NO;
AVAssetWriterInput *assetWriterInput = [ AVAssetWriterInput assetWriterInputWithMediaType :AVMediaTypeAudio
outputSettings:audioSetting];
assetWriterInput.expectsMediaDataInRealTime = NO;
if (![assetWriter canAddInput:assetWriterInput]) return NO ;
[assetWriter addInput :assetWriterInput];
if (![assetWriter startWriting]) return NO;
[assetWriter startSessionAtSourceTime:kCMTimeZero ];
dispatch_queue_t queue = dispatch_queue_create( "assetWriterQueue", NULL );
[assetWriterInput requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
NSLog(#"start");
while (1)
{
if ([assetWriterInput isReadyForMoreMediaData]) {
CMSampleBufferRef sampleBuffer = [audioMixOutput copyNextSampleBuffer];
if (sampleBuffer) {
[assetWriterInput appendSampleBuffer :sampleBuffer];
CFRelease(sampleBuffer);
} else {
[assetWriterInput markAsFinished];
break;
}
}
}
[assetWriter finishWriting];
NSLog(#"finish %#",assetWriter);
}];
return YES;
// dispatch_release(queue);
}
Thanks in advance.
Any help would be really appreciated.
You are uploading the file to a server; I recommend not using the WAV format because it is larger than most other formats. Use CAF, M4A, or another compressed format instead.
You can record a wav file by setting AVFormatIDKey to kAudioFormatLinearPCM; no other encoding format will work.
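For example, a Linear PCM settings dictionary along these lines could serve as the audioSetting used below (the sample rate, channel count and bit depth are typical values, not a requirement):
NSDictionary *audioSetting = @{ AVFormatIDKey : @(kAudioFormatLinearPCM), // required for .wav
AVSampleRateKey : @(44100.0),
AVNumberOfChannelsKey : @(2),
AVLinearPCMBitDepthKey : @(16),
AVLinearPCMIsBigEndianKey : @(NO),
AVLinearPCMIsFloatKey : @(NO) };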
NSURL *url = [NSURL fileURLWithPath: outPath];
NSError *err = nil;
audioRecorder = [[ AVAudioRecorder alloc] initWithURL:url
settings: audioSetting
error:&err];
//prepare to record
[audioRecorder setDelegate:self];
[audioRecorder prepareToRecord];
audioRecorder.meteringEnabled = YES;
[audioRecorder recordForDuration:(NSTimeInterval)10000000000];
[audioRecorder record];
Try this
NSString *wavFilePath = [[NSBundle mainBundle] pathForResource:@"sampleaudio" ofType:@"wav"];
NSURL *assetURL = [NSURL fileURLWithPath:wavFilePath];
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
NSError *assetError = nil;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
error:&assetError];
if (assetError) {
NSLog (#"error: %#", assetError);
return;
}
AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput
assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
audioSettings: nil];
if (! [assetReader canAddOutput: assetReaderOutput]) {
NSLog (#"can't add reader output... die!");
return;
}
[assetReader addOutput: assetReaderOutput];
NSString *strcafFileName = [NSString stringWithFormat:@"%@.caf",[wavFilePath stringByDeletingPathExtension]];
NSString *cafFilePath = [delegate.strCassettePathSide stringByAppendingPathComponent:strcafFileName];
NSURL *exportURL = [NSURL fileURLWithPath:cafFilePath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
fileType:AVFileTypeCoreAudioFormat
error:&assetError];
if (assetError)
{
NSLog (#"error: %#", assetError);
return;
}
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
[NSNumber numberWithFloat:11025], AVSampleRateKey,
[NSNumber numberWithInt:2], AVNumberOfChannelsKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
[NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
nil];
AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:outputSettings];
if ([assetWriter canAddInput:assetWriterInput])
{
[assetWriter addInput:assetWriterInput];
}
else
{
NSLog(#"can't add asset writer input... die!");
return;
}
assetWriterInput.expectsMediaDataInRealTime = NO;
[assetWriter startWriting];
[assetReader startReading];
AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
[assetWriter startSessionAtSourceTime: startTime];
__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
usingBlock: ^
{
while (assetWriterInput.readyForMoreMediaData)
{
CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
if (nextBuffer)
{
// append buffer
[assetWriterInput appendSampleBuffer: nextBuffer];
convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
CMSampleBufferInvalidate(nextBuffer);
CFRelease(nextBuffer);
nextBuffer = NULL;
}
else
{
[assetWriterInput markAsFinished];
// [assetWriter finishWriting];
[assetReader cancelReading];
break;
}
}
}];

iOS - Read a video file frame-by-frame, image processing, then save as new video file

I am trying to read a video frame by frame from the iPhone photo album.
After image processing, I want to save the frames as a new video.
My code runs without any errors, but no new video appears in the album.
Here is my code.
// Video writer init
- (BOOL)setupAssetWriterForURL:(CMFormatDescriptionRef)formatDescription
{
float bitsPerPixel;
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
int numPixels = dimensions.width * dimensions.height;
int bitsPerSecond;
if ( numPixels < (640 * 480) )
bitsPerPixel = 4.05;
else
bitsPerPixel = 11.4;
bitsPerSecond = numPixels * bitsPerPixel;
NSDictionary *videoCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInteger:dimensions.width], AVVideoWidthKey,
[NSNumber numberWithInteger:dimensions.height], AVVideoHeightKey,
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInteger:bitsPerSecond], AVVideoAverageBitRateKey,
[NSNumber numberWithInteger:30], AVVideoMaxKeyFrameIntervalKey,
nil], AVVideoCompressionPropertiesKey,
nil];
if ([assetWriter canApplyOutputSettings:videoCompressionSettings forMediaType:AVMediaTypeVideo]) {
assetWriterVideoIn = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoCompressionSettings];
assetWriterVideoIn.expectsMediaDataInRealTime = YES;
assetWriterVideoIn.transform = [self transformFromCurrentVideoOrientationToOrientation:self.referenceOrientation];
if ([assetWriter canAddInput:assetWriterVideoIn])
[assetWriter addInput:assetWriterVideoIn];
else {
NSLog(#"Couldn't add asset writer video input.");
return NO;
}
}
else {
NSLog(#"Couldn't apply video output settings.");
return NO;
}
return YES;
}
Read video
- (void)readMovie:(NSURL *)url
{
AVURLAsset * asset = [AVURLAsset URLAssetWithURL:url options:nil];
[asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:
^{
dispatch_async(dispatch_get_main_queue(),
^{
AVAssetTrack * videoTrack = nil;
NSArray * tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if ([tracks count] == 1)
{
videoTrack = [tracks objectAtIndex:0];
NSError * error = nil;
// _movieReader is a member variable
AVAssetReader *movieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
if (error)
NSLog(#"_movieReader fail!\n");
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings =
[NSDictionary dictionaryWithObject:value forKey:key];
[movieReader addOutput:[AVAssetReaderTrackOutput
assetReaderTrackOutputWithTrack:videoTrack
outputSettings:videoSettings]];
[movieReader startReading];
while ([movieReader status] == AVAssetReaderStatusReading)
{
AVAssetReaderTrackOutput * output = [movieReader.outputs objectAtIndex:0];
CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
if (sampleBuffer)
{
if ( !assetWriter ) {
outputURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/%llu.mov", NSTemporaryDirectory(), mach_absolute_time()]];
NSError *error = nil;
assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:(NSString *)kUTTypeQuickTimeMovie error:&error];
if (error)
[self showError:error];
if (assetWriter) {
CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
[self setupAssetWriterForURL:formatDescription];
}
}
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer,0);
int bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
int bufferWidth = CVPixelBufferGetWidth(imageBuffer);
int bufferHeight = CVPixelBufferGetHeight(imageBuffer);
unsigned char *pixel = (unsigned char *)CVPixelBufferGetBaseAddress(imageBuffer);
for( int row = 0; row < bufferHeight; row++ ) {
for( int column = 0; column < bufferWidth; column++ ) {
pixel[0] = (pixel[0]+pixel[1]+pixel[2])/3;
pixel[1] = (pixel[0]+pixel[1]+pixel[2])/3;
pixel[2] = (pixel[0]+pixel[1]+pixel[2])/3;
pixel += 4;
}
}
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
if ( assetWriter ) {
[self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeVideo];
}
CFRelease(sampleBuffer);
}
}
if (assetWriter) {
[assetWriterVideoIn markAsFinished];
assetWriter = nil;
[assetWriter finishWriting];
assetWriterVideoIn = nil;
assetWriter = nil;
[self saveMovieToCameraRoll];
}
else {
[self showError:[assetWriter error]];
}
}
});
}];
}
- (void) writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString *)mediaType
{
if ( assetWriter.status == AVAssetWriterStatusUnknown ) {
if ([assetWriter startWriting]) {
[assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
}
else {
[self showError:[assetWriter error]];
}
}
if ( assetWriter.status == AVAssetWriterStatusWriting ) {
if (mediaType == AVMediaTypeVideo) {
if (assetWriterVideoIn.readyForMoreMediaData) {
if (![assetWriterVideoIn appendSampleBuffer:sampleBuffer]) {
[self showError:[assetWriter error]];
}
}
}
}
}
- (void)saveMovieToCameraRoll
{
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:outputURL
completionBlock:^(NSURL *assetURL, NSError *error) {
if (error){
[self showError:error];
NSLog(#"save fail");
}
else
{
[self removeFile:outputURL];
NSLog(#"!!!");
}
});
}];
}
- (void)removeFile:(NSURL *)fileURL
{
NSFileManager *fileManager = [NSFileManager defaultManager];
NSString *filePath = [fileURL path];
if ([fileManager fileExistsAtPath:filePath]) {
NSError *error;
BOOL success = [fileManager removeItemAtPath:filePath error:&error];
if (!success)
[self showError:error];
}
}
Any suggestions?
I am a bit late, but it might be helpful for others: the code is almost right. Simply comment out one line in the readMovie: method and it will work:
//assetWriter = nil; // commented-out line
[assetWriter finishWriting];
assetWriterVideoIn = nil;
assetWriter = nil;
[self saveMovieToCameraRoll];
}
And here is an answer for creating a movie from images.
I hope you will get some help from there.

How to save a video programmatically on iPad 1 (iOS SDK)

I have an array of images and I converted them into a movie. Now I want to know how to save that converted video on the iPad.
Can I save the converted video to the iPad photo library?
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
// NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
CFDataRef imgData = (CFDataRef)[array objectAtIndex:0];
CGDataProviderRef imgDataProvider = CGDataProviderCreateWithCFData (imgData);
CGImageRef image1 = CGImageCreateWithPNGDataProvider(imgDataProvider, NULL, true, kCGRenderingIntentDefault);
buffer = [self pixelBufferFromCGImage:image1];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
//Write samples:......
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWriting];
Try this open source component:
https://www.cocoacontrols.com/controls/iqprojectvideo
It might help you; use it according to your requirements.
Try the following code:
[_operationQueue addOperationWithBlock:^{
NSInteger i = 0;
NSString *path = [NSTemporaryDirectory() stringByAppendingFormat:@"%d.png",i];
UIImage *image;
NSDate *startDate;
while ((image = [UIImage imageWithContentsOfFile:path]))
{
while (1)
{
if (writerInput.readyForMoreMediaData == NO)
{
sleep(0.01);
continue;
}
else
{
//First time only
if (buffer == NULL)
{
CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &buffer);
startDate = [_dates objectAtIndex:i];
}
buffer = [IQProjectVideo pixelBufferFromCGImage:image.CGImage];
if (buffer)
{
if(i<_dates.count){
NSDate *currentDate = [_dates objectAtIndex:i];
Float64 interval = [currentDate timeIntervalSinceDate:startDate];
int32_t timeScale;
if (i == 0)
{
timeScale = 1.0/([[_dates objectAtIndex:i+1] timeIntervalSinceDate:currentDate]);
}
else
{
timeScale = 1.0/([currentDate timeIntervalSinceDate:[_dates objectAtIndex:i-1]]);
}
/**/
CMTime presentTime=CMTimeMakeWithSeconds(interval, MAX(33, timeScale));
// NSLog(#"presentTime:%#",(__bridge NSString *)CMTimeCopyDescription(kCFAllocatorDefault, presentTime));
if (_progressBlock != NULL)
{
dispatch_sync(dispatch_get_main_queue(), ^{
_progressBlock((CGFloat)i/(CGFloat)currentIndex);
});
}
// append buffer
[adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
CVPixelBufferRelease(buffer);
}
}
break;
}
}
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
path = [NSTemporaryDirectory() stringByAppendingFormat:@"%d.png",++i];
}
//Finish the session:
[writerInput markAsFinished];
if ([videoWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)])
{
[videoWriter finishWritingWithCompletionHandler:^{
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
}];
}
else
{
[videoWriter finishWriting];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
}
NSDictionary *fileAttrubutes = [[NSFileManager defaultManager] attributesOfItemAtPath:_path error:nil];
NSDictionary *dictInfo = [NSDictionary dictionaryWithObjectsAndKeys:
_path,IQFilePathKey,
[fileAttrubutes objectForKey:NSFileSize], IQFileSizeKey,
[fileAttrubutes objectForKey:NSFileCreationDate], IQFileCreateDateKey,
nil];
if (_completionBlock != NULL)
{
dispatch_sync(dispatch_get_main_queue(), ^{
_completionBlock(dictInfo,videoWriter.error);
});
}
NSString *openCommand = [NSString stringWithFormat:@"/usr/bin/open \"%@\"", NSTemporaryDirectory()];
system([openCommand fileSystemRepresentation]);
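If the goal is to get the finished movie into the iPad's photo library (the original question), one option at the time was ALAssetsLibrary; a minimal sketch, assuming moviePath is whatever path the asset writer was given (requires AssetsLibrary.framework):
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
NSURL *movieURL = [NSURL fileURLWithPath:moviePath]; // moviePath is a placeholder for the writer's output path
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:movieURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:movieURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) NSLog(@"Saving to the photo library failed: %@", error);
else NSLog(@"Saved to the photo library: %@", assetURL);
}];
}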
