I am recording a video in my app; the recording duration is sent by the server. The server checks the video for certain gestures and, if the check fails, sends back a flag asking the app to re-record and resend.
Problem: the initial video has the correct duration, but when the server requests a retry, the re-recorded videos start with a black screen for a certain amount of time before the recorded footage plays.
flow:
- (void)viewDidLoad {
[super viewDidLoad];
// get recording time
[self getRecordingData];
}
recording function:
-(void)startRecording {
self.isRecording = YES;
self.isProcessing = YES;
[self startWritingToVideoFile];
[self logicMethodToStopRecording];
}
writing to video file function:
-(void)startWritingToVideoFile{
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:640], AVVideoWidthKey, [NSNumber numberWithInt:480], AVVideoHeightKey, AVVideoCodecTypeH264, AVVideoCodecKey,nil];
self.assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
[self.assetWriterInput setTransform: CGAffineTransformMakeRotation( ( 90 * M_PI ) / 180 )];
self.pixelBufferAdaptor =
[[AVAssetWriterInputPixelBufferAdaptor alloc]
initWithAssetWriterInput:self.assetWriterInput
sourcePixelBufferAttributes:
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA],
kCVPixelBufferPixelFormatTypeKey,
nil]];
self.videoPath = [Utilities pathForTemporaryFileWithSuffix:@"mov"];
NSURL *videoURL = [NSURL fileURLWithPath:self.videoPath];
/* Asset writer with MPEG4 format*/
self.assetWriterMyData = [[AVAssetWriter alloc]
initWithURL: videoURL
fileType:AVFileTypeMPEG4
error:nil];
[self.assetWriterMyData addInput:self.assetWriterInput];
self.assetWriterInput.expectsMediaDataInRealTime = YES;
[self.assetWriterMyData startWriting];
[self.assetWriterMyData startSessionAtSourceTime:kCMTimeZero];
self.isReadyToWrite = YES;
}
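For reference, the writer setup above passes error:nil and never checks canAddInput:. Below is a minimal sketch of the same creation step with basic error reporting; the helper name is hypothetical and the property names are taken from the code above.
// Hypothetical helper: create the writer for a given URL and report failures
// instead of passing error:nil. Uses the same properties as the code above.
- (BOOL)createWriterForURL:(NSURL *)videoURL
{
    NSError *writerError = nil;
    self.assetWriterMyData = [[AVAssetWriter alloc] initWithURL:videoURL
                                                       fileType:AVFileTypeMPEG4
                                                          error:&writerError];
    if (!self.assetWriterMyData) {
        NSLog(@"Could not create AVAssetWriter: %@", writerError);
        return NO;
    }
    if (![self.assetWriterMyData canAddInput:self.assetWriterInput]) {
        NSLog(@"Writer rejected the video input");
        return NO;
    }
    [self.assetWriterMyData addInput:self.assetWriterInput];
    return YES;
}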
logic method to stop recording:
-(void)logicMethodToStopRecording{
float time = [self.timeFromServer floatValue];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, time * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
[self stopRecording];
});
}
actual stop-recording method:
-(void)stopRecording{
[self stopWritingToVideoFile];
}
-(void)stopWritingToVideoFile {
[self.assetWriterMyData finishWritingWithCompletionHandler:^{
[self showLoading];
if(!self.continueRunning){
return;
}
[self sendVideo];
// sends video file referenced in startWritingToVideoFile method
}];
}
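Note that nothing above stops the capture callback from appending or marks the writer input as finished before the file is closed. A hedged variant of the same method with that bookkeeping added (not the original code; the isReadyToWrite flag is assumed to be checked by the capture callback):
-(void)stopWritingToVideoFile {
    self.isReadyToWrite = NO;                  // flag for the capture callback to check before appending (assumed)
    [self.assetWriterInput markAsFinished];    // tell the writer no more samples are coming
    [self.assetWriterMyData finishWritingWithCompletionHandler:^{
        [self showLoading];
        if (!self.continueRunning) {
            return;
        }
        [self sendVideo];
    }];
}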
The sending video method:
-(void)sendVideo{
[self.xxxxxx sendAPI:self.userToVerifyUserId videoPath:self.videoPath callback:^(NSString *jsonResponse){
[self handleResponse:jsonResponse];
}];
}
The response is handled by the following method:
-(void)handleResponse: (NSString*)result{
[Utilities deleteFile:self.videoPath];
//if no success but retry is true. re-record and re-send.
if(!self.success && retry){
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 3.0 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
[self startRecording]; // start recording called again
});
    }
}
Suppose the initial file I create has x seconds of video; it is generated correctly.
However, during re-attempts:
I delete the old file and create a new one each time, yet the output of the second and third attempts starts with a black screen lasting roughly n+x and 2n+x seconds respectively. Please help.
************** EDIT ****************************************************
Additionally, the AVCaptureSession sample-buffer callback does the following:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// a very dense way to keep track of the time at which this frame
// occurs relative to the output stream, but it's just an example!
static int64_t frameNumber = 0;
if(self.assetWriterInput.readyForMoreMediaData){
if(self.pixelBufferAdaptor != nil){
[self.pixelBufferAdaptor appendPixelBuffer:imageBuffer
withPresentationTime:CMTimeMake(frameNumber, 25)];
frameNumber++;
}
}
}
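One thing worth noting in this callback: frameNumber is static, so it is never reset between attempts, while every new writer session starts at kCMTimeZero. Below is a hedged sketch of the same callback using the buffer's own capture timestamp instead; self.recordingStartTime is an assumed CMTime property that would be reset to kCMTimeInvalid at the start of each recording.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    if (!self.isReadyToWrite || !self.assetWriterInput.readyForMoreMediaData || self.pixelBufferAdaptor == nil) {
        return;
    }
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (CMTIME_IS_INVALID(self.recordingStartTime)) {
        // First frame of this attempt: remember its timestamp so the written
        // session effectively starts at zero on every retry.
        self.recordingStartTime = pts;
    }
    CMTime relativeTime = CMTimeSubtract(pts, self.recordingStartTime);
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer) {
        [self.pixelBufferAdaptor appendPixelBuffer:(CVPixelBufferRef)imageBuffer
                              withPresentationTime:relativeTime];
    }
}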
**************************EDIT - 2 *********************************************
+(NSString *)pathForTemporaryFileWithSuffix:(NSString *)suffix
{
NSString * result;
CFUUIDRef uuid;
CFStringRef uuidStr;
uuid = CFUUIDCreate(NULL);
assert(uuid != NULL);
uuidStr = CFUUIDCreateString(NULL, uuid);
assert(uuidStr != NULL);
result = [NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.%@", uuidStr, suffix]];
assert(result != nil);
CFRelease(uuidStr);
CFRelease(uuid);
return result;
}
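For reference, the same helper can be written more compactly with NSUUID (available since iOS 6); this is a sketch, not the original code:
+ (NSString *)pathForTemporaryFileWithSuffix:(NSString *)suffix
{
    NSString *name = [NSString stringWithFormat:@"%@.%@", [NSUUID UUID].UUIDString, suffix];
    return [NSTemporaryDirectory() stringByAppendingPathComponent:name];
}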
I'm using AVFoundation. I want to record video using both the front and back cameras. I record video with one camera, but when I switch the camera mode from back to front, the camera freezes. Is it possible to record video continuously while switching between both cameras?
Sample Code:
- (void) startup
{
if (_session == nil)
{
NSLog(@"Starting up server");
self.isCapturing = NO;
self.isPaused = NO;
_currentFile = 0;
_discont = NO;
// create capture device with video input
_session = [[AVCaptureSession alloc] init];
AVCaptureDevice *backCamera = [self frontCamera];
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
[_session addInput:input];
// audio input from default mic
AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
[_session addInput:micinput];
// create an output for YUV output with self as delegate
_captureQueue = dispatch_queue_create("com.softcraftsystems.comss", DISPATCH_QUEUE_SERIAL);
AVCaptureVideoDataOutput* videoout = [[AVCaptureVideoDataOutput alloc] init];
[videoout setSampleBufferDelegate:self queue:_captureQueue];
NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
videoout.videoSettings = setcapSettings;
[_session addOutput:videoout];
_videoConnection = [videoout connectionWithMediaType:AVMediaTypeVideo];
// find the actual dimensions used so we can set up the encoder to the same.
NSDictionary* actual = videoout.videoSettings;
_cy = [[actual objectForKey:@"Height"] integerValue];
_cx = [[actual objectForKey:@"Width"] integerValue];
AVCaptureAudioDataOutput* audioout = [[AVCaptureAudioDataOutput alloc] init];
[audioout setSampleBufferDelegate:self queue:_captureQueue];
[_session addOutput:audioout];
_audioConnection = [audioout connectionWithMediaType:AVMediaTypeAudio];
// for audio, we want the channels and sample rate, but we can't get those from audioout.audiosettings on ios, so
// we need to wait for the first sample
// start capture and a preview layer
[_session startRunning];
_preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
}
- (AVCaptureDevice *)frontCamera
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionFront) {
return device;
}
}
return nil;
}
- (AVCaptureDevice *)backCamera
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionBack) {
return device;
}
}
return nil;
}
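As an aside, devicesWithMediaType: is deprecated on newer SDKs; a hedged equivalent of the two lookups above using a discovery session (iOS 10+), not part of the original code:
- (AVCaptureDevice *)cameraAtPosition:(AVCaptureDevicePosition)position
{
    // Look up the built-in wide-angle camera at the requested position.
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                                mediaType:AVMediaTypeVideo
                                                                 position:position];
    return discovery.devices.firstObject;
}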
- (void) startupFront
{
_session = nil;
[_session stopRunning];
if (_session == nil)
{
NSLog(@"Starting up server");
self.isCapturing = NO;
self.isPaused = NO;
_currentFile = 0;
_discont = NO;
// create capture device with video input
_session = [[AVCaptureSession alloc] init];
AVCaptureDevice *backCamera = [self backCamera];
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
[_session addInput:input];
// audio input from default mic
AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
[_session addInput:micinput];
// create an output for YUV output with self as delegate
_captureQueue = dispatch_queue_create("com.softcraftsystems.comss", DISPATCH_QUEUE_SERIAL);
AVCaptureVideoDataOutput* videoout = [[AVCaptureVideoDataOutput alloc] init];
[videoout setSampleBufferDelegate:self queue:_captureQueue];
NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
videoout.videoSettings = setcapSettings;
[_session addOutput:videoout];
_videoConnection = [videoout connectionWithMediaType:AVMediaTypeVideo];
// find the actual dimensions used so we can set up the encoder to the same.
NSDictionary* actual = videoout.videoSettings;
_cy = [[actual objectForKey:@"Height"] integerValue];
_cx = [[actual objectForKey:@"Width"] integerValue];
AVCaptureAudioDataOutput* audioout = [[AVCaptureAudioDataOutput alloc] init];
[audioout setSampleBufferDelegate:self queue:_captureQueue];
[_session addOutput:audioout];
_audioConnection = [audioout connectionWithMediaType:AVMediaTypeAudio];
// for audio, we want the channels and sample rate, but we can't get those from audioout.audiosettings on ios, so
// we need to wait for the first sample
// start capture and a preview layer
[_session startRunning];
_preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
}
- (void) startCapture
{
@synchronized(self)
{
if (!self.isCapturing)
{
NSLog(@"starting capture");
// create the encoder once we have the audio params
_encoder = nil;
self.isPaused = NO;
_discont = NO;
_timeOffset = CMTimeMake(0, 0);
self.isCapturing = YES;
}
}
}
- (void) stopCapture
{
@synchronized(self)
{
if (self.isCapturing)
{
NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile];
NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
NSURL* url = [NSURL fileURLWithPath:path];
_currentFile++;
// serialize with audio and video capture
self.isCapturing = NO;
dispatch_async(_captureQueue, ^{
[_encoder finishWithCompletionHandler:^{
self.isCapturing = NO;
_encoder = nil;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:url completionBlock:^(NSURL *assetURL, NSError *error){
NSLog(@"save completed");
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
}];
}];
});
}
}
}
- (void) pauseCapture
{
@synchronized(self)
{
if (self.isCapturing)
{
NSLog(@"Pausing capture");
self.isPaused = YES;
_discont = YES;
}
}
}
- (void) resumeCapture
{
@synchronized(self)
{
if (self.isPaused)
{
NSLog(@"Resuming capture");
self.isPaused = NO;
}
}
}
- (CMSampleBufferRef) adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset
{
CMItemCount count;
CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
for (CMItemCount i = 0; i < count; i++)
{
pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
}
CMSampleBufferRef sout;
CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
free(pInfo);
return sout;
}
- (void) setAudioFormat:(CMFormatDescriptionRef) fmt
{
const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
_samplerate = asbd->mSampleRate;
_channels = asbd->mChannelsPerFrame;
}
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
BOOL bVideo = YES;
@synchronized(self)
{
if (!self.isCapturing || self.isPaused)
{
return;
}
if (connection != _videoConnection)
{
bVideo = NO;
}
if ((_encoder == nil) && !bVideo)
{
CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
[self setAudioFormat:fmt];
NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile];
NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
_encoder = [VideoEncoder encoderForPath:path Height:_cy width:_cx channels:_channels samples:_samplerate];
}
if (_discont)
{
if (bVideo)
{
return;
}
_discont = NO;
// calc adjustment
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime last = bVideo ? _lastVideo : _lastAudio;
if (last.flags & kCMTimeFlags_Valid)
{
if (_timeOffset.flags & kCMTimeFlags_Valid)
{
pts = CMTimeSubtract(pts, _timeOffset);
}
CMTime offset = CMTimeSubtract(pts, last);
NSLog(@"Setting offset from %s", bVideo?"video": "audio");
NSLog(@"Adding %f to %f (pts %f)", ((double)offset.value)/offset.timescale, ((double)_timeOffset.value)/_timeOffset.timescale, ((double)pts.value/pts.timescale));
// this stops us having to set a scale for _timeOffset before we see the first video time
if (_timeOffset.value == 0)
{
_timeOffset = offset;
}
else
{
_timeOffset = CMTimeAdd(_timeOffset, offset);
}
}
_lastVideo.flags = 0;
_lastAudio.flags = 0;
}
// retain so that we can release either this or modified one
CFRetain(sampleBuffer);
if (_timeOffset.value > 0)
{
CFRelease(sampleBuffer);
sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
}
// record most recent time so we know the length of the pause
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
if (dur.value > 0)
{
pts = CMTimeAdd(pts, dur);
}
if (bVideo)
{
_lastVideo = pts;
}
else
{
_lastAudio = pts;
}
}
// pass frame to encoder
[_encoder encodeFrame:sampleBuffer isVideo:bVideo];
CFRelease(sampleBuffer);
}
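A worked example of the offset bookkeeping in the callback above (the numbers are made up):
// Suppose the last audio PTS seen before a pause was 10.0 s and the first PTS
// after resuming is 14.0 s. The computed offset is 4.0 s, and adjustTime:by:
// then shifts every later buffer back by that amount, so the written file
// contains no 4-second gap for the paused interval.
CMTime last   = CMTimeMakeWithSeconds(10.0, 600);
CMTime pts    = CMTimeMakeWithSeconds(14.0, 600);
CMTime offset = CMTimeSubtract(pts, last);   // 4.0 s at timescale 600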
- (void) shutdown
{
NSLog(@"shutting down server");
if (_session)
{
[_session stopRunning];
_session = nil;
}
[_encoder finishWithCompletionHandler:^{
NSLog(@"Capture completed");
}];
}
In my opinion it is not possible to continue recording while switching cameras,
because there is a resolution and quality difference between them, and a video can have only one resolution and quality throughout.
Secondly, every time you switch cameras, the capture device is allocated and initialized again.
So unfortunately, as far as I can tell, it is not possible.
But if you do find a solution, please let me know.
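For completeness, the input swap that startupFront attempts by rebuilding everything can also be expressed as a reconfiguration of the running session; whether the encoder then produces one continuous file is a separate problem (the pause/offset logic above is one way to bridge it). A hedged sketch, assuming the _session ivar and the camera lookups from the question:
- (void)switchToCamera:(AVCaptureDevice *)camera
{
    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
    if (!newInput) {
        NSLog(@"Could not create camera input: %@", error);
        return;
    }
    [_session beginConfiguration];
    // Remove the current video input(s) before adding the new one.
    for (AVCaptureInput *input in _session.inputs) {
        if ([input isKindOfClass:[AVCaptureDeviceInput class]] &&
            [((AVCaptureDeviceInput *)input).device hasMediaType:AVMediaTypeVideo]) {
            [_session removeInput:input];
        }
    }
    if ([_session canAddInput:newInput]) {
        [_session addInput:newInput];
    }
    [_session commitConfiguration];
}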
I'm coding an iPad app in which I need to join mp4 files with different resolutions. To do so I'm using a combination of AVAssetReader to read the mp4 source files and AVAssetWriter to write those source files into a single mp4 output file.
I tried using AVAssetExportSession, but the problem I had was that there were black frames between the joined files.
The problem I'm facing now is that everything seems OK, but the completion handler of the AVAssetWriter is never called.
Here is my selector, which takes as input a list of mp4 file URLs, a single output file URL, and a completion handler.
- (void)resizeAndJoinVideosAtURLs:(NSArray *)videoURLs toOutputURL:(NSURL *)outputURL withHandler:(void(^)(NSURL *fileURL))handler
{
/*
First step: create the writer and writer input
*/
NSError *error = nil;
self.videoAssetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:&error];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,[NSNumber numberWithInt:640], AVVideoWidthKey,[NSNumber numberWithInt:480], AVVideoHeightKey,nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
videoWriterInput.expectsMediaDataInRealTime = NO;
if([self.videoAssetWriter canAddInput:videoWriterInput])
{
[self.videoAssetWriter addInput:videoWriterInput];
[self.videoAssetWriter startWriting];
[self.videoAssetWriter startSessionAtSourceTime:kCMTimeZero];
/*
Second step: for each video URL given create a reader and an reader input
*/
for(NSURL *videoURL in videoURLs)
{
NSLog(@"Processing file: %@",videoURL);
AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
AVAssetReader *videoAssetReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:&error];
AVAssetTrack *videoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoAssetTrack outputSettings:videoOptions];
videoAssetTrackOutput.alwaysCopiesSampleData = NO;
if([videoAssetReader canAddOutput:videoAssetTrackOutput])
{
[videoAssetReader addOutput:videoAssetTrackOutput];
[videoAssetReader startReading];
/*
Step three: copy the buffers from the reader to the writer
*/
while ([videoAssetReader status] == AVAssetReaderStatusReading)
{
if(![videoWriterInput isReadyForMoreMediaData]) continue;
CMSampleBufferRef buffer = [videoAssetTrackOutput copyNextSampleBuffer];
if(buffer)
{
[videoWriterInput appendSampleBuffer:buffer];
CFRelease(buffer);
}
}
} else NSLog(@"ERROR: %@",error);
}
[videoWriterInput markAsFinished];
} else NSLog(@"ERROR: %@",error);
__weak ClipBuilder *weakself = self;
[self.videoAssetWriter finishWritingWithCompletionHandler:^{
handler(outputURL);
weakself.videoAssetWriter = nil;
}];
}
My output file exists, and the AVAssetWriter still exists since it is a property, but the completion handler is never called. What can explain this?
Thanks for your help.
Here is the solution I finally implemented to join mp4 files with different resolutions, using a combination of AVAssetReader / AVAssetWriter.
- (void)reencodeComposition:(AVComposition *)composition toMP4File:(NSURL *)mp4FileURL withCompletionHandler:(void (^)(void))handler
{
self.status = EncoderStatusEncoding;
/*
Create the asset writer to write the file on disk
*/
NSError *error = nil;
if([[NSFileManager defaultManager] fileExistsAtPath:mp4FileURL.path isDirectory:nil])
{
if(![[NSFileManager defaultManager] removeItemAtPath:mp4FileURL.path error:&error])
{
[self failWithError:error withCompletionHandler:handler];
return;
}
}
self.assetWriter = [[AVAssetWriter alloc] initWithURL:mp4FileURL fileType:AVFileTypeMPEG4 error:&error];
if(self.assetWriter)
{
/*
Get the audio and video track of the composition
*/
AVAssetTrack *videoAssetTrack = [composition tracksWithMediaType:AVMediaTypeVideo].firstObject;
AVAssetTrack *audioAssetTrack = [composition tracksWithMediaType:AVMediaTypeAudio].firstObject;
NSDictionary *videoSettings = @{AVVideoCodecKey:AVVideoCodecH264, AVVideoWidthKey:@(self.imageWidth), AVVideoHeightKey:@(self.imageHeight)};
/*
Add an input to be able to write the video in the file
*/
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
videoWriterInput.expectsMediaDataInRealTime = YES;
if([self.assetWriter canAddInput:videoWriterInput])
{
[self.assetWriter addInput:videoWriterInput];
/*
Add an input to be able to write the audio in the file
*/
// Use this only if you know the format
// CMFormatDescriptionRef audio_fmt_desc_ = nil;
//
// AudioStreamBasicDescription audioFormat;
// bzero(&audioFormat, sizeof(audioFormat));
// audioFormat.mSampleRate = 44100;
// audioFormat.mFormatID = kAudioFormatMPEG4AAC;
// audioFormat.mFramesPerPacket = 1024;
// audioFormat.mChannelsPerFrame = 2;
// int bytes_per_sample = sizeof(float);
// audioFormat.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked;
//
// audioFormat.mBitsPerChannel = bytes_per_sample * 8;
// audioFormat.mBytesPerPacket = bytes_per_sample * 2;
// audioFormat.mBytesPerFrame = bytes_per_sample * 2;
//
// CMAudioFormatDescriptionCreate(kCFAllocatorDefault,&audioFormat,0,NULL,0,NULL,NULL,&audio_fmt_desc_);
//
// AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil sourceFormatHint:audio_fmt_desc_];
// Create the audio writer input, using the source track's format description as a hint.
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil sourceFormatHint:((__bridge CMAudioFormatDescriptionRef)audioAssetTrack.formatDescriptions.firstObject)];
audioWriterInput.expectsMediaDataInRealTime = YES;
if([self.assetWriter canAddInput:audioWriterInput])
{
[self.assetWriter addInput:audioWriterInput];
[self.assetWriter startWriting];
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];
/*
Create the asset reader to read the mp4 files on the disk
*/
AVAssetReader *assetReader = [[AVAssetReader alloc] initWithAsset:composition error:&error];
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
/*
Add an output to be able to retrieve the video in the files
*/
AVAssetReaderTrackOutput *videoAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoAssetTrack outputSettings:videoOptions];
videoAssetTrackOutput.alwaysCopiesSampleData = NO;
if([assetReader canAddOutput:videoAssetTrackOutput])
{
[assetReader addOutput:videoAssetTrackOutput];
/*
Add an output to be able to retrieve the audio in the files
*/
AVAssetReaderTrackOutput *audioAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:audioAssetTrack outputSettings:nil];
audioAssetTrackOutput.alwaysCopiesSampleData = NO;
if([assetReader canAddOutput:audioAssetTrackOutput])
{
[assetReader addOutput:audioAssetTrackOutput];
[assetReader startReading];
/*
Read the mp4 files until the end and copy them in the output file
*/
dispatch_group_t encodingGroup = dispatch_group_create();
dispatch_group_enter(encodingGroup);
[audioWriterInput requestMediaDataWhenReadyOnQueue:self.encodingQueue usingBlock:^{
while ([audioWriterInput isReadyForMoreMediaData])
{
CMSampleBufferRef nextSampleBuffer = [audioAssetTrackOutput copyNextSampleBuffer];
if (nextSampleBuffer)
{
[audioWriterInput appendSampleBuffer:nextSampleBuffer];
CFRelease(nextSampleBuffer);
}
else
{
[audioWriterInput markAsFinished];
dispatch_group_leave(encodingGroup);
break;
}
}
}];
dispatch_group_enter(encodingGroup);
[videoWriterInput requestMediaDataWhenReadyOnQueue:self.encodingQueue usingBlock:^{
while ([videoWriterInput isReadyForMoreMediaData])
{
CMSampleBufferRef nextSampleBuffer = [videoAssetTrackOutput copyNextSampleBuffer];
if (nextSampleBuffer)
{
[videoWriterInput appendSampleBuffer:nextSampleBuffer];
CFRelease(nextSampleBuffer);
}
else
{
[videoWriterInput markAsFinished];
dispatch_group_leave(encodingGroup);
break;
}
}
}];
dispatch_group_wait(encodingGroup, DISPATCH_TIME_FOREVER);
} else [self failWithError:error withCompletionHandler:handler];
} else [self failWithError:error withCompletionHandler:handler];
} else [self failWithError:error withCompletionHandler:handler];
} else [self failWithError:error withCompletionHandler:handler];
__weak Encoder *weakself = self;
[self.assetWriter finishWritingWithCompletionHandler:^{
self.status = EncoderStatusCompleted;
handler();
weakself.assetWriter = nil;
self.encodingQueue = nil;
}];
}
else [self failWithError:error withCompletionHandler:handler];
}
With
- (dispatch_queue_t)encodingQueue
{
if(!_encodingQueue)
{
_encodingQueue = dispatch_queue_create("com.myProject.encoding", NULL);
}
return _encodingQueue;
}
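A hypothetical call site for the method above (the encoder, composition, and output path are assumed to exist):
[encoder reencodeComposition:composition
                   toMP4File:[NSURL fileURLWithPath:outputPath]
       withCompletionHandler:^{
    NSLog(@"Re-encode finished");
}];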
This implementation was for my TS2MP4 project, but in the end I did not need it.
I am writing a video-capture app for iOS 4+. It works fine on devices running iOS 5+, but on iOS 4 the delegate method didFinishRecordingToOutputFileAtURL is not called after the recording has stopped. I have checked Apple's reference, which says "This method is always called for each recording request, even if no data is successfully written to the file."
https://developer.apple.com/library/ios/#documentation/AVFoundation/Reference/AVCaptureFileOutputRecordingDelegate_Protocol/Reference/Reference.html
Any suggestions?
Here is the complete code:
//
// HomeViewController.m
// MyAgingBooth
//
// Created by Mahmud on 29/10/11.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "HomeViewController.h"
#import "Globals.h"
#import <MobileCoreServices/MobileCoreServices.h>
#import <MediaPlayer/MPMoviePlayerController.h>
#import "SharedData.h"
#import "ResultViewController.h"
@implementation HomeViewController
@synthesize BtnFromCamera, PreviewLayer;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
isPlaying=NO;
playerScore=0;
playerTurn=0;
}
return self;
}
- (void)dealloc
{
//[levelTimer release];
[super dealloc];
}
- (void)didReceiveMemoryWarning
{
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.navigationController.navigationBar.barStyle = UIBarStyleBlackTranslucent;
playerName.text=[NSString stringWithFormat: @"Player %d", (playerTurn+1)];
//add a right bar button item proceed to next.
UIBarButtonItem *proceedButton = [[UIBarButtonItem alloc] initWithTitle:@"Proceed" style:UIBarButtonItemStylePlain target:self action:@selector(proceedToNext)];
//[proceedButton setImage:[UIImage imageNamed:@"info.png"]];
self.navigationItem.rightBarButtonItem=proceedButton;
[proceedButton release];
[BtnFromCamera setTitle:@"Start" forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateSelected];
NSArray *words=[NSArray arrayWithObjects:@"SAY: Girls",@"SAY: Shut up", @"SAY: Tiger",@"SAY: Absurd",@"SAY: Tonight", @"SAY: Amstardam", nil];
[word setText:[words objectAtIndex:arc4random()%6]];
[self initCaptureSession];
}
-(void) proceedToNext
{
self.title=@"Back";
ResultViewController *resultViewController= [[ResultViewController alloc] initWithNibName:@"ResultViewController" bundle:nil];
[self.navigationController pushViewController:resultViewController animated:YES];
[resultViewController release];
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
//Action handlers for the buttons
// take snap with camera
-(void) initCaptureSession
{
NSLog(@"Setting up capture session");
CaptureSession = [[AVCaptureSession alloc] init];
//----- ADD INPUTS -----
NSLog(@"Adding video input");
//ADD VIDEO INPUT
AVCaptureDevice *VideoDevice = [self frontFacingCameraIfAvailable ];
//[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (VideoDevice)
{
NSError *error;
VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(@"Couldn't add video input");
}
else
{
NSLog(@"Couldn't create video input");
}
}
else
{
NSLog(@"Couldn't create video capture device");
}
//ADD AUDIO INPUT
NSLog(@"Adding audio input");
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput)
{
[CaptureSession addInput:audioInput];
}
//----- ADD OUTPUTS -----
//ADD VIDEO PREVIEW LAYER
NSLog(@"Adding video preview layer");
[self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession] autorelease]];
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait; //<<SET ORIENTATION. You can deliberatly set this wrong to flip the image and may actually need to set it wrong to get the right image
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(@"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if([CaptureSession canAddOutput:AudioOutput])
{
[CaptureSession addOutput:AudioOutput];
NSLog(@"AudioOutput addedd");
}
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
//----- SET THE IMAGE QUALITY / RESOLUTION -----
//Options:
// AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
// AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
// AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
// AVCaptureSessionPreset640x480 - 640x480 VGA (check its supported before setting it)
// AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check its supported before setting it)
// AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
NSLog(@"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetMedium];
if ([CaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) //Check size based configs are supported before setting them
[CaptureSession setSessionPreset:AVCaptureSessionPreset640x480];
//----- DISPLAY THE PREVIEW LAYER -----
//Display it full screen under out view controller existing controls
NSLog(@"Display the preview layer");
CGRect layerRect = CGRectMake(10,44,300,290); //[[[self view] layer] bounds];
[PreviewLayer setBounds:layerRect];
[PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),
CGRectGetMidY(layerRect))];
//[[[self view] layer] addSublayer:[[self CaptureManager] previewLayer]];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
UIView *CameraView = [[[UIView alloc] init] autorelease];
[[self view] addSubview:CameraView];
//[self.view sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void) CameraSetOutputProperties
{
AVCaptureConnection *CaptureConnection=nil;
//SET THE CONNECTION PROPERTIES (output properties)
NSComparisonResult order = [[UIDevice currentDevice].systemVersion compare: @"5.0.0" options: NSNumericSearch];
if (order == NSOrderedSame || order == NSOrderedDescending) {
// OS version >= 5.0.0
CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMinFrameDuration)
{
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
} else {
// OS version < 5.0.0
CaptureConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[MovieFileOutput connections]];
}
//Set landscape (if required)
if ([CaptureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeRight; //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
[CaptureConnection setVideoOrientation:orientation];
}
//Set frame rate (if requried)
//CMTimeShow(CaptureConnection.videoMinFrameDuration);
//CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (IBAction) StartVideo
{
if (!isPlaying) {
self.navigationItem.rightBarButtonItem.enabled=NO;
[BtnFromCamera setTitle:@"Stop" forState:UIControlStateNormal];
playerName.text=[NSString stringWithFormat:@"Player %d", playerTurn+1];
playerScore=0;
count=0;
isPlaying=YES;
//Create temporary URL to record to
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath])
{
NSError *error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
{
//Error - handle if requried
NSLog(@"file remove error");
}
}
[outputPath release];
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[outputURL release];
//NSString *DestFilename = @"output.mov";
//Set the file save to URL
/* NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSURL* saveLocationURL = [[NSURL alloc] initFileURLWithPath:destinationPath];
[MovieFileOutput startRecordingToOutputFileURL:saveLocationURL recordingDelegate:self];
[saveLocationURL release]; */
levelTimer = [NSTimer scheduledTimerWithTimeInterval:0.05 target: self selector: @selector(levelTimerCallback:) userInfo: nil repeats: YES];
}
else
{
isPlaying=NO;
NSLog(@"STOP RECORDING");
[MovieFileOutput stopRecording];
[levelTimer invalidate];
[BtnFromCamera setTitle:@"Start" forState:UIControlStateNormal];
self.navigationItem.rightBarButtonItem.enabled=YES;
}
}
//********** DID FINISH RECORDING TO OUTPUT FILE AT URL **********/
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(@"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCESSFULLY -----
NSLog(@"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
NSLog(@"File save error");
}
else
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, @"PlayerName", [NSNumber numberWithFloat:playerScore], @"Score", assetURL, @"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:@"Score %f", playerScore];
playerTurn++;
}
}];
}
else {
NSString *assetURL=[self copyFileToDocuments:outputFileURL];
if(assetURL!=nil)
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, @"PlayerName", [NSNumber numberWithFloat:playerScore], @"Score",assetURL , @"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:@"Score %f", playerScore];
playerTurn++;
}
}
[library release];
}
}
- (NSString*) copyFileToDocuments:(NSURL *)fileURL
{
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSError *error;
if (![[NSFileManager defaultManager] copyItemAtURL:fileURL toURL:[NSURL fileURLWithPath:destinationPath] error:&error]) {
NSLog(@"File save error %@", [error localizedDescription]);
return nil;
}
return destinationPath;
}
- (void)levelTimerCallback:(NSTimer *)timer {
AVCaptureConnection *audioConnection = [self connectionWithMediaType:AVMediaTypeAudio fromConnections:[MovieFileOutput connections]];
//return [audioConnection isActive];
for (AVCaptureAudioChannel *channel in audioConnection.audioChannels) {
float avg = channel.averagePowerLevel;
// float peak = channel.peakHoldLevel;
float vol=powf(10, avg)*1000;
NSLog(@"Power: %f",vol);
if (isPlaying && vol > 0) {
playerScore=playerScore+vol;
count=count+1;
}
}
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
for ( AVCaptureConnection *connection in connections ) {
for ( AVCaptureInputPort *port in [connection inputPorts] ) {
if ( [[port mediaType] isEqual:mediaType] ) {
return connection;
}
}
}
return nil;
}
- (AVCaptureDevice *)frontFacingCameraIfAvailable
{
// look at all the video devices and get the first one that's on the front
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
@end
Fixed the issue. I accidentally added an extra audio output. Removing the following fragment worked for me.
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if([CaptureSession canAddOutput:AudioOutput])
{
[CaptureSession addOutput:AudioOutput];
NSLog(@"AudioOutput addedd");
}
I want to change the container of .mov video files that I pick using
UIImagePickerController and compress via AVAssetExportSession (with AVAssetExportPresetMediumQuality and shouldOptimizeForNetworkUse = YES) to an .mp4 container.
I need a programmatic way / sample code to perform the fastest possible trans-wrap in an iPhone/iPad application.
I tried setting the AVAssetExportSession.outputFileType property to AVFileTypeMPEG4, but it is not supported and I got an exception.
I tried doing the transform using AVAssetWriter by specifying fileType:AVFileTypeMPEG4. I did get an .mp4 output file, but it was not a trans-wrap: the output file was 3x bigger than the source, and the conversion took 128 seconds for a video of 60 seconds duration.
I need a solution that runs quickly and keeps the file size.
This is the code I use to convert .mov to .mp4:
I set the assetWriter options in the setUpReaderAndWriterReturningError method.
#import "MCVideoConverter.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVAssetReader.h>
#import <AVFoundation/AVAssetReaderOutput.h>
#import <AVFoundation/AVAssetWriter.h>
#import <AVFoundation/AVAssetWriterInput.h>
#import <AVFoundation/AVMediaFormat.h>
#import <AVFoundation/AVAudioSettings.h>
#import <AVFoundation/AVVideoSettings.h>
#import <AVFoundation/AVAssetImageGenerator.h>
#import <AVFoundation/AVTime.h>
#import <CoreMedia/CMSampleBuffer.h>
@protocol RWSampleBufferChannelDelegate;
@interface RWSampleBufferChannel : NSObject
{
@private
AVAssetReaderOutput *assetReaderOutput;
AVAssetWriterInput *assetWriterInput;
dispatch_block_t completionHandler;
dispatch_queue_t serializationQueue;
BOOL finished; // only accessed on serialization queue
}
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)assetReaderOutput assetWriterInput:(AVAssetWriterInput *)assetWriterInput;
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)completionHandler; // delegate is retained until completion handler is called. Completion handler is guaranteed to be called exactly once, whether reading/writing finishes, fails, or is cancelled. Delegate may be nil.
- (void)cancel;
@property (nonatomic, readonly) NSString *mediaType;
@end
@protocol RWSampleBufferChannelDelegate <NSObject>
@required
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
@interface MCVideoConverter () <RWSampleBufferChannelDelegate>
// These three methods are always called on the serialization dispatch queue
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError; // make sure "tracks" key of asset is loaded before calling this
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError;
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error;
@end
@implementation MCVideoConverter
+ (NSArray *)readableTypes
{
return [AVURLAsset audiovisualTypes];
}
+ (BOOL)canConcurrentlyReadDocumentsOfType:(NSString *)typeName
{
return YES;
}
- (id)init
{
self = [super init];
if (self)
{
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[asset release];
[outputURL release];
[assetReader release];
[assetWriter release];
[audioSampleBufferChannel release];
[videoSampleBufferChannel release];
if (serializationQueue)
dispatch_release(serializationQueue);
[super dealloc];
}
@synthesize asset=asset;
@synthesize timeRange=timeRange;
@synthesize writingSamples=writingSamples;
@synthesize outputURL=outputURL;
@synthesize propgerssView;
- (void)convertVideo:(NSURL*) inputURL outputURL: (NSURL*) _outputURL progress:(UIProgressView*) _propgerssView
{
self.asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
self.propgerssView = _propgerssView;
cancelled = NO;
[self performSelector:@selector(startProgressSheetWithURL:) withObject:_outputURL afterDelay:0.0]; // avoid starting a new sheet while in
}
- (void)startProgressSheetWithURL:(NSURL *)localOutputURL
{
[self setOutputURL:localOutputURL];
[self setWritingSamples:YES];
AVAsset *localAsset = [self asset];
[localAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"tracks", @"duration", nil] completionHandler:^
{
// Dispatch the setup work to the serialization queue, to ensure this work is serialized with potential cancellation
dispatch_async(serializationQueue, ^{
// Since we are doing these things asynchronously, the user may have already cancelled on the main thread. In that case, simply return from this block
if (cancelled)
return;
BOOL success = YES;
NSError *localError = nil;
success = ([localAsset statusOfValueForKey:@"tracks" error:&localError] == AVKeyValueStatusLoaded);
if (success)
success = ([localAsset statusOfValueForKey:@"duration" error:&localError] == AVKeyValueStatusLoaded);
if (success)
{
[self setTimeRange:CMTimeRangeMake(kCMTimeZero, [localAsset duration])];
// AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists
NSFileManager *fm = [NSFileManager defaultManager];
NSString *localOutputPath = [localOutputURL path];
if ([fm fileExistsAtPath:localOutputPath])
success = [fm removeItemAtPath:localOutputPath error:&localError];
}
// Set up the AVAssetReader and AVAssetWriter, then begin writing samples or flag an error
if (success)
success = [self setUpReaderAndWriterReturningError:&localError];
if (success)
success = [self startReadingAndWritingReturningError:&localError];
if (!success)
[self readingAndWritingDidFinishSuccessfully:success withError:localError];
});
}];
}
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
AVAsset *localAsset = [self asset];
NSURL *localOutputURL = [self outputURL];
// Create asset reader and asset writer
assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&localError];
success = (assetReader != nil);
if (success)
{
//changed assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeQuickTimeMovie error:&localError];
assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeMPEG4 error:&localError];
success = (assetWriter != nil);
}
// Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
if (success)
{
AVAssetTrack *audioTrack = nil, *videoTrack = nil;
// Grab first audio track and first video track, if the asset has them
NSArray *audioTracks = [localAsset tracksWithMediaType:AVMediaTypeAudio];
if ([audioTracks count] > 0)
audioTrack = [audioTracks objectAtIndex:0];
NSArray *videoTracks = [localAsset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0)
videoTrack = [videoTracks objectAtIndex:0];
if (audioTrack)
{
// Decompress to Linear PCM with the asset reader
NSDictionary *decompressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM], AVFormatIDKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:decompressionAudioSettings];
[assetReader addOutput:output];
AudioChannelLayout stereoChannelLayout = {
.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
.mChannelBitmap = 0,
.mNumberChannelDescriptions = 0
};
NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
// Compress to 128kbps AAC with the asset writer
NSDictionary *compressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInteger:128000], AVEncoderBitRateKey,
[NSNumber numberWithInteger:44100], AVSampleRateKey,
channelLayoutAsData, AVChannelLayoutKey,
[NSNumber numberWithUnsignedInteger:2], AVNumberOfChannelsKey,
nil];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[audioTrack mediaType] outputSettings:compressionAudioSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
audioSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
if (videoTrack)
{
// Decompress to ARGB with the asset reader
NSDictionary *decompressionVideoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (id)kCVPixelBufferPixelFormatTypeKey,
[NSDictionary dictionary], (id)kCVPixelBufferIOSurfacePropertiesKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:decompressionVideoSettings];
[assetReader addOutput:output];
// Get the format description of the track, to fill in attributes of the video stream that we don't want to change
CMFormatDescriptionRef formatDescription = NULL;
NSArray *formatDescriptions = [videoTrack formatDescriptions];
if ([formatDescriptions count] > 0)
formatDescription = (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];
// Grab track dimensions from format description
CGSize trackDimensions = {
.width = 0.0,
.height = 0.0,
};
if (formatDescription)
trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false);
else
trackDimensions = [videoTrack naturalSize];
// Grab clean aperture, pixel aspect ratio from format description
NSMutableDictionary *compressionSettings = nil;
// [NSMutableDictionary dictionaryWithObjectsAndKeys:
// AVVideoProfileLevelH264Baseline30, AVVideoProfileLevelKey,
// [NSNumber numberWithInt:960000], AVVideoAverageBitRateKey,
// [NSNumber numberWithInt:1],AVVideoMaxKeyFrameIntervalKey,
// nil ];
//NSDictionary *videoSettings = nil;
if (formatDescription)
{
NSDictionary *cleanAperture = nil;
NSDictionary *pixelAspectRatio = nil;
CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture);
if (cleanApertureFromCMFormatDescription)
{
cleanAperture = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth), AVVideoCleanApertureWidthKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight), AVVideoCleanApertureHeightKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset), AVVideoCleanApertureHorizontalOffsetKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset), AVVideoCleanApertureVerticalOffsetKey,
nil];
}
CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio);
if (pixelAspectRatioFromCMFormatDescription)
{
pixelAspectRatio = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing), AVVideoPixelAspectRatioHorizontalSpacingKey,
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing), AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
}
if (cleanAperture || pixelAspectRatio)
{
if (cleanAperture)
[compressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey];
if (pixelAspectRatio)
[compressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey];
}
}
// Compress to H.264 with the asset writer
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithDouble:trackDimensions.width], AVVideoWidthKey,
[NSNumber numberWithDouble:trackDimensions.height], AVVideoHeightKey,
nil];
if (compressionSettings)
[videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[videoTrack mediaType] outputSettings:videoSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
videoSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
}
if (outError)
*outError = localError;
return success;
}
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
// Instruct the asset reader and asset writer to get ready to do work
success = [assetReader startReading];
if (!success)
localError = [assetReader error];
if (success)
{
success = [assetWriter startWriting];
if (!success)
localError = [assetWriter error];
}
if (success)
{
dispatch_group_t dispatchGroup = dispatch_group_create();
// Start a sample-writing session
[assetWriter startSessionAtSourceTime:[self timeRange].start];
// Start reading and writing samples
if (audioSampleBufferChannel)
{
// Only set audio delegate for audio-only assets, else let the video channel drive progress
id <RWSampleBufferChannelDelegate> delegate = nil;
if (!videoSampleBufferChannel)
delegate = self;
dispatch_group_enter(dispatchGroup);
[audioSampleBufferChannel startWithDelegate:delegate completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
if (videoSampleBufferChannel)
{
dispatch_group_enter(dispatchGroup);
[videoSampleBufferChannel startWithDelegate:self completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
// Set up a callback for when the sample writing is finished
dispatch_group_notify(dispatchGroup, serializationQueue, ^{
BOOL finalSuccess = YES;
NSError *finalError = nil;
if (cancelled)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
else
{
if ([assetReader status] == AVAssetReaderStatusFailed)
{
finalSuccess = NO;
finalError = [assetReader error];
}
if (finalSuccess)
{
finalSuccess = [assetWriter finishWriting];
if (!finalSuccess)
finalError = [assetWriter error];
}
}
[self readingAndWritingDidFinishSuccessfully:finalSuccess withError:finalError];
});
dispatch_release(dispatchGroup);
}
if (outError)
*outError = localError;
return success;
}
- (void)cancel
{
self.propgerssView = nil;
// Dispatch cancellation tasks to the serialization queue to avoid races with setup and teardown
dispatch_async(serializationQueue, ^{
[audioSampleBufferChannel cancel];
[videoSampleBufferChannel cancel];
cancelled = YES;
});
}
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error
{
NSLog(@"%s[%d] - success = %d error = %@", __FUNCTION__, __LINE__, success, error);
if (!success)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
// Tear down ivars
[assetReader release];
assetReader = nil;
[assetWriter release];
assetWriter = nil;
[audioSampleBufferChannel release];
audioSampleBufferChannel = nil;
[videoSampleBufferChannel release];
videoSampleBufferChannel = nil;
cancelled = NO;
// Dispatch UI-related tasks to the main queue
dispatch_async(dispatch_get_main_queue(), ^{
if (!success)
{
}
[self setWritingSamples:NO];
});
}
static double progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer, CMTimeRange timeRange)
{
CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
progressTime = CMTimeSubtract(progressTime, timeRange.start);
CMTime sampleDuration = CMSampleBufferGetDuration(sampleBuffer);
if (CMTIME_IS_NUMERIC(sampleDuration))
progressTime= CMTimeAdd(progressTime, sampleDuration);
return CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(timeRange.duration);
}
static void removeARGBColorComponentOfPixelBuffer(CVPixelBufferRef pixelBuffer, size_t componentIndex)
{
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
static const size_t bytesPerPixel = 4; // constant for ARGB pixel format
unsigned char *base = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
for (size_t row = 0; row < bufferHeight; ++row)
{
for (size_t column = 0; column < bufferWidth; ++column)
{
unsigned char *pixel = base + (row * bytesPerRow) + (column * bytesPerPixel);
pixel[componentIndex] = 0;
}
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
+ (size_t)componentIndexFromFilterTag:(NSInteger)filterTag
{
return (size_t)filterTag; // we set up the tags in the popup button to correspond directly with the index they modify
}
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
CVPixelBufferRef pixelBuffer = NULL;
// Calculate progress (scale of 0.0 to 1.0)
double progress = progressOfSampleBufferInTimeRange(sampleBuffer, [self timeRange]);
NSLog(@"%s[%d] - progress = %f", __FUNCTION__, __LINE__, progress);
// Grab the pixel buffer from the sample buffer, if possible
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer && (CFGetTypeID(imageBuffer) == CVPixelBufferGetTypeID()))
{
pixelBuffer = (CVPixelBufferRef)imageBuffer;
if (filterTag >= 0) // -1 means "no filtering, please"
removeARGBColorComponentOfPixelBuffer(pixelBuffer, [[self class] componentIndexFromFilterTag:filterTag]);
}
}
@end
@interface RWSampleBufferChannel ()
- (void)callCompletionHandlerIfNecessary; // always called on the serialization queue
@end
@implementation RWSampleBufferChannel
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)localAssetReaderOutput assetWriterInput:(AVAssetWriterInput *)localAssetWriterInput
{
self = [super init];
if (self)
{
assetReaderOutput = [localAssetReaderOutput retain];
assetWriterInput = [localAssetWriterInput retain];
finished = NO;
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[assetReaderOutput release];
[assetWriterInput release];
if (serializationQueue)
dispatch_release(serializationQueue);
[completionHandler release];
[super dealloc];
}
- (NSString *)mediaType
{
return [assetReaderOutput mediaType];
}
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)localCompletionHandler
{
completionHandler = [localCompletionHandler copy]; // released in -callCompletionHandlerIfNecessary
[assetWriterInput requestMediaDataWhenReadyOnQueue:serializationQueue usingBlock:^{
if (finished)
return;
BOOL completedOrFailed = NO;
// Read samples in a loop as long as the asset writer input is ready
while ([assetWriterInput isReadyForMoreMediaData] && !completedOrFailed)
{
CMSampleBufferRef sampleBuffer = [assetReaderOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
if ([delegate respondsToSelector:@selector(sampleBufferChannel:didReadSampleBuffer:)])
[delegate sampleBufferChannel:self didReadSampleBuffer:sampleBuffer];
BOOL success = [assetWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
[self callCompletionHandlerIfNecessary];
}];
}
- (void)cancel
{
dispatch_async(serializationQueue, ^{
[self callCompletionHandlerIfNecessary];
});
}
- (void)callCompletionHandlerIfNecessary
{
// Set state to mark that we no longer need to call the completion handler, grab the completion handler, and clear out the ivar
BOOL oldFinished = finished;
finished = YES;
if (oldFinished == NO)
{
[assetWriterInput markAsFinished]; // let the asset writer know that we will not be appending any more samples to this input
dispatch_block_t localCompletionHandler = [completionHandler retain];
[completionHandler release];
completionHandler = nil;
if (localCompletionHandler)
{
localCompletionHandler();
[localCompletionHandler release];
}
}
}
@end
Hey, it took a long while, but I ended up with a good solution; it may help someone in the future.
My code:
-(void) compressVideo
{
asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
NSLog(@" %@", [AVAssetExportSession exportPresetsCompatibleWithAsset:asset]);
NSLog(@" %@", exportSession.supportedFileTypes);
NSLog(@"----------------------------------------- convert to mp4");
NSLog(@" %@", exportSession.supportedFileTypes);
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = [self outputVideoPath:@"outPut" ext:@"mp4"];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
ICQLog(@" exportSession.status = %d exportSession.error = %@", exportSession.status, exportSession.error);
if ( exportSession && (exportSession.status == AVAssetExportSessionStatusCompleted) )
{
ICQLog(@" exportSession.outputURL = %@", exportSession.outputURL);
// we need to remove temporary files
[[NSFileManager defaultManager] removeItemAtURL:videoUrl error:NULL];
[videoUrl release];
videoUrl = [exportSession.outputURL retain];
}
else
{
//TODO - report error
}
[exportSession release], exportSession = nil;
[asset release], asset = nil;
}];
}
I can't help with the trans-wrap stuff; I haven't got my head into that.
Is the main priority to get the file output as an .mp4 without having to reprocess it? If so, just use .mp4 as the file extension of the movie clip that was output by your code and this should work fine. I used this approach today and it works; I didn't have to convert it from .mov to .mp4 because essentially an .mp4 file is the same as a .mov file with some additional standards-based functionality.
Hope this is of help.
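To illustrate the suggestion above, giving the exported clip an .mp4 extension is just a file-system operation; a hedged sketch where movPath is assumed to point at the exported file:
NSString *mp4Path = [[movPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mp4"];
NSError *moveError = nil;
// Move (rename) the file; the container bytes are left untouched.
if (![[NSFileManager defaultManager] moveItemAtPath:movPath toPath:mp4Path error:&moveError]) {
    NSLog(@"Rename failed: %@", moveError);
}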
This is the code I used.
- (BOOL)encodeVideo:(NSURL *)videoURL
{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
// Create the composition and tracks
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *assetVideoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if (assetVideoTracks.count <= 0)
{
NSLog(@"Error reading the transformed video track");
return NO;
}
// Insert the tracks in the composition's tracks
AVAssetTrack *assetVideoTrack = [assetVideoTracks firstObject];
[videoTrack insertTimeRange:assetVideoTrack.timeRange ofTrack:assetVideoTrack atTime:CMTimeMake(0, 1) error:nil];
[videoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioTrack insertTimeRange:assetAudioTrack.timeRange ofTrack:assetAudioTrack atTime:CMTimeMake(0, 1) error:nil];
// Export to mp4
NSString *mp4Quality = [MGPublic isIOSAbove:@"6.0"] ? AVAssetExportPresetMediumQuality : AVAssetExportPresetPassthrough;
NSString *exportPath = [NSString stringWithFormat:@"%@/%@.mp4",
[NSHomeDirectory() stringByAppendingString:@"/tmp"],
[BSCommon uuidString]];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:mp4Quality];
exportSession.outputURL = exportUrl;
CMTime start = CMTimeMakeWithSeconds(0.0, 0);
CMTimeRange range = CMTimeRangeMake(start, [asset duration]);
exportSession.timeRange = range;
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
NSLog(@"MP4 Successful!");
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export canceled");
break;
default:
break;
}
}];
return YES;
}