I am trying to record audio to a file for 14 days. I receive the audio data in PCM format through screen mirroring, and from it I create a CMSampleBuffer using this code:
-(void)createAudioSampleBufferFromData:(uint8_t*)buf withSize:(uint32_t)buflen timeStamp:(double)timeStamp{
OSStatus status;
AudioStreamBasicDescription monoStreamFormat;
memset(&monoStreamFormat, 0, sizeof(monoStreamFormat));
monoStreamFormat.mFormatID = kAudioFormatLinearPCM;
monoStreamFormat.mSampleRate = 44100;
monoStreamFormat.mChannelsPerFrame = 2;
monoStreamFormat.mBitsPerChannel = 16;
monoStreamFormat.mFramesPerPacket = 1; // uncompressed audio
monoStreamFormat.mBytesPerFrame =monoStreamFormat.mChannelsPerFrame * monoStreamFormat.mBitsPerChannel/8;
monoStreamFormat.mBytesPerPacket = monoStreamFormat.mBytesPerFrame * monoStreamFormat.mFramesPerPacket;
monoStreamFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
CMFormatDescriptionRef format = NULL;
status = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &monoStreamFormat, 0, NULL, 0, NULL, NULL, &format);
if (status != noErr) {
// really shouldn't happen
return;
}
long blockLength = buflen;
CMSampleBufferRef sampleBuffer = NULL;
CMBlockBufferRef blockBuffer = NULL;
status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, buf, // memoryBlock to hold buffered data
blockLength, // block length of the mem block in bytes.
kCFAllocatorDefault, NULL,
0, // offsetToData
blockLength, // dataLength of relevant bytes, starting at offsetToData
0, &blockBuffer);
if(status == noErr)
{
CMSampleTimingInfo timings = {CMTimeMake(1, 44100.0), CMClockGetTime(CMClockGetHostTimeClock()), kCMTimeInvalid };
const size_t sampleSize = blockLength;
status = CMSampleBufferCreate(kCFAllocatorDefault,
NULL, true, NULL, NULL,
format, 1, 0, &timings, 1,
&sampleSize, &sampleBuffer);
CMItemCount itemCount = CMSampleBufferGetNumSamples(sampleBuffer);
[self notifyAudioBufferReceived:sampleBuffer];
// if(blockBuffer)
// CFRelease(blockBuffer);
}
}
These CMSampleBuffers are then received in the following method:
- (void)airPlayServer:(id)server audioBufferReceived:(CMSampleBufferRef)buffer {
if(recordingStarted){
[decoderAndRecorder.recorder newAudioSample:buffer];
}
// CFRelease(buffer);
}
From there the buffers go to an AVAssetWriter class, where I append them to record the audio into a file. The AVAssetWriter class is as follows:
- (instancetype)init
{
self = [super init];
if (self) {
// Create asset writer
dateString = [self dateInString];
NSString *documents = [NSSearchPathForDirectoriesInDomains (NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex: 0];
filePath = [documents stringByAppendingPathComponent:[NSString stringWithFormat:@"screen recording - %@.caf",dateString]];
if ([[NSFileManager defaultManager] fileExistsAtPath:filePath]) {
[[NSFileManager defaultManager] removeItemAtPath:filePath
error:nil];
}
NSLog(@"%@", filePath);
NSError *error = nil;
_assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:filePath]
fileType:AVFileTypeCoreAudioFormat
error:&error];
AudioChannelLayout acl;
memset(&acl,0, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioOutputSettings = nil;
audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatLinearPCM ], AVFormatIDKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
[ NSNumber numberWithInt: 16 ], AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
[NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
nil ];
_audioWriterInput = [[AVAssetWriterInput alloc]
initWithMediaType: AVMediaTypeAudio
outputSettings: audioOutputSettings ];
_audioWriterInput.expectsMediaDataInRealTime = YES;
NSParameterAssert(_audioWriterInput);
NSParameterAssert([_assetWriter canAddInput:_audioWriterInput]);
[_assetWriter addInput:_audioWriterInput];
}
}
return self;
}
-(void)startSession{
//This is video recorder code
_isStarted =[_assetWriter startWriting];
if(_isStarted)
{
NSLog(@"======== Session started");
[_assetWriter startSessionAtSourceTime:_startingTime];
}
else {
NSLog(@"======== Session failed : %@", _assetWriter.error);
}
}
-(void)endSession
{
_isStarted = NO;
[_audioWriterInput markAsFinished];
[_assetWriter finishWritingWithCompletionHandler:^{
NSLog(@"====== File Saved Successfully =======");
}];
}
- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer
{
if (_isStarted) {
if (_videoWriter.status == AVAssetWriterStatusWriting) {
// if(_audioWriterInput.readyForMoreMediaData){
//NSLog(@"Warning: writer status is %d", _videoWriter.status)
if(sampleBuffer){
if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
NSLog(@"Unable to write to audio input");
}else{
NSLog(@"====Audio is being recorded===");
}
}
// }
}else if(_videoWriter.status == AVAssetWriterStatusFailed){
NSLog(@"<----- Appending failed ----->");
NSLog(@"Error: %@", _videoWriter.error);
}
}
}
That's all. The file is created, but with zero bytes; no data is written to it. Please help me with this problem.
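For comparison, a minimal audio-only writer flow usually checks the status of the same AVAssetWriter that owns the input and starts the session at the presentation timestamp of the first buffer it appends. This is only a sketch under those assumptions (self.writer and self.audioInput are hypothetical properties, not the code above):
// Sketch: append audio buffers, starting the session at the first buffer's PTS.
- (void)appendAudioSample:(CMSampleBufferRef)sampleBuffer
{
    if (self.writer.status == AVAssetWriterStatusUnknown) {
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        [self.writer startWriting];
        [self.writer startSessionAtSourceTime:pts]; // session time base matches the buffers being appended
    }
    if (self.writer.status == AVAssetWriterStatusWriting && self.audioInput.isReadyForMoreMediaData) {
        if (![self.audioInput appendSampleBuffer:sampleBuffer]) {
            NSLog(@"Append failed: %@", self.writer.error);
        }
    }
}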
I want to record a UIView in iOS. I have tried, but I am not getting a clear video. A screenshot is here:
https://i.stack.imgur.com/Gmwkr.png. Please suggest a fix; it looks like there is a problem with the frame I am passing.
#import "screerecorder.h"
#import <QuartzCore/QuartzCore.h>
#import <MobileCoreServices/UTCoreTypes.h>
#import <AssetsLibrary/AssetsLibrary.h>
@interface screerecorder(Private)
- (void) writeVideoFrameAtTime:(CMTime)time;
@end
@implementation screerecorder
@synthesize currentScreen, frameRate, delegate;
- (void) initialize {
// Initialization code
self.clearsContextBeforeDrawing = YES;
self.currentScreen = nil;
self.frameRate = 10.0f; //10 frames per second
_recording = false;
videoWriter = nil;
videoWriterInput = nil;
avAdaptor = nil;
startedAt = nil;
bitmapData = NULL;
}
- (id) initWithCoder:(NSCoder *)aDecoder {
self = [super initWithCoder:aDecoder];
if (self) {
[self initialize];
}
return self;
}
- (id) init {
self = [super init];
if (self) {
[self initialize];
}
return self;
}
- (id)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self) {
[self initialize];
}
return self;
}
- (CGContextRef) createBitmapContextOfSize:(CGSize) size {
CGContextRef context = NULL;
CGColorSpaceRef colorSpace;
int bitmapByteCount;
int bitmapBytesPerRow;
bitmapBytesPerRow = (size.width * 4);
bitmapByteCount = (bitmapBytesPerRow * size.height);
colorSpace = CGColorSpaceCreateDeviceRGB();
if (bitmapData != NULL) {
free(bitmapData);
}
bitmapData = malloc( bitmapByteCount );
if (bitmapData == NULL) {
fprintf (stderr, "Memory not allocated!");
return NULL;
}
context = CGBitmapContextCreate (bitmapData,
size.width,
size.height,
8, // bits per component
bitmapBytesPerRow,
colorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextSetAllowsAntialiasing(context,NO);
if (context== NULL) {
free (bitmapData);
fprintf (stderr, "Context not created!");
return NULL;
}
CGColorSpaceRelease( colorSpace );
return context;
}
//static int frameCount = 0; //debugging
- (void) drawRect:(CGRect)rect {
NSDate* start = [NSDate date];
CGContextRef context = [self createBitmapContextOfSize:self.frame.size];
//not sure why this is necessary...image renders upside-down and mirrored
// CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
//CGContextConcatCTM(context, flipVertical);
[self.layer renderInContext:context];
CGImageRef cgImage = CGBitmapContextCreateImage(context);
UIImage* background = [UIImage imageWithCGImage: cgImage];
// CGImageRelease(cgImage);
self.currentScreen = background;
//debugging
//if (frameCount < 40) {
// NSString* filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
// NSString* pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
// [UIImagePNGRepresentation(self.currentScreen) writeToFile: pngPath atomically: YES];
// frameCount++;
//}
//NOTE: to record a scrollview while it is scrolling you need to implement your UIScrollViewDelegate such that it calls
// 'setNeedsDisplay' on the ScreenCaptureView.
if (_recording) {
float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
[self writeVideoFrameAtTime:CMTimeMake((int)millisElapsed, 1000)];
}
float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
float delayRemaining = (1.0 / self.frameRate) - processingSeconds;
CGContextRelease(context);
//redraw at the specified framerate
[self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:delayRemaining > 0.0 ? delayRemaining : 0.01];
}
- (void) cleanupWriter {
avAdaptor = nil;
videoWriterInput = nil;
videoWriter = nil;
startedAt = nil;
if (bitmapData != NULL) {
free(bitmapData);
bitmapData = NULL;
}
}
- (void)dealloc {
[self cleanupWriter];
}
- (NSURL*) tempFileURL {
NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
NSURL* outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSError* error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
NSLog(@"Could not delete old recording file at path: %@", outputPath);
}
}
return outputURL;
}
-(BOOL) setUpWriter {
NSError* error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
//Configure video
NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
[NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] ;
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes] ;
//add input
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
return YES;
}
- (void) completeRecordingSession {
[videoWriterInput markAsFinished];
// Wait for the video
int status = videoWriter.status;
while (status == AVAssetWriterStatusUnknown) {
NSLog(@"Waiting...");
[NSThread sleepForTimeInterval:0.5f];
status = videoWriter.status;
}
@synchronized(self) {
BOOL success = [videoWriter finishWriting];
if (!success) {
NSLog(@"finishWriting returned NO");
}
[self cleanupWriter];
id delegateObj = self.delegate;
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSLog(@"Completed recording, file is stored at: %@", outputURL);
obj = [[ViewController alloc]init];
[obj mergeAudioandvideo:outputURL];
if ([delegateObj respondsToSelector:@selector(recordingFinished:)]) {
[delegateObj performSelectorOnMainThread:@selector(recordingFinished:) withObject:(success ? outputURL : nil) waitUntilDone:YES];
}
}
}
- (bool) startRecording {
bool result = NO;
@synchronized(self) {
if (! _recording) {
result = [self setUpWriter];
startedAt = [NSDate date];
_recording = true;
}
}
return result;
}
- (void) stopRecording {
@synchronized(self) {
if (_recording) {
_recording = false;
[self completeRecordingSession];
}
}
}
-(void) writeVideoFrameAtTime:(CMTime)time {
if (![videoWriterInput isReadyForMoreMediaData]) {
NSLog(@"Not ready for video data");
}
else {
@synchronized (self) {
UIImage* newFrame = self.currentScreen;
CVPixelBufferRef pixelBuffer = NULL;
CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
if(status != 0){
//could not get a buffer from the pool
NSLog(@"Error creating pixel buffer: status=%d", status);
}
// set image data into pixel buffer
CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
uint8_t* destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels); //XXX: will work if the pixel buffer is contiguous and has the same bytesPerRow as the input data
if(status == 0){
BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success)
NSLog(@"Warning: Unable to write buffer to video");
}
//clean up
CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
CVPixelBufferRelease( pixelBuffer );
CFRelease(image);
CGImageRelease(cgImage);
}
}
}
@end
You can try this library - Glimpse. It can record any UIView.
My camera was taking pictures and recording videos perfectly (using AVCaptureMovieFileOutput), and I was able to toggle the camera position between front and rear normally. However, as in Instagram, Snapchat, and a myriad of other apps, I also wanted to allow the user to toggle the camera position while recording video.
It seems that to achieve this I need to work with AVCaptureVideoDataOutput instead, because it can handle the individual frames, but I can't really get it to work. Everything seems to go fine, but after I finish the video it just doesn't play, and there seems to be no resulting URL from the captureOutput method. Here is my code:
- (void)initialize{
if(!_session) {
_session = [[AVCaptureSession alloc] init];
_session.sessionPreset = self.cameraQuality;
// preview layer
CGRect bounds = self.preview.layer.bounds;
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_captureVideoPreviewLayer.bounds = bounds;
_captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
[self.preview.layer addSublayer:_captureVideoPreviewLayer];
AVCaptureDevicePosition devicePosition;
switch (self.position) {
case LLCameraPositionRear:
if([self.class isRearCameraAvailable]) {
devicePosition = AVCaptureDevicePositionBack;
} else {
devicePosition = AVCaptureDevicePositionFront;
_position = LLCameraPositionFront;
}
break;
case LLCameraPositionFront:
if([self.class isFrontCameraAvailable]) {
devicePosition = AVCaptureDevicePositionFront;
} else {
devicePosition = AVCaptureDevicePositionBack;
_position = LLCameraPositionRear;
}
break;
default:
devicePosition = AVCaptureDevicePositionUnspecified;
break;
}
if(devicePosition == AVCaptureDevicePositionUnspecified) {
_videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
} else {
_videoCaptureDevice = [self cameraWithPosition:devicePosition];
}
NSError *error = nil;
_videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_videoCaptureDevice error:&error];
if (!_videoDeviceInput) {
if(self.onError) {
self.onError(self, error);
}
return;
}
if([self.session canAddInput:_videoDeviceInput]) {
[self.session addInput:_videoDeviceInput];
// self.captureVideoPreviewLayer.connection.videoOrientation = [self orientationForConnection];
}
// add audio if video is enabled
if(self.videoEnabled) {
_audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
_audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_audioCaptureDevice error:&error];
if (!_audioDeviceInput) {
if(self.onError) {
self.onError(self, error);
}
}
if([self.session canAddInput:_audioDeviceInput]) {
[self.session addInput:_audioDeviceInput];
}
// Setup the video output
_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
_videoOutput.alwaysDiscardsLateVideoFrames = NO;
_videoOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
//[NSDictionary dictionaryWithObject:
//[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// Setup the audio input
_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
// Create the session
[_session addOutput:_videoOutput];
[_session addOutput:_audioOutput];
// Setup the queue
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[_videoOutput setSampleBufferDelegate:self queue:queue];
[_audioOutput setSampleBufferDelegate:self queue:queue];
}
// continuously adjust white balance
self.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
// image output
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[self.session addOutput:self.stillImageOutput];
}
//if we had disabled the connection on capture, re-enable it
if (![self.captureVideoPreviewLayer.connection isEnabled]) {
[self.captureVideoPreviewLayer.connection setEnabled:YES];
}
// [_assetWriter startWriting];
//[_assetWriter startSessionAtSourceTime:kCMTimeZero];
[self.session startRunning];
}
- (void)stop
{
[self.session stopRunning];
}
-(BOOL) setupWriter:(NSURL*)url {
NSError *error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
// Add video input
NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:128.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:192], AVVideoWidthKey,
[NSNumber numberWithInt:144], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioOutputSettings = nil;
// Both types of audio input cause the output video file to be corrupted.
if( NO ) {
// should work from iphone 3GS on and from ipod 3rd generation
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];
} else {
// should work on any device, requires more space
audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
[ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil ];
}
_audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType: AVMediaTypeAudio
outputSettings: audioOutputSettings ];
_audioWriterInput.expectsMediaDataInRealTime = YES;
// add input
[_videoWriter addInput:_videoWriterInput];
[_videoWriter addInput:_audioWriterInput];
return YES;
}
-(void) startVideoRecording
{
if( !self.recording )
{
NSURL* url = [[NSURL alloc] initFileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]];
//if(!debug){
[[NSFileManager defaultManager] removeItemAtURL:url error:nil];
//}
NSLog(@"start video recording...");
if( ![self setupWriter:url] ) {
NSLog(@"Setup Writer Failed");
return;
}
// [_session startRunning] ;
self.recording = YES;
}
}
-(void) stopVideoRecording:(void (^)(LLSimpleCamera *camera, NSURL *outputFileUrl, NSError *error))completionBlock
{
NSLog(@"STOP RECORDING");
if(!self.videoEnabled) {
return;
}
if( self.recording )
{
self.recording = NO;
self.didRecord = completionBlock;
[_session stopRunning] ;
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^
{
if(![_videoWriter finishWriting]) {
NSLog(@"finishWriting returned NO");
}
});
//[_videoWriter endSessionAtSourceTime:lastSampleTime];
//[_videoWriterInput markAsFinished];
//[_audioWriterInput markAsFinished];
NSLog(@"video recording stopped");
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
NSLog(@"CALLING CAPTUREOUTPUT");
self.recording = NO;
[self enableTorch:NO];
if( !CMSampleBufferDataIsReady(sampleBuffer) )
{
NSLog(@"sample buffer is not ready. Skipping sample");
return;
}
/*if(self.didRecord) {
NSLog(@"DID RECORD EXISTS !!!");
self.didRecord(self, outputFileURL, error);
}*/
//THE ABOVE CODE WOULD GET THE outputFileURL from the captureOutput method delegate if I used AVCaptureMovieFileOutput
if( self.recording == YES )
{
_lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if( _videoWriter.status != AVAssetWriterStatusWriting )
{
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:_lastSampleTime];
}
if( captureOutput == _videoOutput )
[self newVideoSample:sampleBuffer];
else if( captureOutput == _audioOutput) {
[self newAudioSample:sampleBuffer];
}
/*
// If I add audio to the video, then the output file gets corrupted and it cannot be reproduced
else
[self newAudioSample:sampleBuffer];
*/
}
}
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer
{
if( self.recording )
{
if( _videoWriter.status > AVAssetWriterStatusWriting )
{
NSLog(@"Warning: writer status is %ld", _videoWriter.status);
if( _videoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", _videoWriter.error);
return;
}
if( ![_videoWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to video input");
}
}
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer
{
if( self.recording )
{
if( _videoWriter.status > AVAssetWriterStatusWriting )
{
NSLog(@"Warning: writer status is %ld", _videoWriter.status);
if( _videoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", _videoWriter.error);
return;
}
if( ![_audioWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to audio input");
}
}
PS1: Here are references to similar questions
Change camera capture device while recording a video
Simultaneous AVCaptureVideoDataOutput and AVCaptureMovieFileOutput
PS2: Sorry for the bad indentation in the code above. The code was perfectly indented, but somehow when I post large chunks of code here it loses its indentation.
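One thing worth checking against the code above: AVAssetWriter is normally shut down by marking each input as finished and then calling the asynchronous finishWritingWithCompletionHandler:, after the capture session has stopped delivering buffers. A sketch of that shutdown, assuming the same _videoWriter, _videoWriterInput and _audioWriterInput ivars used above:
// Sketch: orderly shutdown of the asset writer after the last buffer has been appended.
- (void)finishWriter
{
    [_videoWriterInput markAsFinished];
    [_audioWriterInput markAsFinished];
    [_videoWriter finishWritingWithCompletionHandler:^{
        if (_videoWriter.status == AVAssetWriterStatusCompleted) {
            NSLog(@"Finished writing to %@", _videoWriter.outputURL);
        } else {
            NSLog(@"Writer finished with status %ld, error: %@",
                  (long)_videoWriter.status, _videoWriter.error);
        }
    }];
}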
I am working on an iOS app that uses AVAudioEngine for various things, including recording audio to a file, applying effects to that audio using audio units, and playing back the audio with the effect applied. I use a tap to also write the output to a file. When this is done it writes to the file in real time as the audio is playing back.
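(For context, the real-time tap mentioned above is typically installed on the engine's main mixer and hands each rendered buffer to an AVAudioFile. A sketch under that assumption, with engine and outputURL as placeholders:)
// Sketch: tap the main mixer and write each rendered buffer to a file in real time.
AVAudioMixerNode *mixer = engine.mainMixerNode; // engine: an already-configured AVAudioEngine
AVAudioFormat *tapFormat = [mixer outputFormatForBus:0];
NSError *fileError = nil;
AVAudioFile *tapFile = [[AVAudioFile alloc] initForWriting:outputURL // outputURL: placeholder destination
                                                  settings:tapFormat.settings
                                                     error:&fileError];
[mixer installTapOnBus:0 bufferSize:4096 format:tapFormat block:^(AVAudioPCMBuffer *buffer, AVAudioTime *when) {
    NSError *writeError = nil;
    if (![tapFile writeFromBuffer:buffer error:&writeError]) {
        NSLog(@"Tap write failed: %@", writeError);
    }
}];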
Is it possible to set up an AVAudioEngine graph that reads from a file, processes the sound with an audio unit, and outputs to a file, but faster than real time (i.e., as fast as the hardware can process it)? The use case for this would be to output a few minutes of audio with effects applied, and I certainly wouldn't want to wait a few minutes for it to be processed.
Edit: here's the code that I'm using to set up the AVAudioEngine's graph, and play a sound file:
AVAudioEngine* engine = [[AVAudioEngine alloc] init];
AVAudioPlayerNode* player = [[AVAudioPlayerNode alloc] init];
[engine attachNode:player];
self.player = player;
self.engine = engine;
if (!self.distortionEffect) {
self.distortionEffect = [[AVAudioUnitDistortion alloc] init];
[self.engine attachNode:self.distortionEffect];
[self.engine connect:self.player to:self.distortionEffect format:[self.distortionEffect outputFormatForBus:0]];
AVAudioMixerNode* mixer = [self.engine mainMixerNode];
[self.engine connect:self.distortionEffect to:mixer format:[mixer outputFormatForBus:0]];
}
[self.distortionEffect loadFactoryPreset:AVAudioUnitDistortionPresetDrumsBitBrush];
NSError* error;
if (![self.engine startAndReturnError:&error]) {
NSLog(@"error: %@", error);
} else {
NSURL* fileURL = [[NSBundle mainBundle] URLForResource:@"test2" withExtension:@"mp3"];
AVAudioFile* file = [[AVAudioFile alloc] initForReading:fileURL error:&error];
if (error) {
NSLog(@"error: %@", error);
} else {
[self.player scheduleFile:file atTime:nil completionHandler:nil];
[self.player play];
}
}
The above code plays the sound in the test2.mp3 file, with the AVAudioUnitDistortionPresetDrumsBitBrush distortion preset applied, in real time.
I then modified the above code by adding these lines after [self.player play]:
[self.engine stop];
[self renderAudioAndWriteToFile];
I modified the renderAudioAndWriteToFile method that Vladimir provided so that instead of allocating a new AVAudioEngine in the first line, it simply uses self.engine that has already been set up.
However, in renderAudioAndWriteToFile, it's logging "Can not render audio unit" because AudioUnitRender is returning a status of kAudioUnitErr_Uninitialized.
Edit 2: I should mention that I'm perfectly happy to convert the AVAudioEngine code I posted to use the C APIs if that would make things easier. However, I would want the code to produce the same output as the AVAudioEngine code (including the use of the factory preset shown above).
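(Note added for reference: newer SDKs, iOS 11 and macOS 10.13 onward, expose exactly this as AVAudioEngine's offline manual rendering mode, which was not available when this thread was written. A sketch, assuming engine, player and sourceFile are the already-configured engine, player node and AVAudioFile, and outputURL is a placeholder:)
// Sketch: faster-than-real-time rendering via AVAudioEngine manual rendering mode (iOS 11+/macOS 10.13+).
NSError *error = nil;
[engine stop];
[engine enableManualRenderingMode:AVAudioEngineManualRenderingModeOffline
                           format:sourceFile.processingFormat
                maximumFrameCount:4096
                            error:&error];
[engine startAndReturnError:&error];
[player scheduleFile:sourceFile atTime:nil completionHandler:nil];
[player play];
AVAudioPCMBuffer *renderBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:engine.manualRenderingFormat
                                                               frameCapacity:engine.manualRenderingMaximumFrameCount];
AVAudioFile *renderedFile = [[AVAudioFile alloc] initForWriting:outputURL // placeholder URL
                                                       settings:sourceFile.fileFormat.settings
                                                          error:&error];
while (engine.manualRenderingSampleTime < sourceFile.length) {
    AVAudioFrameCount framesLeft = (AVAudioFrameCount)(sourceFile.length - engine.manualRenderingSampleTime);
    AVAudioFrameCount framesToRender = MIN(framesLeft, renderBuffer.frameCapacity);
    AVAudioEngineManualRenderingStatus status = [engine renderOffline:framesToRender toBuffer:renderBuffer error:&error];
    if (status != AVAudioEngineManualRenderingStatusSuccess) {
        break; // insufficient data or an error; stop rendering
    }
    [renderedFile writeFromBuffer:renderBuffer error:&error];
}
[engine stop];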
Configure your engine and player node.
Call the play method on your player node.
Pause your engine.
Get an audio unit from your AVAudioOutputNode (audioEngine.outputNode) with this method.
Render from the audio unit with AudioUnitRender in a loop, and write the audio buffer list to a file with Extended Audio File Services.
Example:
Audio engine configuration
- (void)configureAudioEngine {
self.engine = [[AVAudioEngine alloc] init];
self.playerNode = [[AVAudioPlayerNode alloc] init];
[self.engine attachNode:self.playerNode];
AVAudioUnitDistortion *distortionEffect = [[AVAudioUnitDistortion alloc] init];
[self.engine attachNode:distortionEffect];
[self.engine connect:self.playerNode to:distortionEffect format:[distortionEffect outputFormatForBus:0]];
self.mixer = [self.engine mainMixerNode];
[self.engine connect:distortionEffect to:self.mixer format:[self.mixer outputFormatForBus:0]];
[distortionEffect loadFactoryPreset:AVAudioUnitDistortionPresetDrumsBitBrush];
NSError* error;
if (![self.engine startAndReturnError:&error])
NSLog(@"Can't start engine: %@", error);
else
[self scheduleFileToPlay];
}
- (void)scheduleFileToPlay {
NSError* error;
NSURL *fileURL = [[NSBundle mainBundle] URLForResource:@"filename" withExtension:@"m4a"];
self.file = [[AVAudioFile alloc] initForReading:fileURL error:&error];
if (self.file)
[self.playerNode scheduleFile:self.file atTime:nil completionHandler:nil];
else
NSLog(@"Can't read file: %@", error);
}
Rendering methods
- (void)renderAudioAndWriteToFile {
[self.playerNode play];
[self.engine pause];
AVAudioOutputNode *outputNode = self.engine.outputNode;
AudioStreamBasicDescription const *audioDescription = [outputNode outputFormatForBus:0].streamDescription;
NSString *path = [self filePath];
ExtAudioFileRef audioFile = [self createAndSetupExtAudioFileWithASBD:audioDescription andFilePath:path];
if (!audioFile)
return;
AVURLAsset *asset = [AVURLAsset assetWithURL:self.file.url];
NSTimeInterval duration = CMTimeGetSeconds(asset.duration);
NSUInteger lengthInFrames = duration * audioDescription->mSampleRate;
const NSUInteger kBufferLength = 4096;
AudioBufferList *bufferList = AEAllocateAndInitAudioBufferList(*audioDescription, kBufferLength);
AudioTimeStamp timeStamp;
memset (&timeStamp, 0, sizeof(timeStamp));
timeStamp.mFlags = kAudioTimeStampSampleTimeValid;
OSStatus status = noErr;
for (NSUInteger i = kBufferLength; i < lengthInFrames; i += kBufferLength) {
status = [self renderToBufferList:bufferList writeToFile:audioFile bufferLength:kBufferLength timeStamp:&timeStamp];
if (status != noErr)
break;
}
if (status == noErr && timeStamp.mSampleTime < lengthInFrames) {
NSUInteger restBufferLength = (NSUInteger) (lengthInFrames - timeStamp.mSampleTime);
AudioBufferList *restBufferList = AEAllocateAndInitAudioBufferList(*audioDescription, restBufferLength);
status = [self renderToBufferList:restBufferList writeToFile:audioFile bufferLength:restBufferLength timeStamp:&timeStamp];
AEFreeAudioBufferList(restBufferList);
}
AEFreeAudioBufferList(bufferList);
ExtAudioFileDispose(audioFile);
if (status != noErr)
NSLog(@"An error has occurred");
else
NSLog(@"Finished writing to file at path: %@", path);
}
- (NSString *)filePath {
NSArray *documentsFolders =
NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *fileName = [NSString stringWithFormat:@"%@.m4a", [[NSUUID UUID] UUIDString]];
NSString *path = [documentsFolders[0] stringByAppendingPathComponent:fileName];
return path;
}
- (ExtAudioFileRef)createAndSetupExtAudioFileWithASBD:(AudioStreamBasicDescription const *)audioDescription
andFilePath:(NSString *)path {
AudioStreamBasicDescription destinationFormat;
memset(&destinationFormat, 0, sizeof(destinationFormat));
destinationFormat.mChannelsPerFrame = audioDescription->mChannelsPerFrame;
destinationFormat.mSampleRate = audioDescription->mSampleRate;
destinationFormat.mFormatID = kAudioFormatMPEG4AAC;
ExtAudioFileRef audioFile;
OSStatus status = ExtAudioFileCreateWithURL(
(__bridge CFURLRef) [NSURL fileURLWithPath:path],
kAudioFileM4AType,
&destinationFormat,
NULL,
kAudioFileFlags_EraseFile,
&audioFile
);
if (status != noErr) {
NSLog(@"Can not create ext audio file");
return nil;
}
UInt32 codecManufacturer = kAppleSoftwareAudioCodecManufacturer;
status = ExtAudioFileSetProperty(
audioFile, kExtAudioFileProperty_CodecManufacturer, sizeof(UInt32), &codecManufacturer
);
status = ExtAudioFileSetProperty(
audioFile, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), audioDescription
);
status = ExtAudioFileWriteAsync(audioFile, 0, NULL);
if (status != noErr) {
NSLog(@"Can not setup ext audio file");
return nil;
}
return audioFile;
}
- (OSStatus)renderToBufferList:(AudioBufferList *)bufferList
writeToFile:(ExtAudioFileRef)audioFile
bufferLength:(NSUInteger)bufferLength
timeStamp:(AudioTimeStamp *)timeStamp {
[self clearBufferList:bufferList];
AudioUnit outputUnit = self.engine.outputNode.audioUnit;
OSStatus status = AudioUnitRender(outputUnit, 0, timeStamp, 0, bufferLength, bufferList);
if (status != noErr) {
NSLog(@"Can not render audio unit");
return status;
}
timeStamp->mSampleTime += bufferLength;
status = ExtAudioFileWrite(audioFile, bufferLength, bufferList);
if (status != noErr)
NSLog(@"Can not write audio to file");
return status;
}
- (void)clearBufferList:(AudioBufferList *)bufferList {
for (int bufferIndex = 0; bufferIndex < bufferList->mNumberBuffers; bufferIndex++) {
memset(bufferList->mBuffers[bufferIndex].mData, 0, bufferList->mBuffers[bufferIndex].mDataByteSize);
}
}
I used some functions from this cool framework:
AudioBufferList *AEAllocateAndInitAudioBufferList(AudioStreamBasicDescription audioFormat, int frameCount) {
int numberOfBuffers = audioFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved ? audioFormat.mChannelsPerFrame : 1;
int channelsPerBuffer = audioFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved ? 1 : audioFormat.mChannelsPerFrame;
int bytesPerBuffer = audioFormat.mBytesPerFrame * frameCount;
AudioBufferList *audio = malloc(sizeof(AudioBufferList) + (numberOfBuffers-1)*sizeof(AudioBuffer));
if ( !audio ) {
return NULL;
}
audio->mNumberBuffers = numberOfBuffers;
for ( int i=0; i<numberOfBuffers; i++ ) {
if ( bytesPerBuffer > 0 ) {
audio->mBuffers[i].mData = calloc(bytesPerBuffer, 1);
if ( !audio->mBuffers[i].mData ) {
for ( int j=0; j<i; j++ ) free(audio->mBuffers[j].mData);
free(audio);
return NULL;
}
} else {
audio->mBuffers[i].mData = NULL;
}
audio->mBuffers[i].mDataByteSize = bytesPerBuffer;
audio->mBuffers[i].mNumberChannels = channelsPerBuffer;
}
return audio;
}
void AEFreeAudioBufferList(AudioBufferList *bufferList ) {
for ( int i=0; i<bufferList->mNumberBuffers; i++ ) {
if ( bufferList->mBuffers[i].mData ) free(bufferList->mBuffers[i].mData);
}
free(bufferList);
}
I'm working on a personal iOS project that requires full screen videos (15 seconds in length) to be uploaded to a backend over a 4G connection. While I can take videos just fine, the output file comes out to 30MB, which makes me think I'm doing something drastically wrong when it comes to compression. Below is the code I'm using to set up the AssetWriter:
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
NSLog(@"Started Recording! *******************");
self.movieWriter = [AVAssetWriter assetWriterWithURL:fileURL fileType:AVFileTypeMPEG4 error:nil];
[self.movieWriter setShouldOptimizeForNetworkUse:YES];
NSDictionary *videoCleanApertureSettings = @{
AVVideoCleanApertureWidthKey: [NSNumber numberWithFloat:self.view.frame.size.width],
AVVideoCleanApertureHeightKey: [NSNumber numberWithFloat:self.view.frame.size.height],
AVVideoCleanApertureHorizontalOffsetKey: [NSNumber numberWithInt:10],
AVVideoCleanApertureVerticalOffsetKey: [NSNumber numberWithInt:10],
};
NSDictionary *videoCompressionSettings = @{
AVVideoAverageBitRateKey: [NSNumber numberWithFloat:5000000.0],
AVVideoMaxKeyFrameIntervalKey: [NSNumber numberWithInteger:1],
AVVideoProfileLevelKey: AVVideoProfileLevelH264Baseline30,
AVVideoCleanApertureKey: videoCleanApertureSettings,
};
NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithFloat:self.view.frame.size.width],
AVVideoHeightKey: [NSNumber numberWithFloat:self.view.frame.size.height],
AVVideoCompressionPropertiesKey: videoCompressionSettings,
};
self.movieWriterVideoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
self.movieWriterVideoInput.expectsMediaDataInRealTime = YES;
[self.movieWriter addInput:self.movieWriterVideoInput];
NSDictionary *audioSettings = @{AVFormatIDKey: [NSNumber numberWithInteger:kAudioFormatMPEG4AAC],
AVSampleRateKey: [NSNumber numberWithFloat:44100.0],
AVNumberOfChannelsKey: [NSNumber numberWithInteger:1],
};
self.movieWriterAudioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
self.movieWriterAudioInput.expectsMediaDataInRealTime = YES;
[self.movieWriter addInput:self.movieWriterAudioInput];
[self.movieWriter startWriting];
}
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(@"Done Recording!");
[self.movieWriterVideoInput markAsFinished];
[self.movieWriterAudioInput markAsFinished];
[self.movieWriter finishWritingWithCompletionHandler:^{
AVURLAsset *compressedVideoAsset = [[AVURLAsset alloc] initWithURL:self.movieWriter.outputURL options:nil];
//Upload video to server
}];
}
For the setup of the actual session I'm using the following code:
//Indicate that some changes will be made to the session
[self.captureSession beginConfiguration];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
AVCaptureInput* currentCameraInput = [self.captureSession.inputs objectAtIndex:0];
for (AVCaptureInput *captureInput in self.captureSession.inputs) {
[self.captureSession removeInput:captureInput];
}
//Get currently selected camera and use for input
AVCaptureDevice *videoCamera = nil;
if(((AVCaptureDeviceInput*)currentCameraInput).device.position == AVCaptureDevicePositionBack)
{
videoCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
}
else
{
videoCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
}
//Add input to session
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoCamera error:nil];
[self.captureSession addInput:newVideoInput];
//Add mic input to the session
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];
[self.captureSession addInput:audioInput];
//Add movie output to session
for (AVCaptureOutput *output in self.captureSession.outputs) {
[self.captureSession removeOutput:output];
}
self.movieOutput = [AVCaptureMovieFileOutput new];
int32_t preferredTimeScale = 30; //Frames per second
self.movieOutput.maxRecordedDuration = CMTimeMakeWithSeconds(15, preferredTimeScale); //Setting the max video length
[self.captureSession addOutput:self.movieOutput];
//Commit all the configuration changes at once
[self.captureSession commitConfiguration];
I know that if I change AVCaptureSessionPresetHigh to a different preset I can reduce the file size of the final video, but unfortunately it looks like AVCaptureSessionPresetiFrame1280x720 is the only one that provides the full frame I'm trying to capture (and that still leaves me with an output size of about 20MB, which is too large for 4G uploads).
I've spent a lot of time googling and searching through other posts on Stack Overflow, but for the life of me I can't figure out what I'm doing wrong, and any help at all would be greatly appreciated.
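(A rough sanity check on the sizes involved; my arithmetic, not from the post: compressed video size ≈ duration × bit rate ÷ 8. At the 5 Mbit/s average requested in the writer settings above, 15 s × 5,000,000 ÷ 8 ≈ 9.4 MB of video before audio and container overhead, while a 30MB / 15 s recording corresponds to roughly 30 × 8 ÷ 15 ≈ 16 Mbit/s, which is in the range AVCaptureMovieFileOutput typically produces for the High preset. That gap suggests the AVAssetWriter settings above may not be what actually governs the recorded file, and a key-frame interval of 1 also works against compression efficiency.)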
You need a PhD to work with AVAssetWriter - it's non-trivial: https://developer.apple.com/library/mac/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/05_Export.html#//apple_ref/doc/uid/TP40010188-CH9-SW1
There's an amazing library for doing exactly what you want which is just an AVAssetExportSession drop-in replacement with more crucial features like changing the bit rate: https://github.com/rs/SDAVAssetExportSession
Here's how to use it:
-(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
SDAVAssetExportSession *encoder = [SDAVAssetExportSession.alloc initWithAsset:[AVAsset assetWithURL:[info objectForKey:UIImagePickerControllerMediaURL]]];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
self.myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"lowerBitRate-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:self.myPathDocs];
encoder.outputURL=url;
encoder.outputFileType = AVFileTypeMPEG4;
encoder.shouldOptimizeForNetworkUse = YES;
encoder.videoSettings = @
{
AVVideoCodecKey: AVVideoCodecH264,
AVVideoCompressionPropertiesKey: @
{
AVVideoAverageBitRateKey: @2300000, // Lower bit rate here
AVVideoProfileLevelKey: AVVideoProfileLevelH264High40,
},
};
encoder.audioSettings = @
{
AVFormatIDKey: @(kAudioFormatMPEG4AAC),
AVNumberOfChannelsKey: @2,
AVSampleRateKey: @44100,
AVEncoderBitRateKey: @128000,
};
[encoder exportAsynchronouslyWithCompletionHandler:^
{
int status = encoder.status;
if (status == AVAssetExportSessionStatusCompleted)
{
AVAssetTrack *videoTrack = nil;
AVURLAsset *asset = [AVAsset assetWithURL:encoder.outputURL];
NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
videoTrack = [videoTracks objectAtIndex:0];
float frameRate = [videoTrack nominalFrameRate];
float bps = [videoTrack estimatedDataRate];
NSLog(@"Frame rate == %f",frameRate);
NSLog(@"bps rate == %f",bps/(1024.0 * 1024.0));
NSLog(@"Video export succeeded");
// encoder.outputURL <- this is what you want!!
}
else if (status == AVAssetExportSessionStatusCancelled)
{
NSLog(@"Video export cancelled");
}
else
{
NSLog(@"Video export failed with error: %@ (%ld)", encoder.error.localizedDescription, (long)encoder.error.code);
}
}];
}
I want to change the container of the .mov video files that I pick using
UIImagePickerController and compress via AVAssetExportSession with AVAssetExportPresetMediumQuality and shouldOptimizeForNetworkUse = YES to an .mp4 container.
I need a programmatic way (sample code) to perform the fastest possible trans-wrap in an iPhone/iPad application.
I tried setting the AVAssetExportSession.outputFileType property to AVFileTypeMPEG4, but it is not supported and I got an exception.
I tried to do this transform using AVAssetWriter by specifying fileType:AVFileTypeMPEG4. I did get an .mp4 output file, but it was not a trans-wrap: the output file was 3x bigger than the source, and the conversion took 128 seconds for a video with a 60-second duration.
I need a solution that runs quickly and keeps the file size.
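For reference, the usual way to attempt a pure remux is an AVAssetExportSession with AVAssetExportPresetPassthrough, checking supportedFileTypes before forcing MPEG-4; whether .mp4 is offered depends on the codecs in the source asset. A sketch, with sourceURL and outputURL as placeholders:
// Sketch: passthrough export (no re-encode) to MP4 when the session supports it.
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:sourceURL options:nil]; // sourceURL: placeholder
AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset
                                                                 presetName:AVAssetExportPresetPassthrough];
if ([session.supportedFileTypes containsObject:AVFileTypeMPEG4]) {
    session.outputFileType = AVFileTypeMPEG4;
} else {
    session.outputFileType = AVFileTypeQuickTimeMovie; // fall back to .mov if MP4 isn't offered
}
session.outputURL = outputURL; // outputURL: placeholder
session.shouldOptimizeForNetworkUse = YES;
[session exportAsynchronouslyWithCompletionHandler:^{
    if (session.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"Trans-wrap finished: %@", session.outputURL);
    } else {
        NSLog(@"Trans-wrap failed: %@", session.error);
    }
}];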
This is the code I use to convert .mov to .mp4:
I set the assetWriter options in the setUpReaderAndWriterReturningError method.
#import "MCVideoConverter.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVAssetReader.h>
#import <AVFoundation/AVAssetReaderOutput.h>
#import <AVFoundation/AVAssetWriter.h>
#import <AVFoundation/AVAssetWriterInput.h>
#import <AVFoundation/AVMediaFormat.h>
#import <AVFoundation/AVAudioSettings.h>
#import <AVFoundation/AVVideoSettings.h>
#import <AVFoundation/AVAssetImageGenerator.h>
#import <AVFoundation/AVTime.h>
#import <CoreMedia/CMSampleBuffer.h>
@protocol RWSampleBufferChannelDelegate;
@interface RWSampleBufferChannel : NSObject
{
@private
AVAssetReaderOutput *assetReaderOutput;
AVAssetWriterInput *assetWriterInput;
dispatch_block_t completionHandler;
dispatch_queue_t serializationQueue;
BOOL finished; // only accessed on serialization queue
}
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)assetReaderOutput assetWriterInput:(AVAssetWriterInput *)assetWriterInput;
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)completionHandler; // delegate is retained until completion handler is called. Completion handler is guaranteed to be called exactly once, whether reading/writing finishes, fails, or is cancelled. Delegate may be nil.
- (void)cancel;
@property (nonatomic, readonly) NSString *mediaType;
@end
@protocol RWSampleBufferChannelDelegate <NSObject>
@required
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
@interface MCVideoConverter () <RWSampleBufferChannelDelegate>
// These three methods are always called on the serialization dispatch queue
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError; // make sure "tracks" key of asset is loaded before calling this
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError;
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error;
@end
@implementation MCVideoConverter
+ (NSArray *)readableTypes
{
return [AVURLAsset audiovisualTypes];;
}
+ (BOOL)canConcurrentlyReadDocumentsOfType:(NSString *)typeName
{
return YES;
}
- (id)init
{
self = [super init];
if (self)
{
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[asset release];
[outputURL release];
[assetReader release];
[assetWriter release];
[audioSampleBufferChannel release];
[videoSampleBufferChannel release];
if (serializationQueue)
dispatch_release(serializationQueue);
[super dealloc];
}
@synthesize asset=asset;
@synthesize timeRange=timeRange;
@synthesize writingSamples=writingSamples;
@synthesize outputURL=outputURL;
@synthesize propgerssView;
- (void)convertVideo:(NSURL*) inputURL outputURL: (NSURL*) _outputURL progress:(UIProgressView*) _propgerssView
{
self.asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
self.propgerssView = _propgerssView;
cancelled = NO;
[self performSelector:@selector(startProgressSheetWithURL:) withObject:_outputURL afterDelay:0.0]; // avoid starting a new sheet while in
}
- (void)startProgressSheetWithURL:(NSURL *)localOutputURL
{
[self setOutputURL:localOutputURL];
[self setWritingSamples:YES];
AVAsset *localAsset = [self asset];
[localAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"tracks", @"duration", nil] completionHandler:^
{
// Dispatch the setup work to the serialization queue, to ensure this work is serialized with potential cancellation
dispatch_async(serializationQueue, ^{
// Since we are doing these things asynchronously, the user may have already cancelled on the main thread. In that case, simply return from this block
if (cancelled)
return;
BOOL success = YES;
NSError *localError = nil;
success = ([localAsset statusOfValueForKey:@"tracks" error:&localError] == AVKeyValueStatusLoaded);
if (success)
success = ([localAsset statusOfValueForKey:@"duration" error:&localError] == AVKeyValueStatusLoaded);
if (success)
{
[self setTimeRange:CMTimeRangeMake(kCMTimeZero, [localAsset duration])];
// AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists
NSFileManager *fm = [NSFileManager defaultManager];
NSString *localOutputPath = [localOutputURL path];
if ([fm fileExistsAtPath:localOutputPath])
success = [fm removeItemAtPath:localOutputPath error:&localError];
}
// Set up the AVAssetReader and AVAssetWriter, then begin writing samples or flag an error
if (success)
success = [self setUpReaderAndWriterReturningError:&localError];
if (success)
success = [self startReadingAndWritingReturningError:&localError];
if (!success)
[self readingAndWritingDidFinishSuccessfully:success withError:localError];
});
}];
}
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
AVAsset *localAsset = [self asset];
NSURL *localOutputURL = [self outputURL];
// Create asset reader and asset writer
assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&localError];
success = (assetReader != nil);
if (success)
{
//changed assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeQuickTimeMovie error:&localError];
assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeMPEG4 error:&localError];
success = (assetWriter != nil);
}
// Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
if (success)
{
AVAssetTrack *audioTrack = nil, *videoTrack = nil;
// Grab first audio track and first video track, if the asset has them
NSArray *audioTracks = [localAsset tracksWithMediaType:AVMediaTypeAudio];
if ([audioTracks count] > 0)
audioTrack = [audioTracks objectAtIndex:0];
NSArray *videoTracks = [localAsset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0)
videoTrack = [videoTracks objectAtIndex:0];
if (audioTrack)
{
// Decompress to Linear PCM with the asset reader
NSDictionary *decompressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM], AVFormatIDKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:decompressionAudioSettings];
[assetReader addOutput:output];
AudioChannelLayout stereoChannelLayout = {
.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
.mChannelBitmap = 0,
.mNumberChannelDescriptions = 0
};
NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
// Compress to 128kbps AAC with the asset writer
NSDictionary *compressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInteger:128000], AVEncoderBitRateKey,
[NSNumber numberWithInteger:44100], AVSampleRateKey,
channelLayoutAsData, AVChannelLayoutKey,
[NSNumber numberWithUnsignedInteger:2], AVNumberOfChannelsKey,
nil];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[audioTrack mediaType] outputSettings:compressionAudioSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
audioSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
if (videoTrack)
{
// Decompress to ARGB with the asset reader
NSDictionary *decompressionVideoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (id)kCVPixelBufferPixelFormatTypeKey,
[NSDictionary dictionary], (id)kCVPixelBufferIOSurfacePropertiesKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:decompressionVideoSettings];
[assetReader addOutput:output];
// Get the format description of the track, to fill in attributes of the video stream that we don't want to change
CMFormatDescriptionRef formatDescription = NULL;
NSArray *formatDescriptions = [videoTrack formatDescriptions];
if ([formatDescriptions count] > 0)
formatDescription = (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];
// Grab track dimensions from format description
CGSize trackDimensions = {
.width = 0.0,
.height = 0.0,
};
if (formatDescription)
trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false);
else
trackDimensions = [videoTrack naturalSize];
// Grab clean aperture, pixel aspect ratio from format description
NSMutableDictionary *compressionSettings = nil;
// [NSMutableDictionary dictionaryWithObjectsAndKeys:
// AVVideoProfileLevelH264Baseline30, AVVideoProfileLevelKey,
// [NSNumber numberWithInt:960000], AVVideoAverageBitRateKey,
// [NSNumber numberWithInt:1],AVVideoMaxKeyFrameIntervalKey,
// nil ];
//NSDictionary *videoSettings = nil;
if (formatDescription)
{
NSDictionary *cleanAperture = nil;
NSDictionary *pixelAspectRatio = nil;
CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture);
if (cleanApertureFromCMFormatDescription)
{
cleanAperture = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth), AVVideoCleanApertureWidthKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight), AVVideoCleanApertureHeightKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset), AVVideoCleanApertureHorizontalOffsetKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset), AVVideoCleanApertureVerticalOffsetKey,
nil];
}
CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio);
if (pixelAspectRatioFromCMFormatDescription)
{
pixelAspectRatio = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing), AVVideoPixelAspectRatioHorizontalSpacingKey,
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing), AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
}
if (cleanAperture || pixelAspectRatio)
{
if (cleanAperture)
[compressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey];
if (pixelAspectRatio)
[compressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey];
}
}
// Compress to H.264 with the asset writer
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithDouble:trackDimensions.width], AVVideoWidthKey,
[NSNumber numberWithDouble:trackDimensions.height], AVVideoHeightKey,
nil];
if (compressionSettings)
[videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[videoTrack mediaType] outputSettings:videoSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
videoSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
}
if (outError)
*outError = localError;
return success;
}
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
// Instruct the asset reader and asset writer to get ready to do work
success = [assetReader startReading];
if (!success)
localError = [assetReader error];
if (success)
{
success = [assetWriter startWriting];
if (!success)
localError = [assetWriter error];
}
if (success)
{
dispatch_group_t dispatchGroup = dispatch_group_create();
// Start a sample-writing session
[assetWriter startSessionAtSourceTime:[self timeRange].start];
// Start reading and writing samples
if (audioSampleBufferChannel)
{
// Only set audio delegate for audio-only assets, else let the video channel drive progress
id <RWSampleBufferChannelDelegate> delegate = nil;
if (!videoSampleBufferChannel)
delegate = self;
dispatch_group_enter(dispatchGroup);
[audioSampleBufferChannel startWithDelegate:delegate completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
if (videoSampleBufferChannel)
{
dispatch_group_enter(dispatchGroup);
[videoSampleBufferChannel startWithDelegate:self completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
// Set up a callback for when the sample writing is finished
dispatch_group_notify(dispatchGroup, serializationQueue, ^{
BOOL finalSuccess = YES;
NSError *finalError = nil;
if (cancelled)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
else
{
if ([assetReader status] == AVAssetReaderStatusFailed)
{
finalSuccess = NO;
finalError = [assetReader error];
}
if (finalSuccess)
{
finalSuccess = [assetWriter finishWriting];
if (!finalSuccess)
finalError = [assetWriter error];
}
}
[self readingAndWritingDidFinishSuccessfully:finalSuccess withError:finalError];
});
dispatch_release(dispatchGroup);
}
if (outError)
*outError = localError;
return success;
}
- (void)cancel
{
self.propgerssView = nil;
// Dispatch cancellation tasks to the serialization queue to avoid races with setup and teardown
dispatch_async(serializationQueue, ^{
[audioSampleBufferChannel cancel];
[videoSampleBufferChannel cancel];
cancelled = YES;
});
}
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error
{
NSLog(@"%s[%d] - success = %d error = %@", __FUNCTION__, __LINE__, success, error);
if (!success)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
// Tear down ivars
[assetReader release];
assetReader = nil;
[assetWriter release];
assetWriter = nil;
[audioSampleBufferChannel release];
audioSampleBufferChannel = nil;
[videoSampleBufferChannel release];
videoSampleBufferChannel = nil;
cancelled = NO;
// Dispatch UI-related tasks to the main queue
dispatch_async(dispatch_get_main_queue(), ^{
if (!success)
{
}
[self setWritingSamples:NO];
});
}
static double progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer, CMTimeRange timeRange)
{
CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
progressTime = CMTimeSubtract(progressTime, timeRange.start);
CMTime sampleDuration = CMSampleBufferGetDuration(sampleBuffer);
if (CMTIME_IS_NUMERIC(sampleDuration))
progressTime= CMTimeAdd(progressTime, sampleDuration);
return CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(timeRange.duration);
}
static void removeARGBColorComponentOfPixelBuffer(CVPixelBufferRef pixelBuffer, size_t componentIndex)
{
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
static const size_t bytesPerPixel = 4; // constant for ARGB pixel format
unsigned char *base = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
for (size_t row = 0; row < bufferHeight; ++row)
{
for (size_t column = 0; column < bufferWidth; ++column)
{
unsigned char *pixel = base + (row * bytesPerRow) + (column * bytesPerPixel);
pixel[componentIndex] = 0;
}
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
+ (size_t)componentIndexFromFilterTag:(NSInteger)filterTag
{
return (size_t)filterTag; // we set up the tags in the popup button to correspond directly with the index they modify
}
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
CVPixelBufferRef pixelBuffer = NULL;
// Calculate progress (scale of 0.0 to 1.0)
double progress = progressOfSampleBufferInTimeRange(sampleBuffer, [self timeRange]);
NSLog(#"%s[%d] - progress = %f", __FUNCTION__, __LINE__, progress);
// Grab the pixel buffer from the sample buffer, if possible
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer && (CFGetTypeID(imageBuffer) == CVPixelBufferGetTypeID()))
{
pixelBuffer = (CVPixelBufferRef)imageBuffer;
if (filterTag >= 0) // -1 means "no filtering, please"
removeARGBColorComponentOfPixelBuffer(pixelBuffer, [[self class] componentIndexFromFilterTag:filterTag]);
}
}
@end
@interface RWSampleBufferChannel ()
- (void)callCompletionHandlerIfNecessary; // always called on the serialization queue
@end
@implementation RWSampleBufferChannel
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)localAssetReaderOutput assetWriterInput:(AVAssetWriterInput *)localAssetWriterInput
{
self = [super init];
if (self)
{
assetReaderOutput = [localAssetReaderOutput retain];
assetWriterInput = [localAssetWriterInput retain];
finished = NO;
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[assetReaderOutput release];
[assetWriterInput release];
if (serializationQueue)
dispatch_release(serializationQueue);
[completionHandler release];
[super dealloc];
}
- (NSString *)mediaType
{
return [assetReaderOutput mediaType];
}
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)localCompletionHandler
{
completionHandler = [localCompletionHandler copy]; // released in -callCompletionHandlerIfNecessary
[assetWriterInput requestMediaDataWhenReadyOnQueue:serializationQueue usingBlock:^{
if (finished)
return;
BOOL completedOrFailed = NO;
// Read samples in a loop as long as the asset writer input is ready
while ([assetWriterInput isReadyForMoreMediaData] && !completedOrFailed)
{
CMSampleBufferRef sampleBuffer = [assetReaderOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
if ([delegate respondsToSelector:@selector(sampleBufferChannel:didReadSampleBuffer:)])
[delegate sampleBufferChannel:self didReadSampleBuffer:sampleBuffer];
BOOL success = [assetWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
[self callCompletionHandlerIfNecessary];
}];
}
- (void)cancel
{
dispatch_async(serializationQueue, ^{
[self callCompletionHandlerIfNecessary];
});
}
- (void)callCompletionHandlerIfNecessary
{
// Set state to mark that we no longer need to call the completion handler, grab the completion handler, and clear out the ivar
BOOL oldFinished = finished;
finished = YES;
if (oldFinished == NO)
{
[assetWriterInput markAsFinished]; // let the asset writer know that we will not be appending any more samples to this input
dispatch_block_t localCompletionHandler = [completionHandler retain];
[completionHandler release];
completionHandler = nil;
if (localCompletionHandler)
{
localCompletionHandler();
[localCompletionHandler release];
}
}
}
@end
Hey, it took a long while, but I ended up with a good solution, and it may help someone in the future.
My code:
-(void) compressVideo
{
asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
NSLog(@" %@", [AVAssetExportSession exportPresetsCompatibleWithAsset:asset]);
NSLog(@" %@", exportSession.supportedFileTypes);
NSLog(@"----------------------------------------- convert to mp4");
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = [self outputVideoPath:@"outPut" ext:@"mp4"];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
ICQLog(@" exportSession.status = %ld exportSession.error = %@", (long)exportSession.status, exportSession.error);
if ( exportSession && (exportSession.status == AVAssetExportSessionStatusCompleted) )
{
ICQLog(@" exportSession.outputURL = %@", exportSession.outputURL);
// we need to remove the temporary file
[[NSFileManager defaultManager] removeItemAtURL:videoUrl error:NULL];
[videoUrl release];
videoUrl = [exportSession.outputURL retain];
}
else
{
//TODO - report error
}
[exportSession release], exportSession = nil;
[asset release], asset = nil;
}];
}
I can't help with the trans-wrap stuff; I haven't got my head around that part yet.
Is the main priority to get the file output as an .mp4 without having to reprocess it? If so, just use .mp4 as the file extension of the movie clip output by your code and this should work fine. I used this approach today and it works; I didn't have to convert from .mov to .mp4, because an .mp4 file is essentially a .mov file with some additional standards-based functionality.
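As an illustration of that idea, here is a minimal sketch (the path, file name, and writer variable are my own placeholders, not taken from the code in this thread) of pointing an AVAssetWriter straight at an .mp4 path with AVFileTypeMPEG4; note that the writer inputs then need MP4-compatible settings (typically H.264 video and AAC audio) rather than raw PCM:
// Sketch only: write directly into an MP4 container so no post-processing pass is needed.
NSString *documents = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *mp4Path = [documents stringByAppendingPathComponent:@"screen recording.mp4"]; // placeholder file name
NSError *writerError = nil;
AVAssetWriter *mp4Writer = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:mp4Path]
fileType:AVFileTypeMPEG4
error:&writerError];
if (!mp4Writer) {
NSLog(@"Could not create the MP4 writer: %@", writerError);
}
// ...then add the same AVAssetWriterInputs as before and call startWriting / startSessionAtSourceTime: as usual.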
Hope this is of help.
This is the code I used.
- (BOOL)encodeVideo:(NSURL *)videoURL
{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
// Create the composition and tracks
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *assetVideoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if (assetVideoTracks.count <= 0)
{
NSLog(#"Error reading the transformed video track");
return NO;
}
// Insert the tracks in the composition's tracks
AVAssetTrack *assetVideoTrack = [assetVideoTracks firstObject];
[videoTrack insertTimeRange:assetVideoTrack.timeRange ofTrack:assetVideoTrack atTime:CMTimeMake(0, 1) error:nil];
[videoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
NSArray *assetAudioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
if (assetAudioTracks.count > 0)
{
AVAssetTrack *assetAudioTrack = [assetAudioTracks firstObject];
[audioTrack insertTimeRange:assetAudioTrack.timeRange ofTrack:assetAudioTrack atTime:CMTimeMake(0, 1) error:nil];
}
// Export to mp4
NSString *mp4Quality = [MGPublic isIOSAbove:@"6.0"] ? AVAssetExportPresetMediumQuality : AVAssetExportPresetPassthrough;
NSString *exportPath = [NSString stringWithFormat:@"%@/%@.mp4",
[NSHomeDirectory() stringByAppendingString:@"/tmp"],
[BSCommon uuidString]];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:mp4Quality];
exportSession.outputURL = exportUrl;
CMTime start = kCMTimeZero; // avoid passing a 0 preferred timescale to CMTimeMakeWithSeconds
CMTimeRange range = CMTimeRangeMake(start, [asset duration]);
exportSession.timeRange = range;
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
NSLog(#"MP4 Successful!");
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %#", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
default:
break;
}
}];
return YES;
}
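A possible call site, just to show how this fits together (the file name below is a placeholder for whatever your AVAssetWriter produced); note that the method returns as soon as the export has been started, and the real outcome is reported later in the exportAsynchronouslyWithCompletionHandler: block:
// Usage sketch: point the method at the finished recording on disk.
NSString *documents = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSURL *recordedMovieURL = [NSURL fileURLWithPath:[documents stringByAppendingPathComponent:@"screen recording.mov"]]; // placeholder file name
if ([self encodeVideo:recordedMovieURL]) {
NSLog(@"MP4 export started for %@", [recordedMovieURL lastPathComponent]);
} else {
NSLog(@"Could not start the MP4 export (no video track found)");
}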